Handle VIEW_CONVERT_EXPR for variable-length vectors
gcc/fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-vrp.h"
79 #include "tree-ssanames.h"
80 #include "selftest.h"
81 #include "stringpool.h"
82 #include "attribs.h"
83 #include "tree-vector-builder.h"
84 #include "vec-perm-indices.h"
85
86 /* Nonzero if we are folding constants inside an initializer; zero
87 otherwise. */
88 int folding_initializer = 0;
89
90 /* The following constants represent a bit-based encoding of GCC's
91 comparison operators. This encoding simplifies transformations
92 on relational comparison operators, such as ANDing and ORing them. */
93 enum comparison_code {
94 COMPCODE_FALSE = 0,
95 COMPCODE_LT = 1,
96 COMPCODE_EQ = 2,
97 COMPCODE_LE = 3,
98 COMPCODE_GT = 4,
99 COMPCODE_LTGT = 5,
100 COMPCODE_GE = 6,
101 COMPCODE_ORD = 7,
102 COMPCODE_UNORD = 8,
103 COMPCODE_UNLT = 9,
104 COMPCODE_UNEQ = 10,
105 COMPCODE_UNLE = 11,
106 COMPCODE_UNGT = 12,
107 COMPCODE_NE = 13,
108 COMPCODE_UNGE = 14,
109 COMPCODE_TRUE = 15
110 };
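/* Each bit in this encoding stands for one possible outcome of a
   comparison: bit 0 = "less", bit 1 = "equal", bit 2 = "greater",
   bit 3 = "unordered".  For example,
     COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ                  (1 | 2 == 3)
     COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD (1 | 4 | 8 == 13)
   so the bitwise OR (resp. AND) of two codes is the code of the logical
   OR (resp. AND) of the corresponding comparisons.  */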
111
112 static bool negate_expr_p (tree);
113 static tree negate_expr (tree);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static bool twoval_comparison_p (tree, tree *, tree *);
118 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
119 static tree optimize_bit_field_compare (location_t, enum tree_code,
120 tree, tree, tree);
121 static bool simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
130 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
131 static tree fold_binary_op_with_conditional_arg (location_t,
132 enum tree_code, tree,
133 tree, tree,
134 tree, tree, int);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (const_tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_convert_const (enum tree_code, tree, tree);
139 static tree fold_view_convert_expr (tree, tree);
140 static tree fold_negate_expr (location_t, tree);
141
142
143 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
144 Otherwise, return LOC. */
145
146 static location_t
147 expr_location_or (tree t, location_t loc)
148 {
149 location_t tloc = EXPR_LOCATION (t);
150 return tloc == UNKNOWN_LOCATION ? loc : tloc;
151 }
152
153 /* Similar to protected_set_expr_location, but never modify x in place;
154 if the location can and needs to be set, unshare it. */
155
156 static inline tree
157 protected_set_expr_location_unshare (tree x, location_t loc)
158 {
159 if (CAN_HAVE_LOCATION_P (x)
160 && EXPR_LOCATION (x) != loc
161 && !(TREE_CODE (x) == SAVE_EXPR
162 || TREE_CODE (x) == TARGET_EXPR
163 || TREE_CODE (x) == BIND_EXPR))
164 {
165 x = copy_node (x);
166 SET_EXPR_LOCATION (x, loc);
167 }
168 return x;
169 }
170 \f
171 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
172 division and returns the quotient. Otherwise returns
173 NULL_TREE. */
174
175 tree
176 div_if_zero_remainder (const_tree arg1, const_tree arg2)
177 {
178 widest_int quo;
179
180 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
181 SIGNED, &quo))
182 return wide_int_to_tree (TREE_TYPE (arg1), quo);
183
184 return NULL_TREE;
185 }
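/* For example, div_if_zero_remainder folds 12 / 4 to 3, but returns
   NULL_TREE for 9 / 4 because the division is not exact.  */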
186 \f
187 /* This is nonzero if we should defer warnings about undefined
188 overflow. This facility exists because these warnings are a
189 special case. The code to estimate loop iterations does not want
190 to issue any warnings, since it works with expressions which do not
191 occur in user code. Various bits of cleanup code call fold(), but
192 only use the result if it has certain characteristics (e.g., is a
193 constant); that code only wants to issue a warning if the result is
194 used. */
195
196 static int fold_deferring_overflow_warnings;
197
198 /* If a warning about undefined overflow is deferred, this is the
199 warning. Note that this may cause us to turn two warnings into
200 one, but that is fine since it is sufficient to only give one
201 warning per expression. */
202
203 static const char* fold_deferred_overflow_warning;
204
205 /* If a warning about undefined overflow is deferred, this is the
206 level at which the warning should be emitted. */
207
208 static enum warn_strict_overflow_code fold_deferred_overflow_code;
209
210 /* Start deferring overflow warnings. We could use a stack here to
211 permit nested calls, but at present it is not necessary. */
212
213 void
214 fold_defer_overflow_warnings (void)
215 {
216 ++fold_deferring_overflow_warnings;
217 }
218
219 /* Stop deferring overflow warnings. If there is a pending warning,
220 and ISSUE is true, then issue the warning if appropriate. STMT is
221 the statement with which the warning should be associated (used for
222 location information); STMT may be NULL. CODE is the level of the
223 warning--a warn_strict_overflow_code value. This function will use
224 the smaller of CODE and the deferred code when deciding whether to
225 issue the warning. CODE may be zero to mean to always use the
226 deferred code. */
227
228 void
229 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
230 {
231 const char *warnmsg;
232 location_t locus;
233
234 gcc_assert (fold_deferring_overflow_warnings > 0);
235 --fold_deferring_overflow_warnings;
236 if (fold_deferring_overflow_warnings > 0)
237 {
238 if (fold_deferred_overflow_warning != NULL
239 && code != 0
240 && code < (int) fold_deferred_overflow_code)
241 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
242 return;
243 }
244
245 warnmsg = fold_deferred_overflow_warning;
246 fold_deferred_overflow_warning = NULL;
247
248 if (!issue || warnmsg == NULL)
249 return;
250
251 if (gimple_no_warning_p (stmt))
252 return;
253
254 /* Use the smallest code level when deciding to issue the
255 warning. */
256 if (code == 0 || code > (int) fold_deferred_overflow_code)
257 code = fold_deferred_overflow_code;
258
259 if (!issue_strict_overflow_warning (code))
260 return;
261
262 if (stmt == NULL)
263 locus = input_location;
264 else
265 locus = gimple_location (stmt);
266 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
267 }
268
269 /* Stop deferring overflow warnings, ignoring any deferred
270 warnings. */
271
272 void
273 fold_undefer_and_ignore_overflow_warnings (void)
274 {
275 fold_undefer_overflow_warnings (false, NULL, 0);
276 }
277
278 /* Whether we are deferring overflow warnings. */
279
280 bool
281 fold_deferring_overflow_warnings_p (void)
282 {
283 return fold_deferring_overflow_warnings > 0;
284 }
285
286 /* This is called when we fold something based on the fact that signed
287 overflow is undefined. */
288
289 void
290 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
291 {
292 if (fold_deferring_overflow_warnings > 0)
293 {
294 if (fold_deferred_overflow_warning == NULL
295 || wc < fold_deferred_overflow_code)
296 {
297 fold_deferred_overflow_warning = gmsgid;
298 fold_deferred_overflow_code = wc;
299 }
300 }
301 else if (issue_strict_overflow_warning (wc))
302 warning (OPT_Wstrict_overflow, gmsgid);
303 }
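/* Typical usage of the deferral machinery, sketched: a caller brackets a
   fold with
     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (res != NULL_TREE && used_p, stmt, 0);
   so that any -Wstrict-overflow diagnostic raised inside fold is emitted
   only if the folded result is actually used.  (used_p and stmt here are
   placeholders for caller-specific values.)  */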
304 \f
305 /* Return true if the built-in mathematical function specified by FN
306 is odd, i.e. -f(x) == f(-x). */
307
308 bool
309 negate_mathfn_p (combined_fn fn)
310 {
311 switch (fn)
312 {
313 CASE_CFN_ASIN:
314 CASE_CFN_ASINH:
315 CASE_CFN_ATAN:
316 CASE_CFN_ATANH:
317 CASE_CFN_CASIN:
318 CASE_CFN_CASINH:
319 CASE_CFN_CATAN:
320 CASE_CFN_CATANH:
321 CASE_CFN_CBRT:
322 CASE_CFN_CPROJ:
323 CASE_CFN_CSIN:
324 CASE_CFN_CSINH:
325 CASE_CFN_CTAN:
326 CASE_CFN_CTANH:
327 CASE_CFN_ERF:
328 CASE_CFN_LLROUND:
329 CASE_CFN_LROUND:
330 CASE_CFN_ROUND:
331 CASE_CFN_ROUNDEVEN:
332 CASE_CFN_ROUNDEVEN_FN:
333 CASE_CFN_SIN:
334 CASE_CFN_SINH:
335 CASE_CFN_TAN:
336 CASE_CFN_TANH:
337 CASE_CFN_TRUNC:
338 return true;
339
340 CASE_CFN_LLRINT:
341 CASE_CFN_LRINT:
342 CASE_CFN_NEARBYINT:
343 CASE_CFN_RINT:
344 return !flag_rounding_math;
345
346 default:
347 break;
348 }
349 return false;
350 }
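/* For instance, sin is odd (-sin(x) == sin(-x)), so a call can absorb a
   negation of its argument.  rint and friends are only treated as odd when
   -frounding-math is off: under a directed rounding mode such as upward,
   rint (-0.5) is -0.0 while -rint (0.5) is -1.0, so the identity fails.  */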
351
352 /* Check whether we may negate an integer constant T without causing
353 overflow. */
354
355 bool
356 may_negate_without_overflow_p (const_tree t)
357 {
358 tree type;
359
360 gcc_assert (TREE_CODE (t) == INTEGER_CST);
361
362 type = TREE_TYPE (t);
363 if (TYPE_UNSIGNED (type))
364 return false;
365
366 return !wi::only_sign_bit_p (wi::to_wide (t));
367 }
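/* For a 32-bit signed int this rejects only INT_MIN (0x80000000), the one
   value whose negation is not representable; every other constant can be
   negated safely.  */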
368
369 /* Determine whether an expression T can be cheaply negated using
370 the function negate_expr without introducing undefined overflow. */
371
372 static bool
373 negate_expr_p (tree t)
374 {
375 tree type;
376
377 if (t == 0)
378 return false;
379
380 type = TREE_TYPE (t);
381
382 STRIP_SIGN_NOPS (t);
383 switch (TREE_CODE (t))
384 {
385 case INTEGER_CST:
386 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
387 return true;
388
389 /* Check that -CST will not overflow type. */
390 return may_negate_without_overflow_p (t);
391 case BIT_NOT_EXPR:
392 return (INTEGRAL_TYPE_P (type)
393 && TYPE_OVERFLOW_WRAPS (type));
394
395 case FIXED_CST:
396 return true;
397
398 case NEGATE_EXPR:
399 return !TYPE_OVERFLOW_SANITIZED (type);
400
401 case REAL_CST:
402 /* We want to canonicalize to positive real constants. Pretend
403 that only negative ones can be easily negated. */
404 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
405
406 case COMPLEX_CST:
407 return negate_expr_p (TREE_REALPART (t))
408 && negate_expr_p (TREE_IMAGPART (t));
409
410 case VECTOR_CST:
411 {
412 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
413 return true;
414
415 /* Steps don't prevent negation. */
416 unsigned int count = vector_cst_encoded_nelts (t);
417 for (unsigned int i = 0; i < count; ++i)
418 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
419 return false;
420
421 return true;
422 }
423
424 case COMPLEX_EXPR:
425 return negate_expr_p (TREE_OPERAND (t, 0))
426 && negate_expr_p (TREE_OPERAND (t, 1));
427
428 case CONJ_EXPR:
429 return negate_expr_p (TREE_OPERAND (t, 0));
430
431 case PLUS_EXPR:
432 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
433 || HONOR_SIGNED_ZEROS (element_mode (type))
434 || (ANY_INTEGRAL_TYPE_P (type)
435 && ! TYPE_OVERFLOW_WRAPS (type)))
436 return false;
437 /* -(A + B) -> (-B) - A. */
438 if (negate_expr_p (TREE_OPERAND (t, 1)))
439 return true;
440 /* -(A + B) -> (-A) - B. */
441 return negate_expr_p (TREE_OPERAND (t, 0));
442
443 case MINUS_EXPR:
444 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
445 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
446 && !HONOR_SIGNED_ZEROS (element_mode (type))
447 && (! ANY_INTEGRAL_TYPE_P (type)
448 || TYPE_OVERFLOW_WRAPS (type));
449
450 case MULT_EXPR:
451 if (TYPE_UNSIGNED (type))
452 break;
453 /* INT_MIN/n * n doesn't overflow, but negating one operand does
454 if n is a (negative) power of two. */
455 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
456 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
457 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
458 && (wi::popcount
459 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
460 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
461 && (wi::popcount
462 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
463 break;
464
465 /* Fall through. */
466
467 case RDIV_EXPR:
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
469 return negate_expr_p (TREE_OPERAND (t, 1))
470 || negate_expr_p (TREE_OPERAND (t, 0));
471 break;
472
473 case TRUNC_DIV_EXPR:
474 case ROUND_DIV_EXPR:
475 case EXACT_DIV_EXPR:
476 if (TYPE_UNSIGNED (type))
477 break;
478 /* In general we can't negate A in A / B, because if A is INT_MIN and
479 B is not 1 we change the sign of the result. */
480 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
481 && negate_expr_p (TREE_OPERAND (t, 0)))
482 return true;
483 /* In general we can't negate B in A / B, because if A is INT_MIN and
484 B is 1, we may turn this into INT_MIN / -1 which is undefined
485 and actually traps on some architectures. */
486 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
487 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
488 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
489 && ! integer_onep (TREE_OPERAND (t, 1))))
490 return negate_expr_p (TREE_OPERAND (t, 1));
491 break;
492
493 case NOP_EXPR:
494 /* Negate -((double)float) as (double)(-float). */
495 if (TREE_CODE (type) == REAL_TYPE)
496 {
497 tree tem = strip_float_extensions (t);
498 if (tem != t)
499 return negate_expr_p (tem);
500 }
501 break;
502
503 case CALL_EXPR:
504 /* Negate -f(x) as f(-x). */
505 if (negate_mathfn_p (get_call_combined_fn (t)))
506 return negate_expr_p (CALL_EXPR_ARG (t, 0));
507 break;
508
509 case RSHIFT_EXPR:
510 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
511 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
512 {
513 tree op1 = TREE_OPERAND (t, 1);
514 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
515 return true;
516 }
517 break;
518
519 default:
520 break;
521 }
522 return false;
523 }
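/* Two of the cases above, spelled out: for 32-bit int, -((int) x >> 31) is
   either 0 or 1, exactly the value of (unsigned) x >> 31, which is why a
   right shift by precision - 1 is considered negatable.  For division,
   negating B in A / B is refused when B might be 1, since INT_MIN / -1
   overflows (and traps on some targets), but A / 7 can safely become
   A / -7.  */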
524
525 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
526 simplification is possible.
527 If negate_expr_p would return true for T, NULL_TREE will never be
528 returned. */
529
530 static tree
531 fold_negate_expr_1 (location_t loc, tree t)
532 {
533 tree type = TREE_TYPE (t);
534 tree tem;
535
536 switch (TREE_CODE (t))
537 {
538 /* Convert - (~A) to A + 1. */
539 case BIT_NOT_EXPR:
540 if (INTEGRAL_TYPE_P (type))
541 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
542 build_one_cst (type));
543 break;
544
545 case INTEGER_CST:
546 tem = fold_negate_const (t, type);
547 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
548 || (ANY_INTEGRAL_TYPE_P (type)
549 && !TYPE_OVERFLOW_TRAPS (type)
550 && TYPE_OVERFLOW_WRAPS (type))
551 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
552 return tem;
553 break;
554
555 case POLY_INT_CST:
556 case REAL_CST:
557 case FIXED_CST:
558 tem = fold_negate_const (t, type);
559 return tem;
560
561 case COMPLEX_CST:
562 {
563 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
564 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
565 if (rpart && ipart)
566 return build_complex (type, rpart, ipart);
567 }
568 break;
569
570 case VECTOR_CST:
571 {
572 tree_vector_builder elts;
573 elts.new_unary_operation (type, t, true);
574 unsigned int count = elts.encoded_nelts ();
575 for (unsigned int i = 0; i < count; ++i)
576 {
577 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
578 if (elt == NULL_TREE)
579 return NULL_TREE;
580 elts.quick_push (elt);
581 }
582
583 return elts.build ();
584 }
585
586 case COMPLEX_EXPR:
587 if (negate_expr_p (t))
588 return fold_build2_loc (loc, COMPLEX_EXPR, type,
589 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
590 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
591 break;
592
593 case CONJ_EXPR:
594 if (negate_expr_p (t))
595 return fold_build1_loc (loc, CONJ_EXPR, type,
596 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
597 break;
598
599 case NEGATE_EXPR:
600 if (!TYPE_OVERFLOW_SANITIZED (type))
601 return TREE_OPERAND (t, 0);
602 break;
603
604 case PLUS_EXPR:
605 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
606 && !HONOR_SIGNED_ZEROS (element_mode (type)))
607 {
608 /* -(A + B) -> (-B) - A. */
609 if (negate_expr_p (TREE_OPERAND (t, 1)))
610 {
611 tem = negate_expr (TREE_OPERAND (t, 1));
612 return fold_build2_loc (loc, MINUS_EXPR, type,
613 tem, TREE_OPERAND (t, 0));
614 }
615
616 /* -(A + B) -> (-A) - B. */
617 if (negate_expr_p (TREE_OPERAND (t, 0)))
618 {
619 tem = negate_expr (TREE_OPERAND (t, 0));
620 return fold_build2_loc (loc, MINUS_EXPR, type,
621 tem, TREE_OPERAND (t, 1));
622 }
623 }
624 break;
625
626 case MINUS_EXPR:
627 /* - (A - B) -> B - A */
628 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
629 && !HONOR_SIGNED_ZEROS (element_mode (type)))
630 return fold_build2_loc (loc, MINUS_EXPR, type,
631 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
632 break;
633
634 case MULT_EXPR:
635 if (TYPE_UNSIGNED (type))
636 break;
637
638 /* Fall through. */
639
640 case RDIV_EXPR:
641 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
642 {
643 tem = TREE_OPERAND (t, 1);
644 if (negate_expr_p (tem))
645 return fold_build2_loc (loc, TREE_CODE (t), type,
646 TREE_OPERAND (t, 0), negate_expr (tem));
647 tem = TREE_OPERAND (t, 0);
648 if (negate_expr_p (tem))
649 return fold_build2_loc (loc, TREE_CODE (t), type,
650 negate_expr (tem), TREE_OPERAND (t, 1));
651 }
652 break;
653
654 case TRUNC_DIV_EXPR:
655 case ROUND_DIV_EXPR:
656 case EXACT_DIV_EXPR:
657 if (TYPE_UNSIGNED (type))
658 break;
659 /* In general we can't negate A in A / B, because if A is INT_MIN and
660 B is not 1 we change the sign of the result. */
661 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
662 && negate_expr_p (TREE_OPERAND (t, 0)))
663 return fold_build2_loc (loc, TREE_CODE (t), type,
664 negate_expr (TREE_OPERAND (t, 0)),
665 TREE_OPERAND (t, 1));
666 /* In general we can't negate B in A / B, because if A is INT_MIN and
667 B is 1, we may turn this into INT_MIN / -1 which is undefined
668 and actually traps on some architectures. */
669 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
670 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
671 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
672 && ! integer_onep (TREE_OPERAND (t, 1))))
673 && negate_expr_p (TREE_OPERAND (t, 1)))
674 return fold_build2_loc (loc, TREE_CODE (t), type,
675 TREE_OPERAND (t, 0),
676 negate_expr (TREE_OPERAND (t, 1)));
677 break;
678
679 case NOP_EXPR:
680 /* Convert -((double)float) into (double)(-float). */
681 if (TREE_CODE (type) == REAL_TYPE)
682 {
683 tem = strip_float_extensions (t);
684 if (tem != t && negate_expr_p (tem))
685 return fold_convert_loc (loc, type, negate_expr (tem));
686 }
687 break;
688
689 case CALL_EXPR:
690 /* Negate -f(x) as f(-x). */
691 if (negate_mathfn_p (get_call_combined_fn (t))
692 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
693 {
694 tree fndecl, arg;
695
696 fndecl = get_callee_fndecl (t);
697 arg = negate_expr (CALL_EXPR_ARG (t, 0));
698 return build_call_expr_loc (loc, fndecl, 1, arg);
699 }
700 break;
701
702 case RSHIFT_EXPR:
703 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
704 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
705 {
706 tree op1 = TREE_OPERAND (t, 1);
707 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
708 {
709 tree ntype = TYPE_UNSIGNED (type)
710 ? signed_type_for (type)
711 : unsigned_type_for (type);
712 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
713 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
714 return fold_convert_loc (loc, type, temp);
715 }
716 }
717 break;
718
719 default:
720 break;
721 }
722
723 return NULL_TREE;
724 }
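/* The BIT_NOT_EXPR case above relies on the two's complement identity
   ~A == -A - 1, hence -(~A) == A + 1, which is why -(~A) can be rewritten
   as A + 1 for integral types.  */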
725
726 /* A wrapper for fold_negate_expr_1. */
727
728 static tree
729 fold_negate_expr (location_t loc, tree t)
730 {
731 tree type = TREE_TYPE (t);
732 STRIP_SIGN_NOPS (t);
733 tree tem = fold_negate_expr_1 (loc, t);
734 if (tem == NULL_TREE)
735 return NULL_TREE;
736 return fold_convert_loc (loc, type, tem);
737 }
738
739 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
740 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
741 return NULL_TREE. */
742
743 static tree
744 negate_expr (tree t)
745 {
746 tree type, tem;
747 location_t loc;
748
749 if (t == NULL_TREE)
750 return NULL_TREE;
751
752 loc = EXPR_LOCATION (t);
753 type = TREE_TYPE (t);
754 STRIP_SIGN_NOPS (t);
755
756 tem = fold_negate_expr (loc, t);
757 if (!tem)
758 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
759 return fold_convert_loc (loc, type, tem);
760 }
761 \f
762 /* Split a tree IN into constant, literal and variable parts that could be
763 combined with CODE to make IN. "constant" means an expression with
764 TREE_CONSTANT but that isn't an actual constant. CODE must be a
765 commutative arithmetic operation. Store the constant part into *CONP,
766 the literal in *LITP and return the variable part. If a part isn't
767 present, set it to null. If the tree does not decompose in this way,
768 return the entire tree as the variable part and the other parts as null.
769
770 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
771 case, we negate an operand that was subtracted. Except if it is a
772 literal for which we use *MINUS_LITP instead.
773
774 If NEGATE_P is true, we are negating all of IN, again except a literal
775 for which we use *MINUS_LITP instead. If a variable part is of pointer
776 type, it is negated after converting to TYPE. This prevents us from
777 generating an illegal MINUS pointer expression. LOC is the location of
778 the converted variable part.
779
780 If IN is itself a literal or constant, return it as appropriate.
781
782 Note that we do not guarantee that any of the three values will be the
783 same type as IN, but they will have the same signedness and mode. */
784
785 static tree
786 split_tree (tree in, tree type, enum tree_code code,
787 tree *minus_varp, tree *conp, tree *minus_conp,
788 tree *litp, tree *minus_litp, int negate_p)
789 {
790 tree var = 0;
791 *minus_varp = 0;
792 *conp = 0;
793 *minus_conp = 0;
794 *litp = 0;
795 *minus_litp = 0;
796
797 /* Strip any conversions that don't change the machine mode or signedness. */
798 STRIP_SIGN_NOPS (in);
799
800 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
801 || TREE_CODE (in) == FIXED_CST)
802 *litp = in;
803 else if (TREE_CODE (in) == code
804 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
805 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
806 /* We can associate addition and subtraction together (even
807 though the C standard doesn't say so) for integers because
808 the value is not affected. For reals, the value might be
809 affected, so we can't. */
810 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
811 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
812 || (code == MINUS_EXPR
813 && (TREE_CODE (in) == PLUS_EXPR
814 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
815 {
816 tree op0 = TREE_OPERAND (in, 0);
817 tree op1 = TREE_OPERAND (in, 1);
818 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
819 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
820
821 /* First see if either of the operands is a literal, then a constant. */
822 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
823 || TREE_CODE (op0) == FIXED_CST)
824 *litp = op0, op0 = 0;
825 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
826 || TREE_CODE (op1) == FIXED_CST)
827 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
828
829 if (op0 != 0 && TREE_CONSTANT (op0))
830 *conp = op0, op0 = 0;
831 else if (op1 != 0 && TREE_CONSTANT (op1))
832 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
833
834 /* If we haven't dealt with either operand, this is not a case we can
835 decompose. Otherwise, VAR is either of the ones remaining, if any. */
836 if (op0 != 0 && op1 != 0)
837 var = in;
838 else if (op0 != 0)
839 var = op0;
840 else
841 var = op1, neg_var_p = neg1_p;
842
843 /* Now do any needed negations. */
844 if (neg_litp_p)
845 *minus_litp = *litp, *litp = 0;
846 if (neg_conp_p && *conp)
847 *minus_conp = *conp, *conp = 0;
848 if (neg_var_p && var)
849 *minus_varp = var, var = 0;
850 }
851 else if (TREE_CONSTANT (in))
852 *conp = in;
853 else if (TREE_CODE (in) == BIT_NOT_EXPR
854 && code == PLUS_EXPR)
855 {
856 /* -1 - X is folded to ~X; undo that here. Do _not_ do this
857 when IN is constant. */
858 *litp = build_minus_one_cst (type);
859 *minus_varp = TREE_OPERAND (in, 0);
860 }
861 else
862 var = in;
863
864 if (negate_p)
865 {
866 if (*litp)
867 *minus_litp = *litp, *litp = 0;
868 else if (*minus_litp)
869 *litp = *minus_litp, *minus_litp = 0;
870 if (*conp)
871 *minus_conp = *conp, *conp = 0;
872 else if (*minus_conp)
873 *conp = *minus_conp, *minus_conp = 0;
874 if (var)
875 *minus_varp = var, var = 0;
876 else if (*minus_varp)
877 var = *minus_varp, *minus_varp = 0;
878 }
879
880 if (*litp
881 && TREE_OVERFLOW_P (*litp))
882 *litp = drop_tree_overflow (*litp);
883 if (*minus_litp
884 && TREE_OVERFLOW_P (*minus_litp))
885 *minus_litp = drop_tree_overflow (*minus_litp);
886
887 return var;
888 }
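/* A small example of the decomposition: with CODE == PLUS_EXPR and
   IN == a - 5, split_tree returns a as the variable part and stores 5 in
   *MINUS_LITP (the subtracted literal); *CONP and *LITP stay null.  With
   NEGATE_P nonzero the roles flip: a is stored in *MINUS_VARP, 5 in *LITP,
   and the returned variable part is null.  */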
889
890 /* Re-associate trees split by the above function. T1 and T2 are
891 either expressions to associate or null. Return the new
892 expression, if any. LOC is the location of the new expression. If
893 we build an operation, do it in TYPE and with CODE. */
894
895 static tree
896 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
897 {
898 if (t1 == 0)
899 {
900 gcc_assert (t2 == 0 || code != MINUS_EXPR);
901 return t2;
902 }
903 else if (t2 == 0)
904 return t1;
905
906 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
907 try to fold this since we will have infinite recursion. But do
908 deal with any NEGATE_EXPRs. */
909 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
910 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
911 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
912 {
913 if (code == PLUS_EXPR)
914 {
915 if (TREE_CODE (t1) == NEGATE_EXPR)
916 return build2_loc (loc, MINUS_EXPR, type,
917 fold_convert_loc (loc, type, t2),
918 fold_convert_loc (loc, type,
919 TREE_OPERAND (t1, 0)));
920 else if (TREE_CODE (t2) == NEGATE_EXPR)
921 return build2_loc (loc, MINUS_EXPR, type,
922 fold_convert_loc (loc, type, t1),
923 fold_convert_loc (loc, type,
924 TREE_OPERAND (t2, 0)));
925 else if (integer_zerop (t2))
926 return fold_convert_loc (loc, type, t1);
927 }
928 else if (code == MINUS_EXPR)
929 {
930 if (integer_zerop (t2))
931 return fold_convert_loc (loc, type, t1);
932 }
933
934 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
935 fold_convert_loc (loc, type, t2));
936 }
937
938 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
939 fold_convert_loc (loc, type, t2));
940 }
941 \f
942 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
943 for use in int_const_binop, size_binop and size_diffop. */
944
945 static bool
946 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
947 {
948 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
949 return false;
950 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
951 return false;
952
953 switch (code)
954 {
955 case LSHIFT_EXPR:
956 case RSHIFT_EXPR:
957 case LROTATE_EXPR:
958 case RROTATE_EXPR:
959 return true;
960
961 default:
962 break;
963 }
964
965 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
966 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
967 && TYPE_MODE (type1) == TYPE_MODE (type2);
968 }
969
970 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
971 a new constant in RES. Return FALSE if we don't know how to
972 evaluate CODE at compile-time. */
973
974 bool
975 wide_int_binop (wide_int &res,
976 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
977 signop sign, wi::overflow_type *overflow)
978 {
979 wide_int tmp;
980 *overflow = wi::OVF_NONE;
981 switch (code)
982 {
983 case BIT_IOR_EXPR:
984 res = wi::bit_or (arg1, arg2);
985 break;
986
987 case BIT_XOR_EXPR:
988 res = wi::bit_xor (arg1, arg2);
989 break;
990
991 case BIT_AND_EXPR:
992 res = wi::bit_and (arg1, arg2);
993 break;
994
995 case RSHIFT_EXPR:
996 case LSHIFT_EXPR:
997 if (wi::neg_p (arg2))
998 {
999 tmp = -arg2;
1000 if (code == RSHIFT_EXPR)
1001 code = LSHIFT_EXPR;
1002 else
1003 code = RSHIFT_EXPR;
1004 }
1005 else
1006 tmp = arg2;
1007
1008 if (code == RSHIFT_EXPR)
1009 /* It's unclear from the C standard whether shifts can overflow.
1010 The following code ignores overflow; perhaps a C standard
1011 interpretation ruling is needed. */
1012 res = wi::rshift (arg1, tmp, sign);
1013 else
1014 res = wi::lshift (arg1, tmp);
1015 break;
1016
1017 case RROTATE_EXPR:
1018 case LROTATE_EXPR:
1019 if (wi::neg_p (arg2))
1020 {
1021 tmp = -arg2;
1022 if (code == RROTATE_EXPR)
1023 code = LROTATE_EXPR;
1024 else
1025 code = RROTATE_EXPR;
1026 }
1027 else
1028 tmp = arg2;
1029
1030 if (code == RROTATE_EXPR)
1031 res = wi::rrotate (arg1, tmp);
1032 else
1033 res = wi::lrotate (arg1, tmp);
1034 break;
1035
1036 case PLUS_EXPR:
1037 res = wi::add (arg1, arg2, sign, overflow);
1038 break;
1039
1040 case MINUS_EXPR:
1041 res = wi::sub (arg1, arg2, sign, overflow);
1042 break;
1043
1044 case MULT_EXPR:
1045 res = wi::mul (arg1, arg2, sign, overflow);
1046 break;
1047
1048 case MULT_HIGHPART_EXPR:
1049 res = wi::mul_high (arg1, arg2, sign);
1050 break;
1051
1052 case TRUNC_DIV_EXPR:
1053 case EXACT_DIV_EXPR:
1054 if (arg2 == 0)
1055 return false;
1056 res = wi::div_trunc (arg1, arg2, sign, overflow);
1057 break;
1058
1059 case FLOOR_DIV_EXPR:
1060 if (arg2 == 0)
1061 return false;
1062 res = wi::div_floor (arg1, arg2, sign, overflow);
1063 break;
1064
1065 case CEIL_DIV_EXPR:
1066 if (arg2 == 0)
1067 return false;
1068 res = wi::div_ceil (arg1, arg2, sign, overflow);
1069 break;
1070
1071 case ROUND_DIV_EXPR:
1072 if (arg2 == 0)
1073 return false;
1074 res = wi::div_round (arg1, arg2, sign, overflow);
1075 break;
1076
1077 case TRUNC_MOD_EXPR:
1078 if (arg2 == 0)
1079 return false;
1080 res = wi::mod_trunc (arg1, arg2, sign, overflow);
1081 break;
1082
1083 case FLOOR_MOD_EXPR:
1084 if (arg2 == 0)
1085 return false;
1086 res = wi::mod_floor (arg1, arg2, sign, overflow);
1087 break;
1088
1089 case CEIL_MOD_EXPR:
1090 if (arg2 == 0)
1091 return false;
1092 res = wi::mod_ceil (arg1, arg2, sign, overflow);
1093 break;
1094
1095 case ROUND_MOD_EXPR:
1096 if (arg2 == 0)
1097 return false;
1098 res = wi::mod_round (arg1, arg2, sign, overflow);
1099 break;
1100
1101 case MIN_EXPR:
1102 res = wi::min (arg1, arg2, sign);
1103 break;
1104
1105 case MAX_EXPR:
1106 res = wi::max (arg1, arg2, sign);
1107 break;
1108
1109 default:
1110 return false;
1111 }
1112 return true;
1113 }
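/* Note on the shift and rotate cases above: a negative count has its
   direction flipped, so e.g. a right shift by -3 is evaluated as a left
   shift by 3.  */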
1114
1115 /* Combine two poly_ints ARG1 and ARG2 under operation CODE to
1116 produce a new constant in RES. Return FALSE if we don't know how
1117 to evaluate CODE at compile-time. */
1118
1119 static bool
1120 poly_int_binop (poly_wide_int &res, enum tree_code code,
1121 const_tree arg1, const_tree arg2,
1122 signop sign, wi::overflow_type *overflow)
1123 {
1124 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1125 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1126 switch (code)
1127 {
1128 case PLUS_EXPR:
1129 res = wi::add (wi::to_poly_wide (arg1),
1130 wi::to_poly_wide (arg2), sign, overflow);
1131 break;
1132
1133 case MINUS_EXPR:
1134 res = wi::sub (wi::to_poly_wide (arg1),
1135 wi::to_poly_wide (arg2), sign, overflow);
1136 break;
1137
1138 case MULT_EXPR:
1139 if (TREE_CODE (arg2) == INTEGER_CST)
1140 res = wi::mul (wi::to_poly_wide (arg1),
1141 wi::to_wide (arg2), sign, overflow);
1142 else if (TREE_CODE (arg1) == INTEGER_CST)
1143 res = wi::mul (wi::to_poly_wide (arg2),
1144 wi::to_wide (arg1), sign, overflow);
1145 else
1146 return false;
1147 break;
1148
1149 case LSHIFT_EXPR:
1150 if (TREE_CODE (arg2) == INTEGER_CST)
1151 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1152 else
1153 return false;
1154 break;
1155
1156 case BIT_IOR_EXPR:
1157 if (TREE_CODE (arg2) != INTEGER_CST
1158 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1159 &res))
1160 return false;
1161 break;
1162
1163 default:
1164 return false;
1165 }
1166 return true;
1167 }
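/* poly_ints describe values of the form a + b * x for a runtime quantity x
   (e.g. the number of elements in a variable-length vector).  Addition and
   subtraction work coefficient-wise, and multiplication is only handled
   when one operand is a plain INTEGER_CST c, giving a*c + b*c * x; the
   product of two genuinely polynomial values is not representable here.  */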
1168
1169 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1170 produce a new constant. Return NULL_TREE if we don't know how to
1171 evaluate CODE at compile-time. */
1172
1173 tree
1174 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1175 int overflowable)
1176 {
1177 poly_wide_int poly_res;
1178 tree type = TREE_TYPE (arg1);
1179 signop sign = TYPE_SIGN (type);
1180 wi::overflow_type overflow = wi::OVF_NONE;
1181
1182 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1183 {
1184 wide_int warg1 = wi::to_wide (arg1), res;
1185 wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1186 if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1187 return NULL_TREE;
1188 poly_res = res;
1189 }
1190 else if (!poly_int_tree_p (arg1)
1191 || !poly_int_tree_p (arg2)
1192 || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1193 return NULL_TREE;
1194 return force_fit_type (type, poly_res, overflowable,
1195 (((sign == SIGNED || overflowable == -1)
1196 && overflow)
1197 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1198 }
1199
1200 /* Return true if binary operation OP distributes over addition in operand
1201 OPNO, with the other operand being held constant. OPNO counts from 1. */
1202
1203 static bool
1204 distributes_over_addition_p (tree_code op, int opno)
1205 {
1206 switch (op)
1207 {
1208 case PLUS_EXPR:
1209 case MINUS_EXPR:
1210 case MULT_EXPR:
1211 return true;
1212
1213 case LSHIFT_EXPR:
1214 return opno == 1;
1215
1216 default:
1217 return false;
1218 }
1219 }
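/* For example, (a + b) << c == (a << c) + (b << c) in a wrapping sense, so
   LSHIFT_EXPR distributes over addition in operand 1, but c << (a + b) is
   not (c << a) + (c << b), so it does not distribute in operand 2.  */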
1220
1221 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1222 constant. We assume ARG1 and ARG2 have the same data type, or at least
1223 are the same kind of constant and the same machine mode. Return zero if
1224 combining the constants is not allowed in the current operating mode. */
1225
1226 static tree
1227 const_binop (enum tree_code code, tree arg1, tree arg2)
1228 {
1229 /* Sanity check for the recursive cases. */
1230 if (!arg1 || !arg2)
1231 return NULL_TREE;
1232
1233 STRIP_NOPS (arg1);
1234 STRIP_NOPS (arg2);
1235
1236 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1237 {
1238 if (code == POINTER_PLUS_EXPR)
1239 return int_const_binop (PLUS_EXPR,
1240 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1241
1242 return int_const_binop (code, arg1, arg2);
1243 }
1244
1245 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1246 {
1247 machine_mode mode;
1248 REAL_VALUE_TYPE d1;
1249 REAL_VALUE_TYPE d2;
1250 REAL_VALUE_TYPE value;
1251 REAL_VALUE_TYPE result;
1252 bool inexact;
1253 tree t, type;
1254
1255 /* The following codes are handled by real_arithmetic. */
1256 switch (code)
1257 {
1258 case PLUS_EXPR:
1259 case MINUS_EXPR:
1260 case MULT_EXPR:
1261 case RDIV_EXPR:
1262 case MIN_EXPR:
1263 case MAX_EXPR:
1264 break;
1265
1266 default:
1267 return NULL_TREE;
1268 }
1269
1270 d1 = TREE_REAL_CST (arg1);
1271 d2 = TREE_REAL_CST (arg2);
1272
1273 type = TREE_TYPE (arg1);
1274 mode = TYPE_MODE (type);
1275
1276 /* Don't perform operation if we honor signaling NaNs and
1277 either operand is a signaling NaN. */
1278 if (HONOR_SNANS (mode)
1279 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1280 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1281 return NULL_TREE;
1282
1283 /* Don't perform operation if it would raise a division
1284 by zero exception. */
1285 if (code == RDIV_EXPR
1286 && real_equal (&d2, &dconst0)
1287 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1288 return NULL_TREE;
1289
1290 /* If either operand is a NaN, just return it. Otherwise, set up
1291 for floating-point trap; we return an overflow. */
1292 if (REAL_VALUE_ISNAN (d1))
1293 {
1294 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1295 is off. */
1296 d1.signalling = 0;
1297 t = build_real (type, d1);
1298 return t;
1299 }
1300 else if (REAL_VALUE_ISNAN (d2))
1301 {
1302 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1303 is off. */
1304 d2.signalling = 0;
1305 t = build_real (type, d2);
1306 return t;
1307 }
1308
1309 inexact = real_arithmetic (&value, code, &d1, &d2);
1310 real_convert (&result, mode, &value);
1311
1312 /* Don't constant fold this floating point operation if
1313 the result has overflowed and flag_trapping_math. */
1314 if (flag_trapping_math
1315 && MODE_HAS_INFINITIES (mode)
1316 && REAL_VALUE_ISINF (result)
1317 && !REAL_VALUE_ISINF (d1)
1318 && !REAL_VALUE_ISINF (d2))
1319 return NULL_TREE;
1320
1321 /* Don't constant fold this floating point operation if the
1322 result may depend upon the run-time rounding mode and
1323 flag_rounding_math is set, or if GCC's software emulation
1324 is unable to accurately represent the result. */
1325 if ((flag_rounding_math
1326 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1327 && (inexact || !real_identical (&result, &value)))
1328 return NULL_TREE;
1329
1330 t = build_real (type, result);
1331
1332 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1333 return t;
1334 }
1335
1336 if (TREE_CODE (arg1) == FIXED_CST)
1337 {
1338 FIXED_VALUE_TYPE f1;
1339 FIXED_VALUE_TYPE f2;
1340 FIXED_VALUE_TYPE result;
1341 tree t, type;
1342 int sat_p;
1343 bool overflow_p;
1344
1345 /* The following codes are handled by fixed_arithmetic. */
1346 switch (code)
1347 {
1348 case PLUS_EXPR:
1349 case MINUS_EXPR:
1350 case MULT_EXPR:
1351 case TRUNC_DIV_EXPR:
1352 if (TREE_CODE (arg2) != FIXED_CST)
1353 return NULL_TREE;
1354 f2 = TREE_FIXED_CST (arg2);
1355 break;
1356
1357 case LSHIFT_EXPR:
1358 case RSHIFT_EXPR:
1359 {
1360 if (TREE_CODE (arg2) != INTEGER_CST)
1361 return NULL_TREE;
1362 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1363 f2.data.high = w2.elt (1);
1364 f2.data.low = w2.ulow ();
1365 f2.mode = SImode;
1366 }
1367 break;
1368
1369 default:
1370 return NULL_TREE;
1371 }
1372
1373 f1 = TREE_FIXED_CST (arg1);
1374 type = TREE_TYPE (arg1);
1375 sat_p = TYPE_SATURATING (type);
1376 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1377 t = build_fixed (type, result);
1378 /* Propagate overflow flags. */
1379 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1380 TREE_OVERFLOW (t) = 1;
1381 return t;
1382 }
1383
1384 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1385 {
1386 tree type = TREE_TYPE (arg1);
1387 tree r1 = TREE_REALPART (arg1);
1388 tree i1 = TREE_IMAGPART (arg1);
1389 tree r2 = TREE_REALPART (arg2);
1390 tree i2 = TREE_IMAGPART (arg2);
1391 tree real, imag;
1392
1393 switch (code)
1394 {
1395 case PLUS_EXPR:
1396 case MINUS_EXPR:
1397 real = const_binop (code, r1, r2);
1398 imag = const_binop (code, i1, i2);
1399 break;
1400
1401 case MULT_EXPR:
1402 if (COMPLEX_FLOAT_TYPE_P (type))
1403 return do_mpc_arg2 (arg1, arg2, type,
1404 /* do_nonfinite= */ folding_initializer,
1405 mpc_mul);
1406
1407 real = const_binop (MINUS_EXPR,
1408 const_binop (MULT_EXPR, r1, r2),
1409 const_binop (MULT_EXPR, i1, i2));
1410 imag = const_binop (PLUS_EXPR,
1411 const_binop (MULT_EXPR, r1, i2),
1412 const_binop (MULT_EXPR, i1, r2));
1413 break;
1414
1415 case RDIV_EXPR:
1416 if (COMPLEX_FLOAT_TYPE_P (type))
1417 return do_mpc_arg2 (arg1, arg2, type,
1418 /* do_nonfinite= */ folding_initializer,
1419 mpc_div);
1420 /* Fallthru. */
1421 case TRUNC_DIV_EXPR:
1422 case CEIL_DIV_EXPR:
1423 case FLOOR_DIV_EXPR:
1424 case ROUND_DIV_EXPR:
1425 if (flag_complex_method == 0)
1426 {
1427 /* Keep this algorithm in sync with
1428 tree-complex.c:expand_complex_div_straight().
1429
1430 Expand complex division to scalars, straightforward algorithm.
1431 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1432 t = br*br + bi*bi
1433 */
1434 tree magsquared
1435 = const_binop (PLUS_EXPR,
1436 const_binop (MULT_EXPR, r2, r2),
1437 const_binop (MULT_EXPR, i2, i2));
1438 tree t1
1439 = const_binop (PLUS_EXPR,
1440 const_binop (MULT_EXPR, r1, r2),
1441 const_binop (MULT_EXPR, i1, i2));
1442 tree t2
1443 = const_binop (MINUS_EXPR,
1444 const_binop (MULT_EXPR, i1, r2),
1445 const_binop (MULT_EXPR, r1, i2));
1446
1447 real = const_binop (code, t1, magsquared);
1448 imag = const_binop (code, t2, magsquared);
1449 }
1450 else
1451 {
1452 /* Keep this algorithm in sync with
1453 tree-complex.c:expand_complex_div_wide().
1454
1455 Expand complex division to scalars, using a modified algorithm to
1456 minimize overflow with wide input ranges. */
1457 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1458 fold_abs_const (r2, TREE_TYPE (type)),
1459 fold_abs_const (i2, TREE_TYPE (type)));
1460
1461 if (integer_nonzerop (compare))
1462 {
1463 /* In the TRUE branch, we compute
1464 ratio = br/bi;
1465 div = (br * ratio) + bi;
1466 tr = (ar * ratio) + ai;
1467 ti = (ai * ratio) - ar;
1468 tr = tr / div;
1469 ti = ti / div; */
1470 tree ratio = const_binop (code, r2, i2);
1471 tree div = const_binop (PLUS_EXPR, i2,
1472 const_binop (MULT_EXPR, r2, ratio));
1473 real = const_binop (MULT_EXPR, r1, ratio);
1474 real = const_binop (PLUS_EXPR, real, i1);
1475 real = const_binop (code, real, div);
1476
1477 imag = const_binop (MULT_EXPR, i1, ratio);
1478 imag = const_binop (MINUS_EXPR, imag, r1);
1479 imag = const_binop (code, imag, div);
1480 }
1481 else
1482 {
1483 /* In the FALSE branch, we compute
1484 ratio = bi/br;
1485 div = (bi * ratio) + br;
1486 tr = (ai * ratio) + ar;
1487 ti = ai - (ar * ratio);
1488 tr = tr / div;
1489 ti = ti / div; */
1490 tree ratio = const_binop (code, i2, r2);
1491 tree div = const_binop (PLUS_EXPR, r2,
1492 const_binop (MULT_EXPR, i2, ratio));
1493
1494 real = const_binop (MULT_EXPR, i1, ratio);
1495 real = const_binop (PLUS_EXPR, real, r1);
1496 real = const_binop (code, real, div);
1497
1498 imag = const_binop (MULT_EXPR, r1, ratio);
1499 imag = const_binop (MINUS_EXPR, i1, imag);
1500 imag = const_binop (code, imag, div);
1501 }
1502 }
1503 break;
1504
1505 default:
1506 return NULL_TREE;
1507 }
1508
1509 if (real && imag)
1510 return build_complex (type, real, imag);
1511 }
1512
1513 if (TREE_CODE (arg1) == VECTOR_CST
1514 && TREE_CODE (arg2) == VECTOR_CST
1515 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1516 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1517 {
1518 tree type = TREE_TYPE (arg1);
1519 bool step_ok_p;
1520 if (VECTOR_CST_STEPPED_P (arg1)
1521 && VECTOR_CST_STEPPED_P (arg2))
1522 /* We can operate directly on the encoding if:
1523
1524 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1525 implies
1526 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1527
1528 Addition and subtraction are the supported operators
1529 for which this is true. */
1530 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1531 else if (VECTOR_CST_STEPPED_P (arg1))
1532 /* We can operate directly on stepped encodings if:
1533
1534 a3 - a2 == a2 - a1
1535 implies:
1536 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1537
1538 which is true if (x -> x op c) distributes over addition. */
1539 step_ok_p = distributes_over_addition_p (code, 1);
1540 else
1541 /* Similarly in reverse. */
1542 step_ok_p = distributes_over_addition_p (code, 2);
1543 tree_vector_builder elts;
1544 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1545 return NULL_TREE;
1546 unsigned int count = elts.encoded_nelts ();
1547 for (unsigned int i = 0; i < count; ++i)
1548 {
1549 tree elem1 = VECTOR_CST_ELT (arg1, i);
1550 tree elem2 = VECTOR_CST_ELT (arg2, i);
1551
1552 tree elt = const_binop (code, elem1, elem2);
1553
1554 /* It is possible that const_binop cannot handle the given
1555 code and returns NULL_TREE. */
1556 if (elt == NULL_TREE)
1557 return NULL_TREE;
1558 elts.quick_push (elt);
1559 }
1560
1561 return elts.build ();
1562 }
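/* A stepped VECTOR_CST encodes a series by its leading elements, e.g.
   { 1, 2, 3, ... } where each later element differs from its predecessor
   by the same step.  Operating on the encodings alone is only valid when
   the operation keeps the result linear in the element index, which is
   roughly what the step_ok_p checks above ensure: adding two stepped
   series gives a stepped series, but multiplying two of them does not.  */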
1563
1564 /* Shifts allow a scalar offset for a vector. */
1565 if (TREE_CODE (arg1) == VECTOR_CST
1566 && TREE_CODE (arg2) == INTEGER_CST)
1567 {
1568 tree type = TREE_TYPE (arg1);
1569 bool step_ok_p = distributes_over_addition_p (code, 1);
1570 tree_vector_builder elts;
1571 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1572 return NULL_TREE;
1573 unsigned int count = elts.encoded_nelts ();
1574 for (unsigned int i = 0; i < count; ++i)
1575 {
1576 tree elem1 = VECTOR_CST_ELT (arg1, i);
1577
1578 tree elt = const_binop (code, elem1, arg2);
1579
1580 /* It is possible that const_binop cannot handle the given
1581 code and returns NULL_TREE. */
1582 if (elt == NULL_TREE)
1583 return NULL_TREE;
1584 elts.quick_push (elt);
1585 }
1586
1587 return elts.build ();
1588 }
1589 return NULL_TREE;
1590 }
1591
1592 /* Overload that adds a TYPE parameter to be able to dispatch
1593 to fold_relational_const. */
1594
1595 tree
1596 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1597 {
1598 if (TREE_CODE_CLASS (code) == tcc_comparison)
1599 return fold_relational_const (code, type, arg1, arg2);
1600
1601 /* ??? Until we make the const_binop worker take the type of the
1602 result as an argument, put those cases that need it here. */
1603 switch (code)
1604 {
1605 case VEC_SERIES_EXPR:
1606 if (CONSTANT_CLASS_P (arg1)
1607 && CONSTANT_CLASS_P (arg2))
1608 return build_vec_series (type, arg1, arg2);
1609 return NULL_TREE;
1610
1611 case COMPLEX_EXPR:
1612 if ((TREE_CODE (arg1) == REAL_CST
1613 && TREE_CODE (arg2) == REAL_CST)
1614 || (TREE_CODE (arg1) == INTEGER_CST
1615 && TREE_CODE (arg2) == INTEGER_CST))
1616 return build_complex (type, arg1, arg2);
1617 return NULL_TREE;
1618
1619 case POINTER_DIFF_EXPR:
1620 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1621 {
1622 poly_offset_int res = (wi::to_poly_offset (arg1)
1623 - wi::to_poly_offset (arg2));
1624 return force_fit_type (type, res, 1,
1625 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1626 }
1627 return NULL_TREE;
1628
1629 case VEC_PACK_TRUNC_EXPR:
1630 case VEC_PACK_FIX_TRUNC_EXPR:
1631 case VEC_PACK_FLOAT_EXPR:
1632 {
1633 unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1634
1635 if (TREE_CODE (arg1) != VECTOR_CST
1636 || TREE_CODE (arg2) != VECTOR_CST)
1637 return NULL_TREE;
1638
1639 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1640 return NULL_TREE;
1641
1642 out_nelts = in_nelts * 2;
1643 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1644 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1645
1646 tree_vector_builder elts (type, out_nelts, 1);
1647 for (i = 0; i < out_nelts; i++)
1648 {
1649 tree elt = (i < in_nelts
1650 ? VECTOR_CST_ELT (arg1, i)
1651 : VECTOR_CST_ELT (arg2, i - in_nelts));
1652 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1653 ? NOP_EXPR
1654 : code == VEC_PACK_FLOAT_EXPR
1655 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1656 TREE_TYPE (type), elt);
1657 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1658 return NULL_TREE;
1659 elts.quick_push (elt);
1660 }
1661
1662 return elts.build ();
1663 }
1664
1665 case VEC_WIDEN_MULT_LO_EXPR:
1666 case VEC_WIDEN_MULT_HI_EXPR:
1667 case VEC_WIDEN_MULT_EVEN_EXPR:
1668 case VEC_WIDEN_MULT_ODD_EXPR:
1669 {
1670 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1671
1672 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1673 return NULL_TREE;
1674
1675 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1676 return NULL_TREE;
1677 out_nelts = in_nelts / 2;
1678 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1679 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1680
1681 if (code == VEC_WIDEN_MULT_LO_EXPR)
1682 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1683 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1684 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1685 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1686 scale = 1, ofs = 0;
1687 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1688 scale = 1, ofs = 1;
1689
1690 tree_vector_builder elts (type, out_nelts, 1);
1691 for (out = 0; out < out_nelts; out++)
1692 {
1693 unsigned int in = (out << scale) + ofs;
1694 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1695 VECTOR_CST_ELT (arg1, in));
1696 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1697 VECTOR_CST_ELT (arg2, in));
1698
1699 if (t1 == NULL_TREE || t2 == NULL_TREE)
1700 return NULL_TREE;
1701 tree elt = const_binop (MULT_EXPR, t1, t2);
1702 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1703 return NULL_TREE;
1704 elts.quick_push (elt);
1705 }
1706
1707 return elts.build ();
1708 }
1709
1710 default:;
1711 }
1712
1713 if (TREE_CODE_CLASS (code) != tcc_binary)
1714 return NULL_TREE;
1715
1716 /* Make sure type and arg0 have the same saturating flag. */
1717 gcc_checking_assert (TYPE_SATURATING (type)
1718 == TYPE_SATURATING (TREE_TYPE (arg1)));
1719
1720 return const_binop (code, arg1, arg2);
1721 }
1722
1723 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1724 Return zero if computing the constant is not possible. */
1725
1726 tree
1727 const_unop (enum tree_code code, tree type, tree arg0)
1728 {
1729 /* Don't perform the operation, other than NEGATE and ABS, if
1730 flag_signaling_nans is on and the operand is a signaling NaN. */
1731 if (TREE_CODE (arg0) == REAL_CST
1732 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1733 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1734 && code != NEGATE_EXPR
1735 && code != ABS_EXPR
1736 && code != ABSU_EXPR)
1737 return NULL_TREE;
1738
1739 switch (code)
1740 {
1741 CASE_CONVERT:
1742 case FLOAT_EXPR:
1743 case FIX_TRUNC_EXPR:
1744 case FIXED_CONVERT_EXPR:
1745 return fold_convert_const (code, type, arg0);
1746
1747 case ADDR_SPACE_CONVERT_EXPR:
1748 /* If the source address is 0, and the source address space
1749 cannot have a valid object at 0, fold to dest type null. */
1750 if (integer_zerop (arg0)
1751 && !(targetm.addr_space.zero_address_valid
1752 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1753 return fold_convert_const (code, type, arg0);
1754 break;
1755
1756 case VIEW_CONVERT_EXPR:
1757 return fold_view_convert_expr (type, arg0);
1758
1759 case NEGATE_EXPR:
1760 {
1761 /* Can't call fold_negate_const directly here as that doesn't
1762 handle all cases and we might not be able to negate some
1763 constants. */
1764 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1765 if (tem && CONSTANT_CLASS_P (tem))
1766 return tem;
1767 break;
1768 }
1769
1770 case ABS_EXPR:
1771 case ABSU_EXPR:
1772 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1773 return fold_abs_const (arg0, type);
1774 break;
1775
1776 case CONJ_EXPR:
1777 if (TREE_CODE (arg0) == COMPLEX_CST)
1778 {
1779 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1780 TREE_TYPE (type));
1781 return build_complex (type, TREE_REALPART (arg0), ipart);
1782 }
1783 break;
1784
1785 case BIT_NOT_EXPR:
1786 if (TREE_CODE (arg0) == INTEGER_CST)
1787 return fold_not_const (arg0, type);
1788 else if (POLY_INT_CST_P (arg0))
1789 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1790 /* Perform BIT_NOT_EXPR on each element individually. */
1791 else if (TREE_CODE (arg0) == VECTOR_CST)
1792 {
1793 tree elem;
1794
1795 /* This can cope with stepped encodings because ~x == -1 - x. */
1796 tree_vector_builder elements;
1797 elements.new_unary_operation (type, arg0, true);
1798 unsigned int i, count = elements.encoded_nelts ();
1799 for (i = 0; i < count; ++i)
1800 {
1801 elem = VECTOR_CST_ELT (arg0, i);
1802 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1803 if (elem == NULL_TREE)
1804 break;
1805 elements.quick_push (elem);
1806 }
1807 if (i == count)
1808 return elements.build ();
1809 }
1810 break;
1811
1812 case TRUTH_NOT_EXPR:
1813 if (TREE_CODE (arg0) == INTEGER_CST)
1814 return constant_boolean_node (integer_zerop (arg0), type);
1815 break;
1816
1817 case REALPART_EXPR:
1818 if (TREE_CODE (arg0) == COMPLEX_CST)
1819 return fold_convert (type, TREE_REALPART (arg0));
1820 break;
1821
1822 case IMAGPART_EXPR:
1823 if (TREE_CODE (arg0) == COMPLEX_CST)
1824 return fold_convert (type, TREE_IMAGPART (arg0));
1825 break;
1826
1827 case VEC_UNPACK_LO_EXPR:
1828 case VEC_UNPACK_HI_EXPR:
1829 case VEC_UNPACK_FLOAT_LO_EXPR:
1830 case VEC_UNPACK_FLOAT_HI_EXPR:
1831 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1832 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1833 {
1834 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1835 enum tree_code subcode;
1836
1837 if (TREE_CODE (arg0) != VECTOR_CST)
1838 return NULL_TREE;
1839
1840 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1841 return NULL_TREE;
1842 out_nelts = in_nelts / 2;
1843 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1844
1845 unsigned int offset = 0;
1846 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1847 || code == VEC_UNPACK_FLOAT_LO_EXPR
1848 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1849 offset = out_nelts;
1850
1851 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1852 subcode = NOP_EXPR;
1853 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1854 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1855 subcode = FLOAT_EXPR;
1856 else
1857 subcode = FIX_TRUNC_EXPR;
1858
1859 tree_vector_builder elts (type, out_nelts, 1);
1860 for (i = 0; i < out_nelts; i++)
1861 {
1862 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1863 VECTOR_CST_ELT (arg0, i + offset));
1864 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1865 return NULL_TREE;
1866 elts.quick_push (elt);
1867 }
1868
1869 return elts.build ();
1870 }
1871
1872 case VEC_DUPLICATE_EXPR:
1873 if (CONSTANT_CLASS_P (arg0))
1874 return build_vector_from_val (type, arg0);
1875 return NULL_TREE;
1876
1877 default:
1878 break;
1879 }
1880
1881 return NULL_TREE;
1882 }
1883
1884 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1885 indicates which particular sizetype to create. */
1886
1887 tree
1888 size_int_kind (poly_int64 number, enum size_type_kind kind)
1889 {
1890 return build_int_cst (sizetype_tab[(int) kind], number);
1891 }
1892 \f
1893 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1894 is a tree code. The type of the result is taken from the operands.
1895 Both must be equivalent integer types, as checked by int_binop_types_match_p.
1896 If the operands are constant, so is the result. */
1897
1898 tree
1899 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1900 {
1901 tree type = TREE_TYPE (arg0);
1902
1903 if (arg0 == error_mark_node || arg1 == error_mark_node)
1904 return error_mark_node;
1905
1906 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1907 TREE_TYPE (arg1)));
1908
1909 /* Handle the special case of two poly_int constants faster. */
1910 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1911 {
1912 /* And some specific cases even faster than that. */
1913 if (code == PLUS_EXPR)
1914 {
1915 if (integer_zerop (arg0)
1916 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1917 return arg1;
1918 if (integer_zerop (arg1)
1919 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1920 return arg0;
1921 }
1922 else if (code == MINUS_EXPR)
1923 {
1924 if (integer_zerop (arg1)
1925 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1926 return arg0;
1927 }
1928 else if (code == MULT_EXPR)
1929 {
1930 if (integer_onep (arg0)
1931 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1932 return arg1;
1933 }
1934
1935 /* Handle the general case of two integer constants. For sizetype
1936 constant calculations we always want to know about overflow,
1937 even in the unsigned case. */
1938 tree res = int_const_binop (code, arg0, arg1, -1);
1939 if (res != NULL_TREE)
1940 return res;
1941 }
1942
1943 return fold_build2_loc (loc, code, type, arg0, arg1);
1944 }
1945
1946 /* Given two values, either both of sizetype or both of bitsizetype,
1947 compute the difference between the two values. Return the value
1948 in signed type corresponding to the type of the operands. */
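/* For instance, on the sizetype constants 2 and 6 this routine yields
   the ssizetype constant -4: the unsigned operands are subtracted in
   the order that cannot overflow and the result is then negated.  */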
1949
1950 tree
1951 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1952 {
1953 tree type = TREE_TYPE (arg0);
1954 tree ctype;
1955
1956 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1957 TREE_TYPE (arg1)));
1958
1959 /* If the type is already signed, just do the simple thing. */
1960 if (!TYPE_UNSIGNED (type))
1961 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1962
1963 if (type == sizetype)
1964 ctype = ssizetype;
1965 else if (type == bitsizetype)
1966 ctype = sbitsizetype;
1967 else
1968 ctype = signed_type_for (type);
1969
1970 /* If either operand is not a constant, do the conversions to the signed
1971 type and subtract. The hardware will do the right thing with any
1972 overflow in the subtraction. */
1973 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1974 return size_binop_loc (loc, MINUS_EXPR,
1975 fold_convert_loc (loc, ctype, arg0),
1976 fold_convert_loc (loc, ctype, arg1));
1977
1978 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1979 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1980 overflow) and negate (which can't either). Special-case a result
1981 of zero while we're here. */
1982 if (tree_int_cst_equal (arg0, arg1))
1983 return build_int_cst (ctype, 0);
1984 else if (tree_int_cst_lt (arg1, arg0))
1985 return fold_convert_loc (loc, ctype,
1986 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1987 else
1988 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1989 fold_convert_loc (loc, ctype,
1990 size_binop_loc (loc,
1991 MINUS_EXPR,
1992 arg1, arg0)));
1993 }
1994 \f
1995 /* A subroutine of fold_convert_const handling conversions of an
1996 INTEGER_CST to another integer type. */
1997
1998 static tree
1999 fold_convert_const_int_from_int (tree type, const_tree arg1)
2000 {
2001 /* Given an integer constant, make new constant with new type,
2002 appropriately sign-extended or truncated. Use widest_int
2003 so that any extension is done according to ARG1's type. */
2004 return force_fit_type (type, wi::to_widest (arg1),
2005 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2006 TREE_OVERFLOW (arg1));
2007 }
2008
2009 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2010 to an integer type. */
2011
2012 static tree
2013 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2014 {
2015 bool overflow = false;
2016 tree t;
2017
2018 /* The following code implements the floating point to integer
2019 conversion rules required by the Java Language Specification:
2020 IEEE NaNs are mapped to zero and values that overflow
2021 the target precision saturate, i.e. values greater than
2022 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2023 are mapped to INT_MIN. These semantics are allowed by the
2024 C and C++ standards that simply state that the behavior of
2025 FP-to-integer conversion is unspecified upon overflow. */
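/* For example, folding (int) 1.0e30 for a 32-bit int therefore yields
   INT_MAX with TREE_OVERFLOW set on the result, and a NaN operand
   folds to 0, again with the overflow flag set.  */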
2026
2027 wide_int val;
2028 REAL_VALUE_TYPE r;
2029 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2030
2031 switch (code)
2032 {
2033 case FIX_TRUNC_EXPR:
2034 real_trunc (&r, VOIDmode, &x);
2035 break;
2036
2037 default:
2038 gcc_unreachable ();
2039 }
2040
2041 /* If R is NaN, return zero and show we have an overflow. */
2042 if (REAL_VALUE_ISNAN (r))
2043 {
2044 overflow = true;
2045 val = wi::zero (TYPE_PRECISION (type));
2046 }
2047
2048 /* See if R is less than the lower bound or greater than the
2049 upper bound. */
2050
2051 if (! overflow)
2052 {
2053 tree lt = TYPE_MIN_VALUE (type);
2054 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2055 if (real_less (&r, &l))
2056 {
2057 overflow = true;
2058 val = wi::to_wide (lt);
2059 }
2060 }
2061
2062 if (! overflow)
2063 {
2064 tree ut = TYPE_MAX_VALUE (type);
2065 if (ut)
2066 {
2067 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2068 if (real_less (&u, &r))
2069 {
2070 overflow = true;
2071 val = wi::to_wide (ut);
2072 }
2073 }
2074 }
2075
2076 if (! overflow)
2077 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2078
2079 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2080 return t;
2081 }
2082
2083 /* A subroutine of fold_convert_const handling conversions of a
2084 FIXED_CST to an integer type. */
2085
2086 static tree
2087 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2088 {
2089 tree t;
2090 double_int temp, temp_trunc;
2091 scalar_mode mode;
2092
2093 /* Right shift FIXED_CST to temp by fbit. */
2094 temp = TREE_FIXED_CST (arg1).data;
2095 mode = TREE_FIXED_CST (arg1).mode;
2096 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2097 {
2098 temp = temp.rshift (GET_MODE_FBIT (mode),
2099 HOST_BITS_PER_DOUBLE_INT,
2100 SIGNED_FIXED_POINT_MODE_P (mode));
2101
2102 /* Left shift temp to temp_trunc by fbit. */
2103 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2104 HOST_BITS_PER_DOUBLE_INT,
2105 SIGNED_FIXED_POINT_MODE_P (mode));
2106 }
2107 else
2108 {
2109 temp = double_int_zero;
2110 temp_trunc = double_int_zero;
2111 }
2112
2113 /* If FIXED_CST is negative, we need to round the value toward 0.
2114 We do this by adding 1 to temp when the fractional bits are not zero. */
2115 if (SIGNED_FIXED_POINT_MODE_P (mode)
2116 && temp_trunc.is_negative ()
2117 && TREE_FIXED_CST (arg1).data != temp_trunc)
2118 temp += double_int_one;
2119
2120 /* Given a fixed-point constant, make new constant with new type,
2121 appropriately sign-extended or truncated. */
2122 t = force_fit_type (type, temp, -1,
2123 (temp.is_negative ()
2124 && (TYPE_UNSIGNED (type)
2125 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2126 | TREE_OVERFLOW (arg1));
2127
2128 return t;
2129 }
2130
2131 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2132 to another floating point type. */
2133
2134 static tree
2135 fold_convert_const_real_from_real (tree type, const_tree arg1)
2136 {
2137 REAL_VALUE_TYPE value;
2138 tree t;
2139
2140 /* Don't perform the operation if flag_signaling_nans is on
2141 and the operand is a signaling NaN. */
2142 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2143 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2144 return NULL_TREE;
2145
2146 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2147 t = build_real (type, value);
2148
2149 /* If converting an infinity or NAN to a representation that doesn't
2150 have one, set the overflow bit so that we can produce some kind of
2151 error message at the appropriate point if necessary. It's not the
2152 most user-friendly message, but it's better than nothing. */
2153 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2154 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2155 TREE_OVERFLOW (t) = 1;
2156 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2157 && !MODE_HAS_NANS (TYPE_MODE (type)))
2158 TREE_OVERFLOW (t) = 1;
2159 /* Regular overflow: the conversion produced an infinity in a mode
2160 that can't represent it. */
2161 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2162 && REAL_VALUE_ISINF (value)
2163 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2164 TREE_OVERFLOW (t) = 1;
2165 else
2166 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2167 return t;
2168 }
2169
2170 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2171 to a floating point type. */
2172
2173 static tree
2174 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2175 {
2176 REAL_VALUE_TYPE value;
2177 tree t;
2178
2179 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2180 &TREE_FIXED_CST (arg1));
2181 t = build_real (type, value);
2182
2183 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2184 return t;
2185 }
2186
2187 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2188 to another fixed-point type. */
2189
2190 static tree
2191 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2192 {
2193 FIXED_VALUE_TYPE value;
2194 tree t;
2195 bool overflow_p;
2196
2197 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2198 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2199 t = build_fixed (type, value);
2200
2201 /* Propagate overflow flags. */
2202 if (overflow_p | TREE_OVERFLOW (arg1))
2203 TREE_OVERFLOW (t) = 1;
2204 return t;
2205 }
2206
2207 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2208 to a fixed-point type. */
2209
2210 static tree
2211 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2212 {
2213 FIXED_VALUE_TYPE value;
2214 tree t;
2215 bool overflow_p;
2216 double_int di;
2217
2218 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2219
2220 di.low = TREE_INT_CST_ELT (arg1, 0);
2221 if (TREE_INT_CST_NUNITS (arg1) == 1)
2222 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2223 else
2224 di.high = TREE_INT_CST_ELT (arg1, 1);
2225
2226 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2227 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2228 TYPE_SATURATING (type));
2229 t = build_fixed (type, value);
2230
2231 /* Propagate overflow flags. */
2232 if (overflow_p | TREE_OVERFLOW (arg1))
2233 TREE_OVERFLOW (t) = 1;
2234 return t;
2235 }
2236
2237 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2238 to a fixed-point type. */
2239
2240 static tree
2241 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2242 {
2243 FIXED_VALUE_TYPE value;
2244 tree t;
2245 bool overflow_p;
2246
2247 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2248 &TREE_REAL_CST (arg1),
2249 TYPE_SATURATING (type));
2250 t = build_fixed (type, value);
2251
2252 /* Propagate overflow flags. */
2253 if (overflow_p | TREE_OVERFLOW (arg1))
2254 TREE_OVERFLOW (t) = 1;
2255 return t;
2256 }
2257
2258 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2259 type TYPE. If no simplification can be done return NULL_TREE. */
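/* For example, converting the INTEGER_CST 7 to a wider integer type
   with NOP_EXPR yields 7 in the new type, and converting the REAL_CST
   1.5 to an integer type with FIX_TRUNC_EXPR yields 1.  */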
2260
2261 static tree
2262 fold_convert_const (enum tree_code code, tree type, tree arg1)
2263 {
2264 tree arg_type = TREE_TYPE (arg1);
2265 if (arg_type == type)
2266 return arg1;
2267
2268 /* We can't widen types, since the runtime value could overflow the
2269 original type before being extended to the new type. */
2270 if (POLY_INT_CST_P (arg1)
2271 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2272 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2273 return build_poly_int_cst (type,
2274 poly_wide_int::from (poly_int_cst_value (arg1),
2275 TYPE_PRECISION (type),
2276 TYPE_SIGN (arg_type)));
2277
2278 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2279 || TREE_CODE (type) == OFFSET_TYPE)
2280 {
2281 if (TREE_CODE (arg1) == INTEGER_CST)
2282 return fold_convert_const_int_from_int (type, arg1);
2283 else if (TREE_CODE (arg1) == REAL_CST)
2284 return fold_convert_const_int_from_real (code, type, arg1);
2285 else if (TREE_CODE (arg1) == FIXED_CST)
2286 return fold_convert_const_int_from_fixed (type, arg1);
2287 }
2288 else if (TREE_CODE (type) == REAL_TYPE)
2289 {
2290 if (TREE_CODE (arg1) == INTEGER_CST)
2291 return build_real_from_int_cst (type, arg1);
2292 else if (TREE_CODE (arg1) == REAL_CST)
2293 return fold_convert_const_real_from_real (type, arg1);
2294 else if (TREE_CODE (arg1) == FIXED_CST)
2295 return fold_convert_const_real_from_fixed (type, arg1);
2296 }
2297 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2298 {
2299 if (TREE_CODE (arg1) == FIXED_CST)
2300 return fold_convert_const_fixed_from_fixed (type, arg1);
2301 else if (TREE_CODE (arg1) == INTEGER_CST)
2302 return fold_convert_const_fixed_from_int (type, arg1);
2303 else if (TREE_CODE (arg1) == REAL_CST)
2304 return fold_convert_const_fixed_from_real (type, arg1);
2305 }
2306 else if (TREE_CODE (type) == VECTOR_TYPE)
2307 {
2308 if (TREE_CODE (arg1) == VECTOR_CST
2309 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2310 {
2311 tree elttype = TREE_TYPE (type);
2312 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2313 /* We can't handle steps directly when extending, since the
2314 values need to wrap at the original precision first. */
2315 bool step_ok_p
2316 = (INTEGRAL_TYPE_P (elttype)
2317 && INTEGRAL_TYPE_P (arg1_elttype)
2318 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2319 tree_vector_builder v;
2320 if (!v.new_unary_operation (type, arg1, step_ok_p))
2321 return NULL_TREE;
2322 unsigned int len = v.encoded_nelts ();
2323 for (unsigned int i = 0; i < len; ++i)
2324 {
2325 tree elt = VECTOR_CST_ELT (arg1, i);
2326 tree cvt = fold_convert_const (code, elttype, elt);
2327 if (cvt == NULL_TREE)
2328 return NULL_TREE;
2329 v.quick_push (cvt);
2330 }
2331 return v.build ();
2332 }
2333 }
2334 return NULL_TREE;
2335 }
2336
2337 /* Construct a vector of zero elements of vector type TYPE. */
2338
2339 static tree
2340 build_zero_vector (tree type)
2341 {
2342 tree t;
2343
2344 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2345 return build_vector_from_val (type, t);
2346 }
2347
2348 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
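/* For instance, an int argument is convertible to a long or to an enum
   type this way, but converting an int to a float needs FLOAT_EXPR
   rather than NOP_EXPR, so that combination returns false.  */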
2349
2350 bool
2351 fold_convertible_p (const_tree type, const_tree arg)
2352 {
2353 tree orig = TREE_TYPE (arg);
2354
2355 if (type == orig)
2356 return true;
2357
2358 if (TREE_CODE (arg) == ERROR_MARK
2359 || TREE_CODE (type) == ERROR_MARK
2360 || TREE_CODE (orig) == ERROR_MARK)
2361 return false;
2362
2363 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2364 return true;
2365
2366 switch (TREE_CODE (type))
2367 {
2368 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2369 case POINTER_TYPE: case REFERENCE_TYPE:
2370 case OFFSET_TYPE:
2371 return (INTEGRAL_TYPE_P (orig)
2372 || (POINTER_TYPE_P (orig)
2373 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2374 || TREE_CODE (orig) == OFFSET_TYPE);
2375
2376 case REAL_TYPE:
2377 case FIXED_POINT_TYPE:
2378 case VECTOR_TYPE:
2379 case VOID_TYPE:
2380 return TREE_CODE (type) == TREE_CODE (orig);
2381
2382 default:
2383 return false;
2384 }
2385 }
2386
2387 /* Convert expression ARG to type TYPE. Used by the middle-end for
2388 simple conversions in preference to calling the front-end's convert. */
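/* For instance, converting an INTEGER_CST to a REAL_TYPE folds directly
   to a REAL_CST, while converting a scalar to a COMPLEX_TYPE builds a
   COMPLEX_EXPR whose imaginary part is a zero of the element type.  */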
2389
2390 tree
2391 fold_convert_loc (location_t loc, tree type, tree arg)
2392 {
2393 tree orig = TREE_TYPE (arg);
2394 tree tem;
2395
2396 if (type == orig)
2397 return arg;
2398
2399 if (TREE_CODE (arg) == ERROR_MARK
2400 || TREE_CODE (type) == ERROR_MARK
2401 || TREE_CODE (orig) == ERROR_MARK)
2402 return error_mark_node;
2403
2404 switch (TREE_CODE (type))
2405 {
2406 case POINTER_TYPE:
2407 case REFERENCE_TYPE:
2408 /* Handle conversions between pointers to different address spaces. */
2409 if (POINTER_TYPE_P (orig)
2410 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2411 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2412 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2413 /* fall through */
2414
2415 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2416 case OFFSET_TYPE:
2417 if (TREE_CODE (arg) == INTEGER_CST)
2418 {
2419 tem = fold_convert_const (NOP_EXPR, type, arg);
2420 if (tem != NULL_TREE)
2421 return tem;
2422 }
2423 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2424 || TREE_CODE (orig) == OFFSET_TYPE)
2425 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2426 if (TREE_CODE (orig) == COMPLEX_TYPE)
2427 return fold_convert_loc (loc, type,
2428 fold_build1_loc (loc, REALPART_EXPR,
2429 TREE_TYPE (orig), arg));
2430 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2431 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2432 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2433
2434 case REAL_TYPE:
2435 if (TREE_CODE (arg) == INTEGER_CST)
2436 {
2437 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2438 if (tem != NULL_TREE)
2439 return tem;
2440 }
2441 else if (TREE_CODE (arg) == REAL_CST)
2442 {
2443 tem = fold_convert_const (NOP_EXPR, type, arg);
2444 if (tem != NULL_TREE)
2445 return tem;
2446 }
2447 else if (TREE_CODE (arg) == FIXED_CST)
2448 {
2449 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2450 if (tem != NULL_TREE)
2451 return tem;
2452 }
2453
2454 switch (TREE_CODE (orig))
2455 {
2456 case INTEGER_TYPE:
2457 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2458 case POINTER_TYPE: case REFERENCE_TYPE:
2459 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2460
2461 case REAL_TYPE:
2462 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2463
2464 case FIXED_POINT_TYPE:
2465 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2466
2467 case COMPLEX_TYPE:
2468 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2469 return fold_convert_loc (loc, type, tem);
2470
2471 default:
2472 gcc_unreachable ();
2473 }
2474
2475 case FIXED_POINT_TYPE:
2476 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2477 || TREE_CODE (arg) == REAL_CST)
2478 {
2479 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2480 if (tem != NULL_TREE)
2481 goto fold_convert_exit;
2482 }
2483
2484 switch (TREE_CODE (orig))
2485 {
2486 case FIXED_POINT_TYPE:
2487 case INTEGER_TYPE:
2488 case ENUMERAL_TYPE:
2489 case BOOLEAN_TYPE:
2490 case REAL_TYPE:
2491 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2492
2493 case COMPLEX_TYPE:
2494 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2495 return fold_convert_loc (loc, type, tem);
2496
2497 default:
2498 gcc_unreachable ();
2499 }
2500
2501 case COMPLEX_TYPE:
2502 switch (TREE_CODE (orig))
2503 {
2504 case INTEGER_TYPE:
2505 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2506 case POINTER_TYPE: case REFERENCE_TYPE:
2507 case REAL_TYPE:
2508 case FIXED_POINT_TYPE:
2509 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2510 fold_convert_loc (loc, TREE_TYPE (type), arg),
2511 fold_convert_loc (loc, TREE_TYPE (type),
2512 integer_zero_node));
2513 case COMPLEX_TYPE:
2514 {
2515 tree rpart, ipart;
2516
2517 if (TREE_CODE (arg) == COMPLEX_EXPR)
2518 {
2519 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2520 TREE_OPERAND (arg, 0));
2521 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2522 TREE_OPERAND (arg, 1));
2523 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2524 }
2525
2526 arg = save_expr (arg);
2527 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2528 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2529 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2530 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2531 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2532 }
2533
2534 default:
2535 gcc_unreachable ();
2536 }
2537
2538 case VECTOR_TYPE:
2539 if (integer_zerop (arg))
2540 return build_zero_vector (type);
2541 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2542 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2543 || TREE_CODE (orig) == VECTOR_TYPE);
2544 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2545
2546 case VOID_TYPE:
2547 tem = fold_ignored_result (arg);
2548 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2549
2550 default:
2551 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2552 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2553 gcc_unreachable ();
2554 }
2555 fold_convert_exit:
2556 protected_set_expr_location_unshare (tem, loc);
2557 return tem;
2558 }
2559 \f
2560 /* Return false if expr can be assumed not to be an lvalue, true
2561 otherwise. */
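/* E.g. a VAR_DECL or a COMPONENT_REF may be an lvalue, whereas a
   PLUS_EXPR never is, so wrapping the latter in NON_LVALUE_EXPR would
   be pointless.  */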
2562
2563 static bool
2564 maybe_lvalue_p (const_tree x)
2565 {
2566 /* We only need to wrap lvalue tree codes. */
2567 switch (TREE_CODE (x))
2568 {
2569 case VAR_DECL:
2570 case PARM_DECL:
2571 case RESULT_DECL:
2572 case LABEL_DECL:
2573 case FUNCTION_DECL:
2574 case SSA_NAME:
2575
2576 case COMPONENT_REF:
2577 case MEM_REF:
2578 case INDIRECT_REF:
2579 case ARRAY_REF:
2580 case ARRAY_RANGE_REF:
2581 case BIT_FIELD_REF:
2582 case OBJ_TYPE_REF:
2583
2584 case REALPART_EXPR:
2585 case IMAGPART_EXPR:
2586 case PREINCREMENT_EXPR:
2587 case PREDECREMENT_EXPR:
2588 case SAVE_EXPR:
2589 case TRY_CATCH_EXPR:
2590 case WITH_CLEANUP_EXPR:
2591 case COMPOUND_EXPR:
2592 case MODIFY_EXPR:
2593 case TARGET_EXPR:
2594 case COND_EXPR:
2595 case BIND_EXPR:
2596 break;
2597
2598 default:
2599 /* Assume the worst for front-end tree codes. */
2600 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2601 break;
2602 return false;
2603 }
2604
2605 return true;
2606 }
2607
2608 /* Return an expr equal to X but certainly not valid as an lvalue. */
2609
2610 tree
2611 non_lvalue_loc (location_t loc, tree x)
2612 {
2613 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2614 us. */
2615 if (in_gimple_form)
2616 return x;
2617
2618 if (! maybe_lvalue_p (x))
2619 return x;
2620 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2621 }
2622
2623 /* When pedantic, return an expr equal to X but certainly not valid as a
2624 pedantic lvalue. Otherwise, return X. */
2625
2626 static tree
2627 pedantic_non_lvalue_loc (location_t loc, tree x)
2628 {
2629 return protected_set_expr_location_unshare (x, loc);
2630 }
2631 \f
2632 /* Given a tree comparison code, return the code that is the logical inverse.
2633 It is generally not safe to do this for floating-point comparisons, except
2634 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2635 ERROR_MARK in this case. */
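/* For example, the inverse of LT_EXPR is GE_EXPR when NaNs cannot
   occur, but UNGE_EXPR when they can; and when NaNs are honored and
   -ftrapping-math is in effect we refuse to invert LT_EXPR at all and
   return ERROR_MARK.  */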
2636
2637 enum tree_code
2638 invert_tree_comparison (enum tree_code code, bool honor_nans)
2639 {
2640 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2641 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2642 return ERROR_MARK;
2643
2644 switch (code)
2645 {
2646 case EQ_EXPR:
2647 return NE_EXPR;
2648 case NE_EXPR:
2649 return EQ_EXPR;
2650 case GT_EXPR:
2651 return honor_nans ? UNLE_EXPR : LE_EXPR;
2652 case GE_EXPR:
2653 return honor_nans ? UNLT_EXPR : LT_EXPR;
2654 case LT_EXPR:
2655 return honor_nans ? UNGE_EXPR : GE_EXPR;
2656 case LE_EXPR:
2657 return honor_nans ? UNGT_EXPR : GT_EXPR;
2658 case LTGT_EXPR:
2659 return UNEQ_EXPR;
2660 case UNEQ_EXPR:
2661 return LTGT_EXPR;
2662 case UNGT_EXPR:
2663 return LE_EXPR;
2664 case UNGE_EXPR:
2665 return LT_EXPR;
2666 case UNLT_EXPR:
2667 return GE_EXPR;
2668 case UNLE_EXPR:
2669 return GT_EXPR;
2670 case ORDERED_EXPR:
2671 return UNORDERED_EXPR;
2672 case UNORDERED_EXPR:
2673 return ORDERED_EXPR;
2674 default:
2675 gcc_unreachable ();
2676 }
2677 }
2678
2679 /* Similar, but return the comparison that results if the operands are
2680 swapped. This is safe for floating-point. */
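/* For example, a < b is equivalent to b > a, so LT_EXPR maps to
   GT_EXPR, while symmetric codes such as EQ_EXPR and UNORDERED_EXPR
   map to themselves.  */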
2681
2682 enum tree_code
2683 swap_tree_comparison (enum tree_code code)
2684 {
2685 switch (code)
2686 {
2687 case EQ_EXPR:
2688 case NE_EXPR:
2689 case ORDERED_EXPR:
2690 case UNORDERED_EXPR:
2691 case LTGT_EXPR:
2692 case UNEQ_EXPR:
2693 return code;
2694 case GT_EXPR:
2695 return LT_EXPR;
2696 case GE_EXPR:
2697 return LE_EXPR;
2698 case LT_EXPR:
2699 return GT_EXPR;
2700 case LE_EXPR:
2701 return GE_EXPR;
2702 case UNGT_EXPR:
2703 return UNLT_EXPR;
2704 case UNGE_EXPR:
2705 return UNLE_EXPR;
2706 case UNLT_EXPR:
2707 return UNGT_EXPR;
2708 case UNLE_EXPR:
2709 return UNGE_EXPR;
2710 default:
2711 gcc_unreachable ();
2712 }
2713 }
2714
2715
2716 /* Convert a comparison tree code from an enum tree_code representation
2717 into a compcode bit-based encoding. This function is the inverse of
2718 compcode_to_comparison. */
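/* The encoding is chosen so that combining two comparisons with AND or
   OR reduces to a bitwise AND or OR of their compcodes; e.g.
   COMPCODE_LE is COMPCODE_LT | COMPCODE_EQ, which is what lets
   combine_comparisons below work with simple bit operations.  */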
2719
2720 static enum comparison_code
2721 comparison_to_compcode (enum tree_code code)
2722 {
2723 switch (code)
2724 {
2725 case LT_EXPR:
2726 return COMPCODE_LT;
2727 case EQ_EXPR:
2728 return COMPCODE_EQ;
2729 case LE_EXPR:
2730 return COMPCODE_LE;
2731 case GT_EXPR:
2732 return COMPCODE_GT;
2733 case NE_EXPR:
2734 return COMPCODE_NE;
2735 case GE_EXPR:
2736 return COMPCODE_GE;
2737 case ORDERED_EXPR:
2738 return COMPCODE_ORD;
2739 case UNORDERED_EXPR:
2740 return COMPCODE_UNORD;
2741 case UNLT_EXPR:
2742 return COMPCODE_UNLT;
2743 case UNEQ_EXPR:
2744 return COMPCODE_UNEQ;
2745 case UNLE_EXPR:
2746 return COMPCODE_UNLE;
2747 case UNGT_EXPR:
2748 return COMPCODE_UNGT;
2749 case LTGT_EXPR:
2750 return COMPCODE_LTGT;
2751 case UNGE_EXPR:
2752 return COMPCODE_UNGE;
2753 default:
2754 gcc_unreachable ();
2755 }
2756 }
2757
2758 /* Convert a compcode bit-based encoding of a comparison operator back
2759 to GCC's enum tree_code representation. This function is the
2760 inverse of comparison_to_compcode. */
2761
2762 static enum tree_code
2763 compcode_to_comparison (enum comparison_code code)
2764 {
2765 switch (code)
2766 {
2767 case COMPCODE_LT:
2768 return LT_EXPR;
2769 case COMPCODE_EQ:
2770 return EQ_EXPR;
2771 case COMPCODE_LE:
2772 return LE_EXPR;
2773 case COMPCODE_GT:
2774 return GT_EXPR;
2775 case COMPCODE_NE:
2776 return NE_EXPR;
2777 case COMPCODE_GE:
2778 return GE_EXPR;
2779 case COMPCODE_ORD:
2780 return ORDERED_EXPR;
2781 case COMPCODE_UNORD:
2782 return UNORDERED_EXPR;
2783 case COMPCODE_UNLT:
2784 return UNLT_EXPR;
2785 case COMPCODE_UNEQ:
2786 return UNEQ_EXPR;
2787 case COMPCODE_UNLE:
2788 return UNLE_EXPR;
2789 case COMPCODE_UNGT:
2790 return UNGT_EXPR;
2791 case COMPCODE_LTGT:
2792 return LTGT_EXPR;
2793 case COMPCODE_UNGE:
2794 return UNGE_EXPR;
2795 default:
2796 gcc_unreachable ();
2797 }
2798 }
2799
2800 /* Return true if COND1 tests the opposite condition of COND2. */
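/* For example, a < b and a >= b are inverse conditions (when NaNs need
   not be honored), whereas a < b and b > a are equivalent rather than
   inverse and therefore return false here.  */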
2801
2802 bool
2803 inverse_conditions_p (const_tree cond1, const_tree cond2)
2804 {
2805 return (COMPARISON_CLASS_P (cond1)
2806 && COMPARISON_CLASS_P (cond2)
2807 && (invert_tree_comparison
2808 (TREE_CODE (cond1),
2809 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2810 && operand_equal_p (TREE_OPERAND (cond1, 0),
2811 TREE_OPERAND (cond2, 0), 0)
2812 && operand_equal_p (TREE_OPERAND (cond1, 1),
2813 TREE_OPERAND (cond2, 1), 0));
2814 }
2815
2816 /* Return a tree for the comparison which is the combination of
2817 doing the AND or OR (depending on CODE) of the two operations LCODE
2818 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2819 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2820 if this makes the transformation invalid. */
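/* For example, for integer operands, combining x < y with x == y under
   TRUTH_OR_EXPR yields the single comparison x <= y, and combining
   x < y with x > y under TRUTH_AND_EXPR folds to constant false.  */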
2821
2822 tree
2823 combine_comparisons (location_t loc,
2824 enum tree_code code, enum tree_code lcode,
2825 enum tree_code rcode, tree truth_type,
2826 tree ll_arg, tree lr_arg)
2827 {
2828 bool honor_nans = HONOR_NANS (ll_arg);
2829 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2830 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2831 int compcode;
2832
2833 switch (code)
2834 {
2835 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2836 compcode = lcompcode & rcompcode;
2837 break;
2838
2839 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2840 compcode = lcompcode | rcompcode;
2841 break;
2842
2843 default:
2844 return NULL_TREE;
2845 }
2846
2847 if (!honor_nans)
2848 {
2849 /* Eliminate unordered comparisons, as well as LTGT and ORD
2850 which are not used unless the mode has NaNs. */
2851 compcode &= ~COMPCODE_UNORD;
2852 if (compcode == COMPCODE_LTGT)
2853 compcode = COMPCODE_NE;
2854 else if (compcode == COMPCODE_ORD)
2855 compcode = COMPCODE_TRUE;
2856 }
2857 else if (flag_trapping_math)
2858 {
2859 /* Check that the original operation and the optimized ones will trap
2860 under the same condition. */
2861 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2862 && (lcompcode != COMPCODE_EQ)
2863 && (lcompcode != COMPCODE_ORD);
2864 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2865 && (rcompcode != COMPCODE_EQ)
2866 && (rcompcode != COMPCODE_ORD);
2867 bool trap = (compcode & COMPCODE_UNORD) == 0
2868 && (compcode != COMPCODE_EQ)
2869 && (compcode != COMPCODE_ORD);
2870
2871 /* In a short-circuited boolean expression the LHS might be
2872 such that the RHS, if evaluated, will never trap. For
2873 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2874 if neither x nor y is NaN. (This is a mixed blessing: for
2875 example, the expression above will never trap, hence
2876 optimizing it to x < y would be invalid). */
2877 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2878 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2879 rtrap = false;
2880
2881 /* If the comparison was short-circuited, and only the RHS
2882 trapped, we may now generate a spurious trap. */
2883 if (rtrap && !ltrap
2884 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2885 return NULL_TREE;
2886
2887 /* If we changed the conditions that cause a trap, we lose. */
2888 if ((ltrap || rtrap) != trap)
2889 return NULL_TREE;
2890 }
2891
2892 if (compcode == COMPCODE_TRUE)
2893 return constant_boolean_node (true, truth_type);
2894 else if (compcode == COMPCODE_FALSE)
2895 return constant_boolean_node (false, truth_type);
2896 else
2897 {
2898 enum tree_code tcode;
2899
2900 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2901 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2902 }
2903 }
2904 \f
2905 /* Return nonzero if two operands (typically of the same tree node)
2906 are necessarily equal. FLAGS modifies behavior as follows:
2907
2908 If OEP_ONLY_CONST is set, only return nonzero for constants.
2909 This function tests whether the operands are indistinguishable;
2910 it does not test whether they are equal using C's == operation.
2911 The distinction is important for IEEE floating point, because
2912 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2913 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2914
2915 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2916 even though it may hold multiple values during a function.
2917 This is because a GCC tree node guarantees that nothing else is
2918 executed between the evaluation of its "operands" (which may often
2919 be evaluated in arbitrary order). Hence if the operands themselves
2920 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2921 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2922 unset means assuming isochronic (or instantaneous) tree equivalence.
2923 Unless comparing arbitrary expression trees, such as from different
2924 statements, this flag can usually be left unset.
2925
2926 If OEP_PURE_SAME is set, then pure functions with identical arguments
2927 are considered the same. It is used when the caller has other ways
2928 to ensure that global memory is unchanged in between.
2929
2930 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2931 not values of expressions.
2932
2933 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2934 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2935
2936 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2937 any operand with side effects. This is unnecessarily conservative in the
2938 case where we know that arg0 and arg1 are in disjoint code paths (such as
2939 in the ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2940 addresses with TREE_CONSTANT flag set so we know that &var == &var
2941 even if var is volatile. */
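/* Some illustrative cases: a + b and b + a compare equal because
   PLUS_EXPR is commutative; -0.0 and 0.0 do not compare equal when
   signed zeros are honored; and (int) x and (unsigned) x never compare
   equal because the signedness of a conversion matters.  */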
2942
2943 bool
2944 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2945 unsigned int flags)
2946 {
2947 bool r;
2948 if (verify_hash_value (arg0, arg1, flags, &r))
2949 return r;
2950
2951 STRIP_ANY_LOCATION_WRAPPER (arg0);
2952 STRIP_ANY_LOCATION_WRAPPER (arg1);
2953
2954 /* If either is ERROR_MARK, they aren't equal. */
2955 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2956 || TREE_TYPE (arg0) == error_mark_node
2957 || TREE_TYPE (arg1) == error_mark_node)
2958 return false;
2959
2960 /* Similarly, if either does not have a type (like a template id),
2961 they aren't equal. */
2962 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2963 return false;
2964
2965 /* We cannot consider pointers to different address space equal. */
2966 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2967 && POINTER_TYPE_P (TREE_TYPE (arg1))
2968 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2969 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2970 return false;
2971
2972 /* Check equality of integer constants before bailing out due to
2973 precision differences. */
2974 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2975 {
2976 /* Address of INTEGER_CST is not defined; check that we did not forget
2977 to drop the OEP_ADDRESS_OF flags. */
2978 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2979 return tree_int_cst_equal (arg0, arg1);
2980 }
2981
2982 if (!(flags & OEP_ADDRESS_OF))
2983 {
2984 /* If both types don't have the same signedness, then we can't consider
2985 them equal. We must check this before the STRIP_NOPS calls
2986 because they may change the signedness of the arguments. As pointers
2987 strictly don't have a signedness, require either two pointers or
2988 two non-pointers as well. */
2989 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2990 || POINTER_TYPE_P (TREE_TYPE (arg0))
2991 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2992 return false;
2993
2994 /* If both types don't have the same precision, then it is not safe
2995 to strip NOPs. */
2996 if (element_precision (TREE_TYPE (arg0))
2997 != element_precision (TREE_TYPE (arg1)))
2998 return false;
2999
3000 STRIP_NOPS (arg0);
3001 STRIP_NOPS (arg1);
3002 }
3003 #if 0
3004 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3005 sanity check once the issue is solved. */
3006 else
3007 /* Addresses of conversions and SSA_NAMEs (and many other things)
3008 are not defined. Check that we did not forget to drop the
3009 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3010 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3011 && TREE_CODE (arg0) != SSA_NAME);
3012 #endif
3013
3014 /* In case both args are comparisons but with different comparison
3015 code, try to swap the comparison operands of one arg to produce
3016 a match and compare that variant. */
3017 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3018 && COMPARISON_CLASS_P (arg0)
3019 && COMPARISON_CLASS_P (arg1))
3020 {
3021 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3022
3023 if (TREE_CODE (arg0) == swap_code)
3024 return operand_equal_p (TREE_OPERAND (arg0, 0),
3025 TREE_OPERAND (arg1, 1), flags)
3026 && operand_equal_p (TREE_OPERAND (arg0, 1),
3027 TREE_OPERAND (arg1, 0), flags);
3028 }
3029
3030 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3031 {
3032 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3033 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3034 ;
3035 else if (flags & OEP_ADDRESS_OF)
3036 {
3037 /* If we are interested in comparing addresses, ignore
3038 MEM_REF wrappings of the base that can appear just for
3039 TBAA reasons. */
3040 if (TREE_CODE (arg0) == MEM_REF
3041 && DECL_P (arg1)
3042 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3043 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3044 && integer_zerop (TREE_OPERAND (arg0, 1)))
3045 return true;
3046 else if (TREE_CODE (arg1) == MEM_REF
3047 && DECL_P (arg0)
3048 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3049 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3050 && integer_zerop (TREE_OPERAND (arg1, 1)))
3051 return true;
3052 return false;
3053 }
3054 else
3055 return false;
3056 }
3057
3058 /* When not checking addresses, this is needed for conversions and for
3059 COMPONENT_REF. Might as well play it safe and always test this. */
3060 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3061 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3062 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3063 && !(flags & OEP_ADDRESS_OF)))
3064 return false;
3065
3066 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3067 We don't care about side effects in that case because the SAVE_EXPR
3068 takes care of that for us. In all other cases, two expressions are
3069 equal if they have no side effects. If we have two identical
3070 expressions with side effects that should be treated the same due
3071 to the only side effects being identical SAVE_EXPR's, that will
3072 be detected in the recursive calls below.
3073 If we are taking an invariant address of two identical objects
3074 they are necessarily equal as well. */
3075 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3076 && (TREE_CODE (arg0) == SAVE_EXPR
3077 || (flags & OEP_MATCH_SIDE_EFFECTS)
3078 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3079 return true;
3080
3081 /* Next handle constant cases, those for which we can return 1 even
3082 if ONLY_CONST is set. */
3083 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3084 switch (TREE_CODE (arg0))
3085 {
3086 case INTEGER_CST:
3087 return tree_int_cst_equal (arg0, arg1);
3088
3089 case FIXED_CST:
3090 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3091 TREE_FIXED_CST (arg1));
3092
3093 case REAL_CST:
3094 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3095 return true;
3096
3097
3098 if (!HONOR_SIGNED_ZEROS (arg0))
3099 {
3100 /* If we do not distinguish between signed and unsigned zero,
3101 consider them equal. */
3102 if (real_zerop (arg0) && real_zerop (arg1))
3103 return true;
3104 }
3105 return false;
3106
3107 case VECTOR_CST:
3108 {
3109 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3110 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3111 return false;
3112
3113 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3114 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3115 return false;
3116
3117 unsigned int count = vector_cst_encoded_nelts (arg0);
3118 for (unsigned int i = 0; i < count; ++i)
3119 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3120 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3121 return false;
3122 return true;
3123 }
3124
3125 case COMPLEX_CST:
3126 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3127 flags)
3128 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3129 flags));
3130
3131 case STRING_CST:
3132 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3133 && ! memcmp (TREE_STRING_POINTER (arg0),
3134 TREE_STRING_POINTER (arg1),
3135 TREE_STRING_LENGTH (arg0)));
3136
3137 case ADDR_EXPR:
3138 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3139 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3140 flags | OEP_ADDRESS_OF
3141 | OEP_MATCH_SIDE_EFFECTS);
3142 case CONSTRUCTOR:
3143 /* In GIMPLE empty constructors are allowed in initializers of
3144 aggregates. */
3145 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3146 default:
3147 break;
3148 }
3149
3150 if (flags & OEP_ONLY_CONST)
3151 return false;
3152
3153 /* Define macros to test an operand from arg0 and arg1 for equality and a
3154 variant that allows null and views null as being different from any
3155 non-null value. In the latter case, if either is null, they both
3156 must be; otherwise, do the normal comparison. */
3157 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3158 TREE_OPERAND (arg1, N), flags)
3159
3160 #define OP_SAME_WITH_NULL(N) \
3161 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3162 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3163
3164 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3165 {
3166 case tcc_unary:
3167 /* Two conversions are equal only if signedness and modes match. */
3168 switch (TREE_CODE (arg0))
3169 {
3170 CASE_CONVERT:
3171 case FIX_TRUNC_EXPR:
3172 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3173 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3174 return false;
3175 break;
3176 default:
3177 break;
3178 }
3179
3180 return OP_SAME (0);
3181
3182
3183 case tcc_comparison:
3184 case tcc_binary:
3185 if (OP_SAME (0) && OP_SAME (1))
3186 return true;
3187
3188 /* For commutative ops, allow the other order. */
3189 return (commutative_tree_code (TREE_CODE (arg0))
3190 && operand_equal_p (TREE_OPERAND (arg0, 0),
3191 TREE_OPERAND (arg1, 1), flags)
3192 && operand_equal_p (TREE_OPERAND (arg0, 1),
3193 TREE_OPERAND (arg1, 0), flags));
3194
3195 case tcc_reference:
3196 /* If either of the pointer (or reference) expressions we are
3197 dereferencing contain a side effect, these cannot be equal,
3198 but their addresses can be. */
3199 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3200 && (TREE_SIDE_EFFECTS (arg0)
3201 || TREE_SIDE_EFFECTS (arg1)))
3202 return false;
3203
3204 switch (TREE_CODE (arg0))
3205 {
3206 case INDIRECT_REF:
3207 if (!(flags & OEP_ADDRESS_OF))
3208 {
3209 if (TYPE_ALIGN (TREE_TYPE (arg0))
3210 != TYPE_ALIGN (TREE_TYPE (arg1)))
3211 return false;
3212 /* Verify that the access types are compatible. */
3213 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3214 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3215 return false;
3216 }
3217 flags &= ~OEP_ADDRESS_OF;
3218 return OP_SAME (0);
3219
3220 case IMAGPART_EXPR:
3221 /* Require the same offset. */
3222 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3223 TYPE_SIZE (TREE_TYPE (arg1)),
3224 flags & ~OEP_ADDRESS_OF))
3225 return false;
3226
3227 /* Fallthru. */
3228 case REALPART_EXPR:
3229 case VIEW_CONVERT_EXPR:
3230 return OP_SAME (0);
3231
3232 case TARGET_MEM_REF:
3233 case MEM_REF:
3234 if (!(flags & OEP_ADDRESS_OF))
3235 {
3236 /* Require equal access sizes */
3237 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3238 && (!TYPE_SIZE (TREE_TYPE (arg0))
3239 || !TYPE_SIZE (TREE_TYPE (arg1))
3240 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3241 TYPE_SIZE (TREE_TYPE (arg1)),
3242 flags)))
3243 return false;
3244 /* Verify that access happens in similar types. */
3245 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3246 return false;
3247 /* Verify that accesses are TBAA compatible. */
3248 if (!alias_ptr_types_compatible_p
3249 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3250 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3251 || (MR_DEPENDENCE_CLIQUE (arg0)
3252 != MR_DEPENDENCE_CLIQUE (arg1))
3253 || (MR_DEPENDENCE_BASE (arg0)
3254 != MR_DEPENDENCE_BASE (arg1)))
3255 return false;
3256 /* Verify that alignment is compatible. */
3257 if (TYPE_ALIGN (TREE_TYPE (arg0))
3258 != TYPE_ALIGN (TREE_TYPE (arg1)))
3259 return false;
3260 }
3261 flags &= ~OEP_ADDRESS_OF;
3262 return (OP_SAME (0) && OP_SAME (1)
3263 /* TARGET_MEM_REFs require equal extra operands. */
3264 && (TREE_CODE (arg0) != TARGET_MEM_REF
3265 || (OP_SAME_WITH_NULL (2)
3266 && OP_SAME_WITH_NULL (3)
3267 && OP_SAME_WITH_NULL (4))));
3268
3269 case ARRAY_REF:
3270 case ARRAY_RANGE_REF:
3271 if (!OP_SAME (0))
3272 return false;
3273 flags &= ~OEP_ADDRESS_OF;
3274 /* Compare the array index by value first if it is constant, as we
3275 may have different types but the same value here. */
3276 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3277 TREE_OPERAND (arg1, 1))
3278 || OP_SAME (1))
3279 && OP_SAME_WITH_NULL (2)
3280 && OP_SAME_WITH_NULL (3)
3281 /* Compare low bound and element size as with OEP_ADDRESS_OF
3282 we have to account for the offset of the ref. */
3283 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3284 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3285 || (operand_equal_p (array_ref_low_bound
3286 (CONST_CAST_TREE (arg0)),
3287 array_ref_low_bound
3288 (CONST_CAST_TREE (arg1)), flags)
3289 && operand_equal_p (array_ref_element_size
3290 (CONST_CAST_TREE (arg0)),
3291 array_ref_element_size
3292 (CONST_CAST_TREE (arg1)),
3293 flags))));
3294
3295 case COMPONENT_REF:
3296 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3297 may be NULL when we're called to compare MEM_EXPRs. */
3298 if (!OP_SAME_WITH_NULL (0)
3299 || !OP_SAME (1))
3300 return false;
3301 flags &= ~OEP_ADDRESS_OF;
3302 return OP_SAME_WITH_NULL (2);
3303
3304 case BIT_FIELD_REF:
3305 if (!OP_SAME (0))
3306 return false;
3307 flags &= ~OEP_ADDRESS_OF;
3308 return OP_SAME (1) && OP_SAME (2);
3309
3310 /* Virtual table call. */
3311 case OBJ_TYPE_REF:
3312 {
3313 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3314 OBJ_TYPE_REF_EXPR (arg1), flags))
3315 return false;
3316 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3317 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3318 return false;
3319 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3320 OBJ_TYPE_REF_OBJECT (arg1), flags))
3321 return false;
3322 if (!types_same_for_odr (obj_type_ref_class (arg0),
3323 obj_type_ref_class (arg1)))
3324 return false;
3325 return true;
3326 }
3327
3328 default:
3329 return false;
3330 }
3331
3332 case tcc_expression:
3333 switch (TREE_CODE (arg0))
3334 {
3335 case ADDR_EXPR:
3336 /* Be sure we pass the right ADDRESS_OF flag. */
3337 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3338 return operand_equal_p (TREE_OPERAND (arg0, 0),
3339 TREE_OPERAND (arg1, 0),
3340 flags | OEP_ADDRESS_OF);
3341
3342 case TRUTH_NOT_EXPR:
3343 return OP_SAME (0);
3344
3345 case TRUTH_ANDIF_EXPR:
3346 case TRUTH_ORIF_EXPR:
3347 return OP_SAME (0) && OP_SAME (1);
3348
3349 case WIDEN_MULT_PLUS_EXPR:
3350 case WIDEN_MULT_MINUS_EXPR:
3351 if (!OP_SAME (2))
3352 return false;
3353 /* The multiplication operands are commutative. */
3354 /* FALLTHRU */
3355
3356 case TRUTH_AND_EXPR:
3357 case TRUTH_OR_EXPR:
3358 case TRUTH_XOR_EXPR:
3359 if (OP_SAME (0) && OP_SAME (1))
3360 return true;
3361
3362 /* Otherwise take into account that this is a commutative operation. */
3363 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3364 TREE_OPERAND (arg1, 1), flags)
3365 && operand_equal_p (TREE_OPERAND (arg0, 1),
3366 TREE_OPERAND (arg1, 0), flags));
3367
3368 case COND_EXPR:
3369 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3370 return false;
3371 flags &= ~OEP_ADDRESS_OF;
3372 return OP_SAME (0);
3373
3374 case BIT_INSERT_EXPR:
3375 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3376 of op1. We need to check that these match. */
3377 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3378 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3379 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3380 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3381 return false;
3382 /* FALLTHRU */
3383
3384 case VEC_COND_EXPR:
3385 case DOT_PROD_EXPR:
3386 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3387
3388 case MODIFY_EXPR:
3389 case INIT_EXPR:
3390 case COMPOUND_EXPR:
3391 case PREDECREMENT_EXPR:
3392 case PREINCREMENT_EXPR:
3393 case POSTDECREMENT_EXPR:
3394 case POSTINCREMENT_EXPR:
3395 if (flags & OEP_LEXICOGRAPHIC)
3396 return OP_SAME (0) && OP_SAME (1);
3397 return false;
3398
3399 case CLEANUP_POINT_EXPR:
3400 case EXPR_STMT:
3401 case SAVE_EXPR:
3402 if (flags & OEP_LEXICOGRAPHIC)
3403 return OP_SAME (0);
3404 return false;
3405
3406 default:
3407 return false;
3408 }
3409
3410 case tcc_vl_exp:
3411 switch (TREE_CODE (arg0))
3412 {
3413 case CALL_EXPR:
3414 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3415 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3416 /* If one CALL_EXPR is an internal function call and the other is
3417 a normal function call, then they are not equal. */
3418 return false;
3419 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3420 {
3421 /* If the CALL_EXPRs call different internal functions, then they
3422 are not equal. */
3423 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3424 return false;
3425 }
3426 else
3427 {
3428 /* If the CALL_EXPRs call different functions, then they are not
3429 equal. */
3430 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3431 flags))
3432 return false;
3433 }
3434
3435 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3436 {
3437 unsigned int cef = call_expr_flags (arg0);
3438 if (flags & OEP_PURE_SAME)
3439 cef &= ECF_CONST | ECF_PURE;
3440 else
3441 cef &= ECF_CONST;
3442 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3443 return false;
3444 }
3445
3446 /* Now see if all the arguments are the same. */
3447 {
3448 const_call_expr_arg_iterator iter0, iter1;
3449 const_tree a0, a1;
3450 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3451 a1 = first_const_call_expr_arg (arg1, &iter1);
3452 a0 && a1;
3453 a0 = next_const_call_expr_arg (&iter0),
3454 a1 = next_const_call_expr_arg (&iter1))
3455 if (! operand_equal_p (a0, a1, flags))
3456 return false;
3457
3458 /* If we get here and both argument lists are exhausted
3459 then the CALL_EXPRs are equal. */
3460 return ! (a0 || a1);
3461 }
3462 default:
3463 return false;
3464 }
3465
3466 case tcc_declaration:
3467 /* Consider __builtin_sqrt equal to sqrt. */
3468 return (TREE_CODE (arg0) == FUNCTION_DECL
3469 && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3470 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3471 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3472 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3473
3474 case tcc_exceptional:
3475 if (TREE_CODE (arg0) == CONSTRUCTOR)
3476 {
3477 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3478 return false;
3479
3480 /* In GIMPLE constructors are used only to build vectors from
3481 elements. Individual elements in the constructor must be
3482 indexed in increasing order and form an initial sequence.
3483
3484 We make no effort to compare constructors in GENERIC.
3485 (See sem_variable::equals in ipa-icf, which can do so for
3486 constants.) */
3487 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3488 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3489 return false;
3490
3491 /* Be sure that the constructed vectors have the same representation.
3492 So far we have only checked that the element precisions and modes
3493 match. Vectors may be BLKmode, so also check that the numbers of
3494 parts match. */
3495 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3496 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3497 return false;
3498
3499 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3500 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3501 unsigned int len = vec_safe_length (v0);
3502
3503 if (len != vec_safe_length (v1))
3504 return false;
3505
3506 for (unsigned int i = 0; i < len; i++)
3507 {
3508 constructor_elt *c0 = &(*v0)[i];
3509 constructor_elt *c1 = &(*v1)[i];
3510
3511 if (!operand_equal_p (c0->value, c1->value, flags)
3512 /* In GIMPLE the indexes can be either NULL or matching i.
3513 Double check this so we won't get false
3514 positives for GENERIC. */
3515 || (c0->index
3516 && (TREE_CODE (c0->index) != INTEGER_CST
3517 || compare_tree_int (c0->index, i)))
3518 || (c1->index
3519 && (TREE_CODE (c1->index) != INTEGER_CST
3520 || compare_tree_int (c1->index, i))))
3521 return false;
3522 }
3523 return true;
3524 }
3525 else if (TREE_CODE (arg0) == STATEMENT_LIST
3526 && (flags & OEP_LEXICOGRAPHIC))
3527 {
3528 /* Compare the STATEMENT_LISTs. */
3529 tree_stmt_iterator tsi1, tsi2;
3530 tree body1 = CONST_CAST_TREE (arg0);
3531 tree body2 = CONST_CAST_TREE (arg1);
3532 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3533 tsi_next (&tsi1), tsi_next (&tsi2))
3534 {
3535 /* The lists don't have the same number of statements. */
3536 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3537 return false;
3538 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3539 return true;
3540 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3541 flags & (OEP_LEXICOGRAPHIC
3542 | OEP_NO_HASH_CHECK)))
3543 return false;
3544 }
3545 }
3546 return false;
3547
3548 case tcc_statement:
3549 switch (TREE_CODE (arg0))
3550 {
3551 case RETURN_EXPR:
3552 if (flags & OEP_LEXICOGRAPHIC)
3553 return OP_SAME_WITH_NULL (0);
3554 return false;
3555 case DEBUG_BEGIN_STMT:
3556 if (flags & OEP_LEXICOGRAPHIC)
3557 return true;
3558 return false;
3559 default:
3560 return false;
3561 }
3562
3563 default:
3564 return false;
3565 }
3566
3567 #undef OP_SAME
3568 #undef OP_SAME_WITH_NULL
3569 }
3570
3571 /* Generate a hash value for an expression. This can be used iteratively
3572 by passing a previous result as the HSTATE argument. */
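/* The hash must be compatible with operand_equal_p: anything that can
   compare equal must hash the same. That is why, below, NOP_EXPR and
   CONVERT_EXPR hash identically and swappable comparisons hash their
   operands in a canonical order.  */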
3573
3574 void
3575 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3576 unsigned int flags)
3577 {
3578 int i;
3579 enum tree_code code;
3580 enum tree_code_class tclass;
3581
3582 if (t == NULL_TREE || t == error_mark_node)
3583 {
3584 hstate.merge_hash (0);
3585 return;
3586 }
3587
3588 STRIP_ANY_LOCATION_WRAPPER (t);
3589
3590 if (!(flags & OEP_ADDRESS_OF))
3591 STRIP_NOPS (t);
3592
3593 code = TREE_CODE (t);
3594
3595 switch (code)
3596 {
3597 /* Alas, constants aren't shared, so we can't rely on pointer
3598 identity. */
3599 case VOID_CST:
3600 hstate.merge_hash (0);
3601 return;
3602 case INTEGER_CST:
3603 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3604 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3605 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3606 return;
3607 case REAL_CST:
3608 {
3609 unsigned int val2;
3610 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3611 val2 = rvc_zero;
3612 else
3613 val2 = real_hash (TREE_REAL_CST_PTR (t));
3614 hstate.merge_hash (val2);
3615 return;
3616 }
3617 case FIXED_CST:
3618 {
3619 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3620 hstate.merge_hash (val2);
3621 return;
3622 }
3623 case STRING_CST:
3624 hstate.add ((const void *) TREE_STRING_POINTER (t),
3625 TREE_STRING_LENGTH (t));
3626 return;
3627 case COMPLEX_CST:
3628 hash_operand (TREE_REALPART (t), hstate, flags);
3629 hash_operand (TREE_IMAGPART (t), hstate, flags);
3630 return;
3631 case VECTOR_CST:
3632 {
3633 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3634 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3635 unsigned int count = vector_cst_encoded_nelts (t);
3636 for (unsigned int i = 0; i < count; ++i)
3637 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3638 return;
3639 }
3640 case SSA_NAME:
3641 /* We can just compare by pointer. */
3642 hstate.add_hwi (SSA_NAME_VERSION (t));
3643 return;
3644 case PLACEHOLDER_EXPR:
3645 /* The node itself doesn't matter. */
3646 return;
3647 case BLOCK:
3648 case OMP_CLAUSE:
3649 /* Ignore. */
3650 return;
3651 case TREE_LIST:
3652 /* A list of expressions, for a CALL_EXPR or as the elements of a
3653 VECTOR_CST. */
3654 for (; t; t = TREE_CHAIN (t))
3655 hash_operand (TREE_VALUE (t), hstate, flags);
3656 return;
3657 case CONSTRUCTOR:
3658 {
3659 unsigned HOST_WIDE_INT idx;
3660 tree field, value;
3661 flags &= ~OEP_ADDRESS_OF;
3662 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3663 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3664 {
3665 /* In GIMPLE the indexes can be either NULL or matching i. */
3666 if (field == NULL_TREE)
3667 field = bitsize_int (idx);
3668 hash_operand (field, hstate, flags);
3669 hash_operand (value, hstate, flags);
3670 }
3671 return;
3672 }
3673 case STATEMENT_LIST:
3674 {
3675 tree_stmt_iterator i;
3676 for (i = tsi_start (CONST_CAST_TREE (t));
3677 !tsi_end_p (i); tsi_next (&i))
3678 hash_operand (tsi_stmt (i), hstate, flags);
3679 return;
3680 }
3681 case TREE_VEC:
3682 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3683 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3684 return;
3685 case IDENTIFIER_NODE:
3686 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3687 return;
3688 case FUNCTION_DECL:
3689 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3690 Otherwise nodes that compare equal according to operand_equal_p might
3691 get different hash codes. However, don't do this for machine specific
3692 or front end builtins, since the function code is overloaded in those
3693 cases. */
3694 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3695 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3696 {
3697 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3698 code = TREE_CODE (t);
3699 }
3700 /* FALL THROUGH */
3701 default:
3702 if (POLY_INT_CST_P (t))
3703 {
3704 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3705 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3706 return;
3707 }
3708 tclass = TREE_CODE_CLASS (code);
3709
3710 if (tclass == tcc_declaration)
3711 {
3712 /* DECLs have a unique ID. */
3713 hstate.add_hwi (DECL_UID (t));
3714 }
3715 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3716 {
3717 /* For comparisons that can be swapped, use the lower
3718 tree code. */
3719 enum tree_code ccode = swap_tree_comparison (code);
3720 if (code < ccode)
3721 ccode = code;
3722 hstate.add_object (ccode);
3723 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3724 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3725 }
3726 else if (CONVERT_EXPR_CODE_P (code))
3727 {
3728 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3729 operand_equal_p. */
3730 enum tree_code ccode = NOP_EXPR;
3731 hstate.add_object (ccode);
3732
3733 /* Don't hash the type; that can lead to having nodes which
3734 compare equal according to operand_equal_p, but which
3735 have different hash codes. Make sure to include signedness
3736 in the hash computation. */
3737 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3738 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3739 }
3740 /* For OEP_ADDRESS_OF, hash MEM_REF [&decl, 0] the same as decl. */
3741 else if (code == MEM_REF
3742 && (flags & OEP_ADDRESS_OF) != 0
3743 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3744 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3745 && integer_zerop (TREE_OPERAND (t, 1)))
3746 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3747 hstate, flags);
3748 /* Don't ICE on FE specific trees, or their arguments etc.
3749 during operand_equal_p hash verification. */
3750 else if (!IS_EXPR_CODE_CLASS (tclass))
3751 gcc_assert (flags & OEP_HASH_CHECK);
3752 else
3753 {
3754 unsigned int sflags = flags;
3755
3756 hstate.add_object (code);
3757
3758 switch (code)
3759 {
3760 case ADDR_EXPR:
3761 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3762 flags |= OEP_ADDRESS_OF;
3763 sflags = flags;
3764 break;
3765
3766 case INDIRECT_REF:
3767 case MEM_REF:
3768 case TARGET_MEM_REF:
3769 flags &= ~OEP_ADDRESS_OF;
3770 sflags = flags;
3771 break;
3772
3773 case ARRAY_REF:
3774 case ARRAY_RANGE_REF:
3775 case COMPONENT_REF:
3776 case BIT_FIELD_REF:
3777 sflags &= ~OEP_ADDRESS_OF;
3778 break;
3779
3780 case COND_EXPR:
3781 flags &= ~OEP_ADDRESS_OF;
3782 break;
3783
3784 case WIDEN_MULT_PLUS_EXPR:
3785 case WIDEN_MULT_MINUS_EXPR:
3786 {
3787 /* The multiplication operands are commutative. */
3788 inchash::hash one, two;
3789 hash_operand (TREE_OPERAND (t, 0), one, flags);
3790 hash_operand (TREE_OPERAND (t, 1), two, flags);
3791 hstate.add_commutative (one, two);
3792 hash_operand (TREE_OPERAND (t, 2), two, flags);
3793 return;
3794 }
3795
3796 case CALL_EXPR:
3797 if (CALL_EXPR_FN (t) == NULL_TREE)
3798 hstate.add_int (CALL_EXPR_IFN (t));
3799 break;
3800
3801 case TARGET_EXPR:
3802 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3803 Usually different TARGET_EXPRs should just use
3804 different temporaries in their slots. */
3805 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3806 return;
3807
3808 /* Virtual table call. */
3809 case OBJ_TYPE_REF:
3810 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3811 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3812 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3813 return;
3814 default:
3815 break;
3816 }
3817
3818 /* Don't hash the type; that can lead to having nodes which
3819 compare equal according to operand_equal_p, but which
3820 have different hash codes. */
3821 if (code == NON_LVALUE_EXPR)
3822 {
3823 /* Make sure to include signedness in the hash computation. */
3824 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3825 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3826 }
3827
3828 else if (commutative_tree_code (code))
3829 {
3830 /* It's a commutative expression. We want to hash it the same
3831 however it appears. We do this by first hashing both operands
3832 and then rehashing based on the order of their independent
3833 hashes. */
3834 inchash::hash one, two;
3835 hash_operand (TREE_OPERAND (t, 0), one, flags);
3836 hash_operand (TREE_OPERAND (t, 1), two, flags);
3837 hstate.add_commutative (one, two);
3838 }
3839 else
3840 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3841 hash_operand (TREE_OPERAND (t, i), hstate,
3842 i == 0 ? flags : sflags);
3843 }
3844 return;
3845 }
3846 }
3847
3848 bool
3849 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3850 unsigned int flags, bool *ret)
3851 {
3852 /* When checking, verify at the outermost operand_equal_p call that
3853 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
3854 hash value. */
3855 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3856 {
3857 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3858 {
3859 if (arg0 != arg1)
3860 {
3861 inchash::hash hstate0 (0), hstate1 (0);
3862 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3863 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3864 hashval_t h0 = hstate0.end ();
3865 hashval_t h1 = hstate1.end ();
3866 gcc_assert (h0 == h1);
3867 }
3868 *ret = true;
3869 }
3870 else
3871 *ret = false;
3872
3873 return true;
3874 }
3875
3876 return false;
3877 }
3878
3879
3880 static operand_compare default_compare_instance;
3881
3882 /* Convenience wrapper around the operand_compare class because usually we do
3883 not need to play with the valueizer. */
3884
3885 bool
3886 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3887 {
3888 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3889 }
3890
3891 namespace inchash
3892 {
3893
3894 /* Generate a hash value for an expression. This can be used iteratively
3895 by passing a previous result as the HSTATE argument.
3896
3897 This function is intended to produce the same hash for expressions which
3898 would compare equal using operand_equal_p. */
3899 void
3900 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3901 {
3902 default_compare_instance.hash_operand (t, hstate, flags);
3903 }
3904
3905 }
3906 \f
3907 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3908 with a different signedness or a narrower precision. */
3909
3910 static bool
3911 operand_equal_for_comparison_p (tree arg0, tree arg1)
3912 {
3913 if (operand_equal_p (arg0, arg1, 0))
3914 return true;
3915
3916 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3917 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3918 return false;
3919
3920 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3921 and see if the inner values are the same. This removes any
3922 signedness comparison, which doesn't matter here. */
3923 tree op0 = arg0;
3924 tree op1 = arg1;
3925 STRIP_NOPS (op0);
3926 STRIP_NOPS (op1);
3927 if (operand_equal_p (op0, op1, 0))
3928 return true;
3929
3930 /* Discard a single widening conversion from ARG1 and see if the inner
3931 value is the same as ARG0. */
3932 if (CONVERT_EXPR_P (arg1)
3933 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3934 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3935 < TYPE_PRECISION (TREE_TYPE (arg1))
3936 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3937 return true;
3938
3939 return false;
3940 }
3941 \f
3942 /* See if ARG is an expression that is either a comparison or is performing
3943 arithmetic on comparisons. The comparisons must only be comparing
3944 two different values, which will be stored in *CVAL1 and *CVAL2; if
3945 they are nonzero it means that some operands have already been found.
3946 No variables may be used anywhere else in the expression except in the
3947 comparisons.
3948
3949 If this is true, return true. Otherwise, return false. */
3950
3951 static bool
3952 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3953 {
3954 enum tree_code code = TREE_CODE (arg);
3955 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3956
3957 /* We can handle some of the tcc_expression cases here. */
3958 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3959 tclass = tcc_unary;
3960 else if (tclass == tcc_expression
3961 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3962 || code == COMPOUND_EXPR))
3963 tclass = tcc_binary;
3964
3965 switch (tclass)
3966 {
3967 case tcc_unary:
3968 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3969
3970 case tcc_binary:
3971 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3972 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3973
3974 case tcc_constant:
3975 return true;
3976
3977 case tcc_expression:
3978 if (code == COND_EXPR)
3979 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3980 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3981 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3982 return false;
3983
3984 case tcc_comparison:
3985 /* First see if we can handle the first operand, then the second. For
3986 the second operand, we know *CVAL1 can't be zero. It must be that
3987 one side of the comparison is each of the values; test for the
3988 case where this isn't true by failing if the two operands
3989 are the same. */
3990
3991 if (operand_equal_p (TREE_OPERAND (arg, 0),
3992 TREE_OPERAND (arg, 1), 0))
3993 return false;
3994
3995 if (*cval1 == 0)
3996 *cval1 = TREE_OPERAND (arg, 0);
3997 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3998 ;
3999 else if (*cval2 == 0)
4000 *cval2 = TREE_OPERAND (arg, 0);
4001 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4002 ;
4003 else
4004 return false;
4005
4006 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4007 ;
4008 else if (*cval2 == 0)
4009 *cval2 = TREE_OPERAND (arg, 1);
4010 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4011 ;
4012 else
4013 return false;
4014
4015 return true;
4016
4017 default:
4018 return false;
4019 }
4020 }
4021 \f
4022 /* ARG is a tree that is known to contain just arithmetic operations and
4023 comparisons. Evaluate the operations in the tree substituting NEW0 for
4024 any occurrence of OLD0 as an operand of a comparison and likewise for
4025 NEW1 and OLD1. */
4026
4027 static tree
4028 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4029 tree old1, tree new1)
4030 {
4031 tree type = TREE_TYPE (arg);
4032 enum tree_code code = TREE_CODE (arg);
4033 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4034
4035 /* We can handle some of the tcc_expression cases here. */
4036 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4037 tclass = tcc_unary;
4038 else if (tclass == tcc_expression
4039 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4040 tclass = tcc_binary;
4041
4042 switch (tclass)
4043 {
4044 case tcc_unary:
4045 return fold_build1_loc (loc, code, type,
4046 eval_subst (loc, TREE_OPERAND (arg, 0),
4047 old0, new0, old1, new1));
4048
4049 case tcc_binary:
4050 return fold_build2_loc (loc, code, type,
4051 eval_subst (loc, TREE_OPERAND (arg, 0),
4052 old0, new0, old1, new1),
4053 eval_subst (loc, TREE_OPERAND (arg, 1),
4054 old0, new0, old1, new1));
4055
4056 case tcc_expression:
4057 switch (code)
4058 {
4059 case SAVE_EXPR:
4060 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4061 old1, new1);
4062
4063 case COMPOUND_EXPR:
4064 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4065 old1, new1);
4066
4067 case COND_EXPR:
4068 return fold_build3_loc (loc, code, type,
4069 eval_subst (loc, TREE_OPERAND (arg, 0),
4070 old0, new0, old1, new1),
4071 eval_subst (loc, TREE_OPERAND (arg, 1),
4072 old0, new0, old1, new1),
4073 eval_subst (loc, TREE_OPERAND (arg, 2),
4074 old0, new0, old1, new1));
4075 default:
4076 break;
4077 }
4078 /* Fall through - ??? */
4079
4080 case tcc_comparison:
4081 {
4082 tree arg0 = TREE_OPERAND (arg, 0);
4083 tree arg1 = TREE_OPERAND (arg, 1);
4084
4085 /* We need to check both for exact equality and tree equality. The
4086 former will be true if the operand has a side-effect. In that
4087 case, we know the operand occurred exactly once. */
4088
4089 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4090 arg0 = new0;
4091 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4092 arg0 = new1;
4093
4094 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4095 arg1 = new0;
4096 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4097 arg1 = new1;
4098
4099 return fold_build2_loc (loc, code, type, arg0, arg1);
4100 }
4101
4102 default:
4103 return arg;
4104 }
4105 }
4106 \f
4107 /* Return a tree for the case when the result of an expression is RESULT
4108 converted to TYPE and OMITTED was previously an operand of the expression
4109 but is now not needed (e.g., we folded OMITTED * 0).
4110
4111 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4112 the conversion of RESULT to TYPE. */
4113
4114 tree
4115 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4116 {
4117 tree t = fold_convert_loc (loc, type, result);
4118
4119 /* If the resulting operand is an empty statement, just return the omitted
4120 statement cast to void. */
4121 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4122 return build1_loc (loc, NOP_EXPR, void_type_node,
4123 fold_ignored_result (omitted));
4124
4125 if (TREE_SIDE_EFFECTS (omitted))
4126 return build2_loc (loc, COMPOUND_EXPR, type,
4127 fold_ignored_result (omitted), t);
4128
4129 return non_lvalue_loc (loc, t);
4130 }
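
/* Illustrative sketch, not part of GCC: what the COMPOUND_EXPR above models
   at the source level.  Folding "f () * 0" must still evaluate f () when it
   has side effects.  The names below are local to this never-built ("#if 0")
   example.  */
#if 0
#include <assert.h>

static int calls;
static int f (void) { calls++; return 42; }

int
main (void)
{
  int folded = (f (), 0);	/* The folded form of "f () * 0".  */
  assert (folded == 0 && calls == 1);
  return 0;
}
#endif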
4131
4132 /* Return a tree for the case when the result of an expression is RESULT
4133 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4134 of the expression but are now not needed.
4135
4136 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4137 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4138 evaluated before OMITTED2. Otherwise, if neither has side effects,
4139 just do the conversion of RESULT to TYPE. */
4140
4141 tree
4142 omit_two_operands_loc (location_t loc, tree type, tree result,
4143 tree omitted1, tree omitted2)
4144 {
4145 tree t = fold_convert_loc (loc, type, result);
4146
4147 if (TREE_SIDE_EFFECTS (omitted2))
4148 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4149 if (TREE_SIDE_EFFECTS (omitted1))
4150 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4151
4152 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4153 }
4154
4155 \f
4156 /* Return a simplified tree node for the truth-negation of ARG. This
4157 never alters ARG itself. We assume that ARG is an operation that
4158 returns a truth value (0 or 1).
4159
4160 FIXME: one would think we would fold the result, but it causes
4161 problems with the dominator optimizer. */
4162
4163 static tree
4164 fold_truth_not_expr (location_t loc, tree arg)
4165 {
4166 tree type = TREE_TYPE (arg);
4167 enum tree_code code = TREE_CODE (arg);
4168 location_t loc1, loc2;
4169
4170 /* If this is a comparison, we can simply invert it, except for
4171 floating-point non-equality comparisons, in which case we just
4172 enclose a TRUTH_NOT_EXPR around what we have. */
4173
4174 if (TREE_CODE_CLASS (code) == tcc_comparison)
4175 {
4176 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4177 if (FLOAT_TYPE_P (op_type)
4178 && flag_trapping_math
4179 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4180 && code != NE_EXPR && code != EQ_EXPR)
4181 return NULL_TREE;
4182
4183 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4184 if (code == ERROR_MARK)
4185 return NULL_TREE;
4186
4187 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4188 TREE_OPERAND (arg, 1));
4189 if (TREE_NO_WARNING (arg))
4190 TREE_NO_WARNING (ret) = 1;
4191 return ret;
4192 }
4193
4194 switch (code)
4195 {
4196 case INTEGER_CST:
4197 return constant_boolean_node (integer_zerop (arg), type);
4198
4199 case TRUTH_AND_EXPR:
4200 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4201 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4202 return build2_loc (loc, TRUTH_OR_EXPR, type,
4203 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4204 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4205
4206 case TRUTH_OR_EXPR:
4207 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4208 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4209 return build2_loc (loc, TRUTH_AND_EXPR, type,
4210 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4211 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4212
4213 case TRUTH_XOR_EXPR:
4214 /* Here we can invert either operand. We invert the first operand
4215 unless the second operand is a TRUTH_NOT_EXPR in which case our
4216 result is the XOR of the first operand with the inside of the
4217 negation of the second operand. */
4218
4219 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4220 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4221 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4222 else
4223 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4224 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4225 TREE_OPERAND (arg, 1));
4226
4227 case TRUTH_ANDIF_EXPR:
4228 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4229 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4230 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4231 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4232 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4233
4234 case TRUTH_ORIF_EXPR:
4235 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4236 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4237 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4238 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4239 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4240
4241 case TRUTH_NOT_EXPR:
4242 return TREE_OPERAND (arg, 0);
4243
4244 case COND_EXPR:
4245 {
4246 tree arg1 = TREE_OPERAND (arg, 1);
4247 tree arg2 = TREE_OPERAND (arg, 2);
4248
4249 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4250 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4251
4252 /* A COND_EXPR may have a throw as one operand, which
4253 then has void type. Just leave void operands
4254 as they are. */
4255 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4256 VOID_TYPE_P (TREE_TYPE (arg1))
4257 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4258 VOID_TYPE_P (TREE_TYPE (arg2))
4259 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4260 }
4261
4262 case COMPOUND_EXPR:
4263 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4264 return build2_loc (loc, COMPOUND_EXPR, type,
4265 TREE_OPERAND (arg, 0),
4266 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4267
4268 case NON_LVALUE_EXPR:
4269 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4270 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4271
4272 CASE_CONVERT:
4273 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4274 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4275
4276 /* fall through */
4277
4278 case FLOAT_EXPR:
4279 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4280 return build1_loc (loc, TREE_CODE (arg), type,
4281 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4282
4283 case BIT_AND_EXPR:
4284 if (!integer_onep (TREE_OPERAND (arg, 1)))
4285 return NULL_TREE;
4286 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4287
4288 case SAVE_EXPR:
4289 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4290
4291 case CLEANUP_POINT_EXPR:
4292 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4293 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4294 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4295
4296 default:
4297 return NULL_TREE;
4298 }
4299 }
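
/* Illustrative sketch, not part of GCC: why floating-point comparisons are
   not inverted by simply switching to the opposite ordered comparison when
   NaNs are honored.  With a NaN operand, !(a < b) holds but a >= b does not,
   which is why the code above either picks an unordered comparison code or
   keeps a TRUTH_NOT_EXPR.  Never built ("#if 0").  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  volatile double a = NAN, b = 1.0;
  assert (!(a < b));	/* The negated comparison is true...  */
  assert (!(a >= b));	/* ...but the naively inverted one is false.  */
  return 0;
}
#endif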
4300
4301 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4302 assume that ARG is an operation that returns a truth value (0 or 1
4303 for scalars, 0 or -1 for vectors). Return the folded expression if
4304 folding is successful. Otherwise, return NULL_TREE. */
4305
4306 static tree
4307 fold_invert_truthvalue (location_t loc, tree arg)
4308 {
4309 tree type = TREE_TYPE (arg);
4310 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4311 ? BIT_NOT_EXPR
4312 : TRUTH_NOT_EXPR,
4313 type, arg);
4314 }
4315
4316 /* Return a simplified tree node for the truth-negation of ARG. This
4317 never alters ARG itself. We assume that ARG is an operation that
4318 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4319
4320 tree
4321 invert_truthvalue_loc (location_t loc, tree arg)
4322 {
4323 if (TREE_CODE (arg) == ERROR_MARK)
4324 return arg;
4325
4326 tree type = TREE_TYPE (arg);
4327 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4328 ? BIT_NOT_EXPR
4329 : TRUTH_NOT_EXPR,
4330 type, arg);
4331 }
4332 \f
4333 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4334 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4335 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4336 is the original memory reference used to preserve the alias set of
4337 the access. */
4338
4339 static tree
4340 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4341 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4342 int unsignedp, int reversep)
4343 {
4344 tree result, bftype;
4345
4346 /* Attempt not to lose the access path if possible. */
4347 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4348 {
4349 tree ninner = TREE_OPERAND (orig_inner, 0);
4350 machine_mode nmode;
4351 poly_int64 nbitsize, nbitpos;
4352 tree noffset;
4353 int nunsignedp, nreversep, nvolatilep = 0;
4354 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4355 &noffset, &nmode, &nunsignedp,
4356 &nreversep, &nvolatilep);
4357 if (base == inner
4358 && noffset == NULL_TREE
4359 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4360 && !reversep
4361 && !nreversep
4362 && !nvolatilep)
4363 {
4364 inner = ninner;
4365 bitpos -= nbitpos;
4366 }
4367 }
4368
4369 alias_set_type iset = get_alias_set (orig_inner);
4370 if (iset == 0 && get_alias_set (inner) != iset)
4371 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4372 build_fold_addr_expr (inner),
4373 build_int_cst (ptr_type_node, 0));
4374
4375 if (known_eq (bitpos, 0) && !reversep)
4376 {
4377 tree size = TYPE_SIZE (TREE_TYPE (inner));
4378 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4379 || POINTER_TYPE_P (TREE_TYPE (inner)))
4380 && tree_fits_shwi_p (size)
4381 && tree_to_shwi (size) == bitsize)
4382 return fold_convert_loc (loc, type, inner);
4383 }
4384
4385 bftype = type;
4386 if (TYPE_PRECISION (bftype) != bitsize
4387 || TYPE_UNSIGNED (bftype) == !unsignedp)
4388 bftype = build_nonstandard_integer_type (bitsize, 0);
4389
4390 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4391 bitsize_int (bitsize), bitsize_int (bitpos));
4392 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4393
4394 if (bftype != type)
4395 result = fold_convert_loc (loc, type, result);
4396
4397 return result;
4398 }
4399
4400 /* Optimize a bit-field compare.
4401
4402 There are two cases: First is a compare against a constant and the
4403 second is a comparison of two items where the fields are at the same
4404 bit position relative to the start of a chunk (byte, halfword, word)
4405 large enough to contain it. In these cases we can avoid the shift
4406 implicit in bitfield extractions.
4407
4408 For constants, we emit a compare of the shifted constant with the
4409 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4410 compared. For two fields at the same position, we do the ANDs with the
4411 similar mask and compare the result of the ANDs.
4412
4413 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4414 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4415 are the left and right operands of the comparison, respectively.
4416
4417 If the optimization described above can be done, we return the resulting
4418 tree. Otherwise we return zero. */
4419
4420 static tree
4421 optimize_bit_field_compare (location_t loc, enum tree_code code,
4422 tree compare_type, tree lhs, tree rhs)
4423 {
4424 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4425 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4426 tree type = TREE_TYPE (lhs);
4427 tree unsigned_type;
4428 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4429 machine_mode lmode, rmode;
4430 scalar_int_mode nmode;
4431 int lunsignedp, runsignedp;
4432 int lreversep, rreversep;
4433 int lvolatilep = 0, rvolatilep = 0;
4434 tree linner, rinner = NULL_TREE;
4435 tree mask;
4436 tree offset;
4437
4438 /* Get all the information about the extractions being done. If the bit size
4439 is the same as the size of the underlying object, we aren't doing an
4440 extraction at all and so can do nothing. We also don't want to
4441 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4442 then will no longer be able to replace it. */
4443 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4444 &lunsignedp, &lreversep, &lvolatilep);
4445 if (linner == lhs
4446 || !known_size_p (plbitsize)
4447 || !plbitsize.is_constant (&lbitsize)
4448 || !plbitpos.is_constant (&lbitpos)
4449 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4450 || offset != 0
4451 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4452 || lvolatilep)
4453 return 0;
4454
4455 if (const_p)
4456 rreversep = lreversep;
4457 else
4458 {
4459 /* If this is not a constant, we can only do something if bit positions,
4460 sizes, signedness and storage order are the same. */
4461 rinner
4462 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4463 &runsignedp, &rreversep, &rvolatilep);
4464
4465 if (rinner == rhs
4466 || maybe_ne (lbitpos, rbitpos)
4467 || maybe_ne (lbitsize, rbitsize)
4468 || lunsignedp != runsignedp
4469 || lreversep != rreversep
4470 || offset != 0
4471 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4472 || rvolatilep)
4473 return 0;
4474 }
4475
4476 /* Honor the C++ memory model and mimic what RTL expansion does. */
4477 poly_uint64 bitstart = 0;
4478 poly_uint64 bitend = 0;
4479 if (TREE_CODE (lhs) == COMPONENT_REF)
4480 {
4481 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4482 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4483 return 0;
4484 }
4485
4486 /* See if we can find a mode to refer to this field. We should be able to,
4487 but fail if we can't. */
4488 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4489 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4490 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4491 TYPE_ALIGN (TREE_TYPE (rinner))),
4492 BITS_PER_WORD, false, &nmode))
4493 return 0;
4494
4495 /* Set signed and unsigned types of the precision of this mode for the
4496 shifts below. */
4497 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4498
4499 /* Compute the bit position and size for the new reference and our offset
4500 within it. If the new reference is the same size as the original, we
4501 won't optimize anything, so return zero. */
4502 nbitsize = GET_MODE_BITSIZE (nmode);
4503 nbitpos = lbitpos & ~ (nbitsize - 1);
4504 lbitpos -= nbitpos;
4505 if (nbitsize == lbitsize)
4506 return 0;
4507
4508 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4509 lbitpos = nbitsize - lbitsize - lbitpos;
4510
4511 /* Make the mask to be used against the extracted field. */
4512 mask = build_int_cst_type (unsigned_type, -1);
4513 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4514 mask = const_binop (RSHIFT_EXPR, mask,
4515 size_int (nbitsize - lbitsize - lbitpos));
4516
4517 if (! const_p)
4518 {
4519 if (nbitpos < 0)
4520 return 0;
4521
4522 /* If not comparing with a constant, just rework the comparison
4523 and return. */
4524 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4525 nbitsize, nbitpos, 1, lreversep);
4526 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4527 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4528 nbitsize, nbitpos, 1, rreversep);
4529 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4530 return fold_build2_loc (loc, code, compare_type, t1, t2);
4531 }
4532
4533 /* Otherwise, we are handling the constant case. See if the constant is too
4534 big for the field. Warn and return a tree for 0 (false) if so. We do
4535 this not only for its own sake, but to avoid having to test for this
4536 error case below. If we didn't, we might generate wrong code.
4537
4538 For unsigned fields, the constant shifted right by the field length should
4539 be all zero. For signed fields, the high-order bits should agree with
4540 the sign bit. */
4541
4542 if (lunsignedp)
4543 {
4544 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4545 {
4546 warning (0, "comparison is always %d due to width of bit-field",
4547 code == NE_EXPR);
4548 return constant_boolean_node (code == NE_EXPR, compare_type);
4549 }
4550 }
4551 else
4552 {
4553 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4554 if (tem != 0 && tem != -1)
4555 {
4556 warning (0, "comparison is always %d due to width of bit-field",
4557 code == NE_EXPR);
4558 return constant_boolean_node (code == NE_EXPR, compare_type);
4559 }
4560 }
4561
4562 if (nbitpos < 0)
4563 return 0;
4564
4565 /* Single-bit compares should always be against zero. */
4566 if (lbitsize == 1 && ! integer_zerop (rhs))
4567 {
4568 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4569 rhs = build_int_cst (type, 0);
4570 }
4571
4572 /* Make a new bitfield reference, shift the constant over the
4573 appropriate number of bits and mask it with the computed mask
4574 (in case this was a signed field). If we changed it, make a new one. */
4575 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4576 nbitsize, nbitpos, 1, lreversep);
4577
4578 rhs = const_binop (BIT_AND_EXPR,
4579 const_binop (LSHIFT_EXPR,
4580 fold_convert_loc (loc, unsigned_type, rhs),
4581 size_int (lbitpos)),
4582 mask);
4583
4584 lhs = build2_loc (loc, code, compare_type,
4585 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4586 return lhs;
4587 }
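
/* Illustrative sketch, not part of GCC: the "constant is too big for the
   field" case handled above, shown at the source level.  A 3-bit unsigned
   bit-field can only hold 0..7, so comparing it against 9 always yields the
   same answer, which is what the warning diagnoses and what the fold
   replaces by a constant.  Never built ("#if 0").  */
#if 0
#include <assert.h>

struct s { unsigned int f : 3; };

int
main (void)
{
  for (unsigned v = 0; v < 8; v++)
    {
      struct s x = { v };
      assert ((x.f == 9) == 0);	/* Always false, whatever V is.  */
    }
  return 0;
}
#endif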
4588 \f
4589 /* Subroutine for fold_truth_andor_1: decode a field reference.
4590
4591 If EXP is a comparison reference, we return the innermost reference.
4592
4593 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4594 set to the starting bit number.
4595
4596 If the innermost field can be completely contained in a mode-sized
4597 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4598
4599 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4600 otherwise it is not changed.
4601
4602 *PUNSIGNEDP is set to the signedness of the field.
4603
4604 *PREVERSEP is set to the storage order of the field.
4605
4606 *PMASK is set to the mask used. This is either contained in a
4607 BIT_AND_EXPR or derived from the width of the field.
4608
4609 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4610
4611 Return 0 if this is not a component reference or is one that we can't
4612 do anything with. */
4613
4614 static tree
4615 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4616 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4617 int *punsignedp, int *preversep, int *pvolatilep,
4618 tree *pmask, tree *pand_mask)
4619 {
4620 tree exp = *exp_;
4621 tree outer_type = 0;
4622 tree and_mask = 0;
4623 tree mask, inner, offset;
4624 tree unsigned_type;
4625 unsigned int precision;
4626
4627 /* All the optimizations using this function assume integer fields.
4628 There are problems with FP fields since the type_for_size call
4629 below can fail for, e.g., XFmode. */
4630 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4631 return NULL_TREE;
4632
4633 /* We are interested in the bare arrangement of bits, so strip everything
4634 that doesn't affect the machine mode. However, record the type of the
4635 outermost expression if it may matter below. */
4636 if (CONVERT_EXPR_P (exp)
4637 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4638 outer_type = TREE_TYPE (exp);
4639 STRIP_NOPS (exp);
4640
4641 if (TREE_CODE (exp) == BIT_AND_EXPR)
4642 {
4643 and_mask = TREE_OPERAND (exp, 1);
4644 exp = TREE_OPERAND (exp, 0);
4645 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4646 if (TREE_CODE (and_mask) != INTEGER_CST)
4647 return NULL_TREE;
4648 }
4649
4650 poly_int64 poly_bitsize, poly_bitpos;
4651 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4652 pmode, punsignedp, preversep, pvolatilep);
4653 if ((inner == exp && and_mask == 0)
4654 || !poly_bitsize.is_constant (pbitsize)
4655 || !poly_bitpos.is_constant (pbitpos)
4656 || *pbitsize < 0
4657 || offset != 0
4658 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4659 /* Reject out-of-bound accesses (PR79731). */
4660 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4661 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4662 *pbitpos + *pbitsize) < 0))
4663 return NULL_TREE;
4664
4665 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4666 if (unsigned_type == NULL_TREE)
4667 return NULL_TREE;
4668
4669 *exp_ = exp;
4670
4671 /* If the number of bits in the reference is the same as the bitsize of
4672 the outer type, then the outer type gives the signedness. Otherwise
4673 (in case of a small bitfield) the signedness is unchanged. */
4674 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4675 *punsignedp = TYPE_UNSIGNED (outer_type);
4676
4677 /* Compute the mask to access the bitfield. */
4678 precision = TYPE_PRECISION (unsigned_type);
4679
4680 mask = build_int_cst_type (unsigned_type, -1);
4681
4682 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4683 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4684
4685 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4686 if (and_mask != 0)
4687 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4688 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4689
4690 *pmask = mask;
4691 *pand_mask = and_mask;
4692 return inner;
4693 }
4694
4695 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4696 bit positions and the type of MASK is signed. */
4697
4698 static bool
4699 all_ones_mask_p (const_tree mask, unsigned int size)
4700 {
4701 tree type = TREE_TYPE (mask);
4702 unsigned int precision = TYPE_PRECISION (type);
4703
4704 /* If this function returns true when the type of the mask is
4705 UNSIGNED, then there will be errors. In particular see
4706 gcc.c-torture/execute/990326-1.c. There does not appear to be
4707 any documentation paper trail as to why this is so. But the pre-
4708 wide-int code worked with that restriction and it has been preserved
4709 here. */
4710 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4711 return false;
4712
4713 return wi::mask (size, false, precision) == wi::to_wide (mask);
4714 }
4715
4716 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4717 represents the sign bit of EXP's type. If EXP represents a sign
4718 or zero extension, also test VAL against the unextended type.
4719 The return value is the (sub)expression whose sign bit is VAL,
4720 or NULL_TREE otherwise. */
4721
4722 tree
4723 sign_bit_p (tree exp, const_tree val)
4724 {
4725 int width;
4726 tree t;
4727
4728 /* Tree EXP must have an integral type. */
4729 t = TREE_TYPE (exp);
4730 if (! INTEGRAL_TYPE_P (t))
4731 return NULL_TREE;
4732
4733 /* Tree VAL must be an integer constant. */
4734 if (TREE_CODE (val) != INTEGER_CST
4735 || TREE_OVERFLOW (val))
4736 return NULL_TREE;
4737
4738 width = TYPE_PRECISION (t);
4739 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4740 return exp;
4741
4742 /* Handle extension from a narrower type. */
4743 if (TREE_CODE (exp) == NOP_EXPR
4744 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4745 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4746
4747 return NULL_TREE;
4748 }
4749
4750 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4751 to be evaluated unconditionally. */
4752
4753 static bool
4754 simple_operand_p (const_tree exp)
4755 {
4756 /* Strip any conversions that don't change the machine mode. */
4757 STRIP_NOPS (exp);
4758
4759 return (CONSTANT_CLASS_P (exp)
4760 || TREE_CODE (exp) == SSA_NAME
4761 || (DECL_P (exp)
4762 && ! TREE_ADDRESSABLE (exp)
4763 && ! TREE_THIS_VOLATILE (exp)
4764 && ! DECL_NONLOCAL (exp)
4765 /* Don't regard global variables as simple. They may be
4766 allocated in ways unknown to the compiler (shared memory,
4767 #pragma weak, etc). */
4768 && ! TREE_PUBLIC (exp)
4769 && ! DECL_EXTERNAL (exp)
4770 /* Weakrefs are not safe to read, since they can be NULL.
4771 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4772 have DECL_WEAK flag set. */
4773 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4774 /* Loading a static variable is unduly expensive, but global
4775 registers aren't expensive. */
4776 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4777 }
4778
4779 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4780 to be evaluated unconditionally.
4781 In addition to simple_operand_p, we assume that comparisons, conversions,
4782 and logic-not operations are simple if their operands are simple, too. */
4783
4784 static bool
4785 simple_operand_p_2 (tree exp)
4786 {
4787 enum tree_code code;
4788
4789 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4790 return false;
4791
4792 while (CONVERT_EXPR_P (exp))
4793 exp = TREE_OPERAND (exp, 0);
4794
4795 code = TREE_CODE (exp);
4796
4797 if (TREE_CODE_CLASS (code) == tcc_comparison)
4798 return (simple_operand_p (TREE_OPERAND (exp, 0))
4799 && simple_operand_p (TREE_OPERAND (exp, 1)));
4800
4801 if (code == TRUTH_NOT_EXPR)
4802 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4803
4804 return simple_operand_p (exp);
4805 }
4806
4807 \f
4808 /* The following functions are subroutines to fold_range_test and allow it to
4809 try to change a logical combination of comparisons into a range test.
4810
4811 For example, both
4812 X == 2 || X == 3 || X == 4 || X == 5
4813 and
4814 X >= 2 && X <= 5
4815 are converted to
4816 (unsigned) (X - 2) <= 3
4817
4818 We describe each set of comparisons as being either inside or outside
4819 a range, using a variable named like IN_P, and then describe the
4820 range with a lower and upper bound. If one of the bounds is omitted,
4821 it represents either the highest or lowest value of the type.
4822
4823 In the comments below, we represent a range by two numbers in brackets
4824 preceded by a "+" to designate being inside that range, or a "-" to
4825 designate being outside that range, so the condition can be inverted by
4826 flipping the prefix. An omitted bound is represented by a "-". For
4827 example, "- [-, 10]" means being outside the range starting at the lowest
4828 possible value and ending at 10, in other words, being greater than 10.
4829 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4830 always false.
4831
4832 We set up things so that the missing bounds are handled in a consistent
4833 manner so neither a missing bound nor "true" and "false" need to be
4834 handled using a special case. */
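
/* Illustrative sketch, not part of GCC: a standalone program (local to this
   comment, using no GCC internals) that checks the source-level rewrite
   described above over the whole unsigned char domain.  Guarded by "#if 0"
   so it is never built.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (unsigned v = 0; v < 256; v++)
    {
      unsigned char x = v;
      int chain = (x == 2 || x == 3 || x == 4 || x == 5);
      int range = (x >= 2 && x <= 5);
      int folded = ((unsigned) (x - 2) <= 3);
      assert (chain == range && range == folded);
    }
  return 0;
}
#endif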
4835
4836 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4837 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4838 and UPPER1_P are nonzero if the respective argument is an upper bound
4839 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4840 must be specified for a comparison. ARG1 will be converted to ARG0's
4841 type if both are specified. */
4842
4843 static tree
4844 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4845 tree arg1, int upper1_p)
4846 {
4847 tree tem;
4848 int result;
4849 int sgn0, sgn1;
4850
4851 /* If neither arg represents infinity, do the normal operation.
4852 Else, if not a comparison, return infinity. Else handle the special
4853 comparison rules. Note that most of the cases below won't occur, but
4854 are handled for consistency. */
4855
4856 if (arg0 != 0 && arg1 != 0)
4857 {
4858 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4859 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4860 STRIP_NOPS (tem);
4861 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4862 }
4863
4864 if (TREE_CODE_CLASS (code) != tcc_comparison)
4865 return 0;
4866
4867 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4868 for neither. In real maths, we cannot assume open ended ranges are
4869 the same. But, this is computer arithmetic, where numbers are finite.
4870 We can therefore make the transformation of any unbounded range with
4871 the value Z, Z being greater than any representable number. This permits
4872 us to treat unbounded ranges as equal. */
4873 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4874 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4875 switch (code)
4876 {
4877 case EQ_EXPR:
4878 result = sgn0 == sgn1;
4879 break;
4880 case NE_EXPR:
4881 result = sgn0 != sgn1;
4882 break;
4883 case LT_EXPR:
4884 result = sgn0 < sgn1;
4885 break;
4886 case LE_EXPR:
4887 result = sgn0 <= sgn1;
4888 break;
4889 case GT_EXPR:
4890 result = sgn0 > sgn1;
4891 break;
4892 case GE_EXPR:
4893 result = sgn0 >= sgn1;
4894 break;
4895 default:
4896 gcc_unreachable ();
4897 }
4898
4899 return constant_boolean_node (result, type);
4900 }
4901 \f
4902 /* Helper routine for make_range. Perform one step for it; return the
4903 new expression if the loop should continue or NULL_TREE if it should
4904 stop. */
4905
4906 tree
4907 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4908 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4909 bool *strict_overflow_p)
4910 {
4911 tree arg0_type = TREE_TYPE (arg0);
4912 tree n_low, n_high, low = *p_low, high = *p_high;
4913 int in_p = *p_in_p, n_in_p;
4914
4915 switch (code)
4916 {
4917 case TRUTH_NOT_EXPR:
4918 /* We can only do something if the range is testing for zero. */
4919 if (low == NULL_TREE || high == NULL_TREE
4920 || ! integer_zerop (low) || ! integer_zerop (high))
4921 return NULL_TREE;
4922 *p_in_p = ! in_p;
4923 return arg0;
4924
4925 case EQ_EXPR: case NE_EXPR:
4926 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4927 /* We can only do something if the range is testing for zero
4928 and if the second operand is an integer constant. Note that
4929 saying something is "in" the range we make is done by
4930 complementing IN_P since it will be set in the initial case of
4931 being not equal to zero; "out" is leaving it alone. */
4932 if (low == NULL_TREE || high == NULL_TREE
4933 || ! integer_zerop (low) || ! integer_zerop (high)
4934 || TREE_CODE (arg1) != INTEGER_CST)
4935 return NULL_TREE;
4936
4937 switch (code)
4938 {
4939 case NE_EXPR: /* - [c, c] */
4940 low = high = arg1;
4941 break;
4942 case EQ_EXPR: /* + [c, c] */
4943 in_p = ! in_p, low = high = arg1;
4944 break;
4945 case GT_EXPR: /* - [-, c] */
4946 low = 0, high = arg1;
4947 break;
4948 case GE_EXPR: /* + [c, -] */
4949 in_p = ! in_p, low = arg1, high = 0;
4950 break;
4951 case LT_EXPR: /* - [c, -] */
4952 low = arg1, high = 0;
4953 break;
4954 case LE_EXPR: /* + [-, c] */
4955 in_p = ! in_p, low = 0, high = arg1;
4956 break;
4957 default:
4958 gcc_unreachable ();
4959 }
4960
4961 /* If this is an unsigned comparison, we also know that EXP is
4962 greater than or equal to zero. We base the range tests we make
4963 on that fact, so we record it here so we can parse existing
4964 range tests. We test arg0_type since often the return type
4965 of, e.g. EQ_EXPR, is boolean. */
4966 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4967 {
4968 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4969 in_p, low, high, 1,
4970 build_int_cst (arg0_type, 0),
4971 NULL_TREE))
4972 return NULL_TREE;
4973
4974 in_p = n_in_p, low = n_low, high = n_high;
4975
4976 /* If the high bound is missing, but we have a nonzero low
4977 bound, reverse the range so it goes from zero to the low bound
4978 minus 1. */
4979 if (high == 0 && low && ! integer_zerop (low))
4980 {
4981 in_p = ! in_p;
4982 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4983 build_int_cst (TREE_TYPE (low), 1), 0);
4984 low = build_int_cst (arg0_type, 0);
4985 }
4986 }
4987
4988 *p_low = low;
4989 *p_high = high;
4990 *p_in_p = in_p;
4991 return arg0;
4992
4993 case NEGATE_EXPR:
4994 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4995 low and high are non-NULL, then normalize will DTRT. */
4996 if (!TYPE_UNSIGNED (arg0_type)
4997 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4998 {
4999 if (low == NULL_TREE)
5000 low = TYPE_MIN_VALUE (arg0_type);
5001 if (high == NULL_TREE)
5002 high = TYPE_MAX_VALUE (arg0_type);
5003 }
5004
5005 /* (-x) IN [a,b] -> x in [-b, -a] */
5006 n_low = range_binop (MINUS_EXPR, exp_type,
5007 build_int_cst (exp_type, 0),
5008 0, high, 1);
5009 n_high = range_binop (MINUS_EXPR, exp_type,
5010 build_int_cst (exp_type, 0),
5011 0, low, 0);
5012 if (n_high != 0 && TREE_OVERFLOW (n_high))
5013 return NULL_TREE;
5014 goto normalize;
5015
5016 case BIT_NOT_EXPR:
5017 /* ~ X -> -X - 1 */
5018 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5019 build_int_cst (exp_type, 1));
5020
5021 case PLUS_EXPR:
5022 case MINUS_EXPR:
5023 if (TREE_CODE (arg1) != INTEGER_CST)
5024 return NULL_TREE;
5025
5026 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5027 move a constant to the other side. */
5028 if (!TYPE_UNSIGNED (arg0_type)
5029 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5030 return NULL_TREE;
5031
5032 /* If EXP is signed, any overflow in the computation is undefined,
5033 so we don't worry about it so long as our computations on
5034 the bounds don't overflow. For unsigned, overflow is defined
5035 and this is exactly the right thing. */
5036 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5037 arg0_type, low, 0, arg1, 0);
5038 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5039 arg0_type, high, 1, arg1, 0);
5040 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5041 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5042 return NULL_TREE;
5043
5044 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5045 *strict_overflow_p = true;
5046
5047 normalize:
5048 /* Check for an unsigned range which has wrapped around the maximum
5049 value thus making n_high < n_low, and normalize it. */
5050 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5051 {
5052 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5053 build_int_cst (TREE_TYPE (n_high), 1), 0);
5054 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5055 build_int_cst (TREE_TYPE (n_low), 1), 0);
5056
5057 /* If the range is of the form +/- [ x+1, x ], we won't
5058 be able to normalize it. But then, it represents the
5059 whole range or the empty set, so make it
5060 +/- [ -, - ]. */
5061 if (tree_int_cst_equal (n_low, low)
5062 && tree_int_cst_equal (n_high, high))
5063 low = high = 0;
5064 else
5065 in_p = ! in_p;
5066 }
5067 else
5068 low = n_low, high = n_high;
5069
5070 *p_low = low;
5071 *p_high = high;
5072 *p_in_p = in_p;
5073 return arg0;
5074
5075 CASE_CONVERT:
5076 case NON_LVALUE_EXPR:
5077 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5078 return NULL_TREE;
5079
5080 if (! INTEGRAL_TYPE_P (arg0_type)
5081 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5082 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5083 return NULL_TREE;
5084
5085 n_low = low, n_high = high;
5086
5087 if (n_low != 0)
5088 n_low = fold_convert_loc (loc, arg0_type, n_low);
5089
5090 if (n_high != 0)
5091 n_high = fold_convert_loc (loc, arg0_type, n_high);
5092
5093 /* If we're converting arg0 from an unsigned type to exp,
5094 a signed type, we will be doing the comparison as unsigned.
5095 The tests above have already verified that LOW and HIGH
5096 are both positive.
5097
5098 So we have to ensure that we will handle large unsigned
5099 values the same way that the current signed bounds treat
5100 negative values. */
5101
5102 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5103 {
5104 tree high_positive;
5105 tree equiv_type;
5106 /* For fixed-point modes, we need to pass the saturating flag
5107 as the 2nd parameter. */
5108 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5109 equiv_type
5110 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5111 TYPE_SATURATING (arg0_type));
5112 else
5113 equiv_type
5114 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5115
5116 /* A range without an upper bound is, naturally, unbounded.
5117 Since convert would have cropped a very large value, use
5118 the max value for the destination type. */
5119 high_positive
5120 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5121 : TYPE_MAX_VALUE (arg0_type);
5122
5123 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5124 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5125 fold_convert_loc (loc, arg0_type,
5126 high_positive),
5127 build_int_cst (arg0_type, 1));
5128
5129 /* If the low bound is specified, "and" the range with the
5130 range for which the original unsigned value will be
5131 positive. */
5132 if (low != 0)
5133 {
5134 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5135 1, fold_convert_loc (loc, arg0_type,
5136 integer_zero_node),
5137 high_positive))
5138 return NULL_TREE;
5139
5140 in_p = (n_in_p == in_p);
5141 }
5142 else
5143 {
5144 /* Otherwise, "or" the range with the range of the input
5145 that will be interpreted as negative. */
5146 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5147 1, fold_convert_loc (loc, arg0_type,
5148 integer_zero_node),
5149 high_positive))
5150 return NULL_TREE;
5151
5152 in_p = (in_p != n_in_p);
5153 }
5154 }
5155
5156 *p_low = n_low;
5157 *p_high = n_high;
5158 *p_in_p = in_p;
5159 return arg0;
5160
5161 default:
5162 return NULL_TREE;
5163 }
5164 }
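
/* Illustrative sketch, not part of GCC: the "~ X -> -X - 1" identity used in
   the BIT_NOT_EXPR case above, valid for the two's complement arithmetic
   that GCC targets use.  Never built ("#if 0").  */
#if 0
#include <assert.h>

int
main (void)
{
  int samples[] = { -7, -1, 0, 1, 42 };
  for (unsigned i = 0; i < sizeof samples / sizeof samples[0]; i++)
    {
      int x = samples[i];
      assert (~x == -x - 1);
    }
  return 0;
}
#endif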
5165
5166 /* Given EXP, a logical expression, set the range it is testing into
5167 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5168 actually being tested. *PLOW and *PHIGH will be made of the same
5169 type as the returned expression. If EXP is not a comparison, we
5170 will most likely not be returning a useful value and range. Set
5171 *STRICT_OVERFLOW_P to true if the return value is only valid
5172 because signed overflow is undefined; otherwise, do not change
5173 *STRICT_OVERFLOW_P. */
5174
5175 tree
5176 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5177 bool *strict_overflow_p)
5178 {
5179 enum tree_code code;
5180 tree arg0, arg1 = NULL_TREE;
5181 tree exp_type, nexp;
5182 int in_p;
5183 tree low, high;
5184 location_t loc = EXPR_LOCATION (exp);
5185
5186 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5187 and see if we can refine the range. Some of the cases below may not
5188 happen, but it doesn't seem worth worrying about this. We "continue"
5189 the outer loop when we've changed something; otherwise we "break"
5190 the switch, which will "break" the while. */
5191
5192 in_p = 0;
5193 low = high = build_int_cst (TREE_TYPE (exp), 0);
5194
5195 while (1)
5196 {
5197 code = TREE_CODE (exp);
5198 exp_type = TREE_TYPE (exp);
5199 arg0 = NULL_TREE;
5200
5201 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5202 {
5203 if (TREE_OPERAND_LENGTH (exp) > 0)
5204 arg0 = TREE_OPERAND (exp, 0);
5205 if (TREE_CODE_CLASS (code) == tcc_binary
5206 || TREE_CODE_CLASS (code) == tcc_comparison
5207 || (TREE_CODE_CLASS (code) == tcc_expression
5208 && TREE_OPERAND_LENGTH (exp) > 1))
5209 arg1 = TREE_OPERAND (exp, 1);
5210 }
5211 if (arg0 == NULL_TREE)
5212 break;
5213
5214 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5215 &high, &in_p, strict_overflow_p);
5216 if (nexp == NULL_TREE)
5217 break;
5218 exp = nexp;
5219 }
5220
5221 /* If EXP is a constant, we can evaluate whether this is true or false. */
5222 if (TREE_CODE (exp) == INTEGER_CST)
5223 {
5224 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5225 exp, 0, low, 0))
5226 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5227 exp, 1, high, 1)));
5228 low = high = 0;
5229 exp = 0;
5230 }
5231
5232 *pin_p = in_p, *plow = low, *phigh = high;
5233 return exp;
5234 }
5235
5236 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
5237 a bitwise check, i.e. when
5238 LOW == 0xXX...X00...0
5239 HIGH == 0xXX...X11...1
5240 Return corresponding mask in MASK and stem in VALUE. */
5241
5242 static bool
5243 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5244 tree *value)
5245 {
5246 if (TREE_CODE (low) != INTEGER_CST
5247 || TREE_CODE (high) != INTEGER_CST)
5248 return false;
5249
5250 unsigned prec = TYPE_PRECISION (type);
5251 wide_int lo = wi::to_wide (low, prec);
5252 wide_int hi = wi::to_wide (high, prec);
5253
5254 wide_int end_mask = lo ^ hi;
5255 if ((end_mask & (end_mask + 1)) != 0
5256 || (lo & end_mask) != 0)
5257 return false;
5258
5259 wide_int stem_mask = ~end_mask;
5260 wide_int stem = lo & stem_mask;
5261 if (stem != (hi & stem_mask))
5262 return false;
5263
5264 *mask = wide_int_to_tree (type, stem_mask);
5265 *value = wide_int_to_tree (type, stem);
5266
5267 return true;
5268 }
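
/* Illustrative sketch, not part of GCC: a standalone, never-built ("#if 0")
   example of the decomposition above for LOW = 0x20 and HIGH = 0x3f, where
   only the low five bits vary.  The stem mask is ~0x1f and the stem is 0x20,
   so the range check reduces to a single masked compare.  */
#if 0
#include <assert.h>

int
main (void)
{
  const unsigned low = 0x20, high = 0x3f;
  const unsigned end_mask = low ^ high;		/* 0x1f: the bits that vary.  */
  const unsigned stem_mask = ~end_mask;		/* Selects the fixed bits.  */
  const unsigned stem = low & stem_mask;	/* 0x20.  */

  for (unsigned x = 0; x < 256; x++)
    assert ((x >= low && x <= high) == ((x & stem_mask) == stem));
  return 0;
}
#endif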
5269 \f
5270 /* Helper routine for build_range_check and match.pd. Return the type to
5271 perform the check or NULL if it shouldn't be optimized. */
5272
5273 tree
5274 range_check_type (tree etype)
5275 {
5276 /* First make sure that arithmetic in this type is valid, then make sure
5277 that it wraps around. */
5278 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5279 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5280
5281 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5282 {
5283 tree utype, minv, maxv;
5284
5285 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5286 for the type in question, as we rely on this here. */
5287 utype = unsigned_type_for (etype);
5288 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5289 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5290 build_int_cst (TREE_TYPE (maxv), 1), 1);
5291 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5292
5293 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5294 minv, 1, maxv, 1)))
5295 etype = utype;
5296 else
5297 return NULL_TREE;
5298 }
5299 else if (POINTER_TYPE_P (etype))
5300 etype = unsigned_type_for (etype);
5301 return etype;
5302 }
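
/* Illustrative sketch, not part of GCC: the wrap-around property that the
   check above relies on, shown for int.  Never built ("#if 0").  */
#if 0
#include <assert.h>
#include <limits.h>

int
main (void)
{
  /* (unsigned) INT_MAX + 1 wraps around to (unsigned) INT_MIN, so a
     subtraction-based range check stays within one unsigned "lap".  */
  assert ((unsigned) INT_MAX + 1 == (unsigned) INT_MIN);
  return 0;
}
#endif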
5303
5304 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5305 type, TYPE, return an expression to test if EXP is in (or out of, depending
5306 on IN_P) the range. Return 0 if the test couldn't be created. */
5307
5308 tree
5309 build_range_check (location_t loc, tree type, tree exp, int in_p,
5310 tree low, tree high)
5311 {
5312 tree etype = TREE_TYPE (exp), mask, value;
5313
5314 /* Disable this optimization for function pointer expressions
5315 on targets that require function pointer canonicalization. */
5316 if (targetm.have_canonicalize_funcptr_for_compare ()
5317 && POINTER_TYPE_P (etype)
5318 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5319 return NULL_TREE;
5320
5321 if (! in_p)
5322 {
5323 value = build_range_check (loc, type, exp, 1, low, high);
5324 if (value != 0)
5325 return invert_truthvalue_loc (loc, value);
5326
5327 return 0;
5328 }
5329
5330 if (low == 0 && high == 0)
5331 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5332
5333 if (low == 0)
5334 return fold_build2_loc (loc, LE_EXPR, type, exp,
5335 fold_convert_loc (loc, etype, high));
5336
5337 if (high == 0)
5338 return fold_build2_loc (loc, GE_EXPR, type, exp,
5339 fold_convert_loc (loc, etype, low));
5340
5341 if (operand_equal_p (low, high, 0))
5342 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5343 fold_convert_loc (loc, etype, low));
5344
5345 if (TREE_CODE (exp) == BIT_AND_EXPR
5346 && maskable_range_p (low, high, etype, &mask, &value))
5347 return fold_build2_loc (loc, EQ_EXPR, type,
5348 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5349 exp, mask),
5350 value);
5351
5352 if (integer_zerop (low))
5353 {
5354 if (! TYPE_UNSIGNED (etype))
5355 {
5356 etype = unsigned_type_for (etype);
5357 high = fold_convert_loc (loc, etype, high);
5358 exp = fold_convert_loc (loc, etype, exp);
5359 }
5360 return build_range_check (loc, type, exp, 1, 0, high);
5361 }
5362
5363 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5364 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5365 {
5366 int prec = TYPE_PRECISION (etype);
5367
5368 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5369 {
5370 if (TYPE_UNSIGNED (etype))
5371 {
5372 tree signed_etype = signed_type_for (etype);
5373 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5374 etype
5375 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5376 else
5377 etype = signed_etype;
5378 exp = fold_convert_loc (loc, etype, exp);
5379 }
5380 return fold_build2_loc (loc, GT_EXPR, type, exp,
5381 build_int_cst (etype, 0));
5382 }
5383 }
5384
5385 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5386 This requires wrap-around arithmetic for the type of the expression. */
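 /* For instance, with unsigned char C, the test 5 <= C && C <= 10
 becomes (unsigned char) (C - 5) <= 5, i.e. a single unsigned
 comparison against the width of the range. */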
5387 etype = range_check_type (etype);
5388 if (etype == NULL_TREE)
5389 return NULL_TREE;
5390
5391 high = fold_convert_loc (loc, etype, high);
5392 low = fold_convert_loc (loc, etype, low);
5393 exp = fold_convert_loc (loc, etype, exp);
5394
5395 value = const_binop (MINUS_EXPR, high, low);
5396
5397 if (value != 0 && !TREE_OVERFLOW (value))
5398 return build_range_check (loc, type,
5399 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5400 1, build_int_cst (etype, 0), value);
5401
5402 return 0;
5403 }
5404 \f
5405 /* Return the predecessor of VAL in its type, handling the infinite case. */
5406
5407 static tree
5408 range_predecessor (tree val)
5409 {
5410 tree type = TREE_TYPE (val);
5411
5412 if (INTEGRAL_TYPE_P (type)
5413 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5414 return 0;
5415 else
5416 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5417 build_int_cst (TREE_TYPE (val), 1), 0);
5418 }
5419
5420 /* Return the successor of VAL in its type, handling the infinite case. */
5421
5422 static tree
5423 range_successor (tree val)
5424 {
5425 tree type = TREE_TYPE (val);
5426
5427 if (INTEGRAL_TYPE_P (type)
5428 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5429 return 0;
5430 else
5431 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5432 build_int_cst (TREE_TYPE (val), 1), 0);
5433 }
5434
5435 /* Given two ranges, see if we can merge them into one. Return 1 if we
5436 can, 0 if we can't. Set the output range into the specified parameters. */
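 For example, merging the included ranges +[0, 9] and +[5, 15] yields
 +[5, 9], while merging the excluded ranges -[-, 4] and -[10, -]
 yields the included range +[5, 9], since together they exclude
 everything outside [5, 9]. */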
5437
5438 bool
5439 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5440 tree high0, int in1_p, tree low1, tree high1)
5441 {
5442 int no_overlap;
5443 int subset;
5444 int temp;
5445 tree tem;
5446 int in_p;
5447 tree low, high;
5448 int lowequal = ((low0 == 0 && low1 == 0)
5449 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5450 low0, 0, low1, 0)));
5451 int highequal = ((high0 == 0 && high1 == 0)
5452 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5453 high0, 1, high1, 1)));
5454
5455 /* Make range 0 be the range that starts first, or ends last if they
5456 start at the same value. Swap them if that isn't already the case. */
5457 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5458 low0, 0, low1, 0))
5459 || (lowequal
5460 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5461 high1, 1, high0, 1))))
5462 {
5463 temp = in0_p, in0_p = in1_p, in1_p = temp;
5464 tem = low0, low0 = low1, low1 = tem;
5465 tem = high0, high0 = high1, high1 = tem;
5466 }
5467
5468 /* If the second range is != high1 where high1 is the maximum value
5469 of its type, try first merging with the < high1 range. */
5470 if (low1
5471 && high1
5472 && TREE_CODE (low1) == INTEGER_CST
5473 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5474 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5475 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5476 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5477 && operand_equal_p (low1, high1, 0))
5478 {
5479 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5480 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5481 !in1_p, NULL_TREE, range_predecessor (low1)))
5482 return true;
5483 /* Similarly, if the second range is != low1 where low1 is the minimum
5484 value of its type, try first merging with the > low1 range. */
5485 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5486 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5487 !in1_p, range_successor (low1), NULL_TREE))
5488 return true;
5489 }
5490
5491 /* Now flag two cases, whether the ranges are disjoint or whether the
5492 second range is totally subsumed in the first. Note that the tests
5493 below are simplified by the ones above. */
5494 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5495 high0, 1, low1, 0));
5496 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5497 high1, 1, high0, 1));
5498
5499 /* We now have four cases, depending on whether we are including or
5500 excluding the two ranges. */
5501 if (in0_p && in1_p)
5502 {
5503 /* If they don't overlap, the result is false. If the second range
5504 is a subset it is the result. Otherwise, the range is from the start
5505 of the second to the end of the first. */
5506 if (no_overlap)
5507 in_p = 0, low = high = 0;
5508 else if (subset)
5509 in_p = 1, low = low1, high = high1;
5510 else
5511 in_p = 1, low = low1, high = high0;
5512 }
5513
5514 else if (in0_p && ! in1_p)
5515 {
5516 /* If they don't overlap, the result is the first range. If they are
5517 equal, the result is false. If the second range is a subset of the
5518 first, and the ranges begin at the same place, we go from just after
5519 the end of the second range to the end of the first. If the second
5520 range is not a subset of the first, or if it is a subset and both
5521 ranges end at the same place, the range starts at the start of the
5522 first range and ends just before the second range.
5523 Otherwise, we can't describe this as a single range. */
5524 if (no_overlap)
5525 in_p = 1, low = low0, high = high0;
5526 else if (lowequal && highequal)
5527 in_p = 0, low = high = 0;
5528 else if (subset && lowequal)
5529 {
5530 low = range_successor (high1);
5531 high = high0;
5532 in_p = 1;
5533 if (low == 0)
5534 {
5535 /* We are in the weird situation where high0 > high1 but
5536 high1 has no successor. Punt. */
5537 return 0;
5538 }
5539 }
5540 else if (! subset || highequal)
5541 {
5542 low = low0;
5543 high = range_predecessor (low1);
5544 in_p = 1;
5545 if (high == 0)
5546 {
5547 /* low0 < low1 but low1 has no predecessor. Punt. */
5548 return 0;
5549 }
5550 }
5551 else
5552 return 0;
5553 }
5554
5555 else if (! in0_p && in1_p)
5556 {
5557 /* If they don't overlap, the result is the second range. If the second
5558 is a subset of the first, the result is false. Otherwise,
5559 the range starts just after the first range and ends at the
5560 end of the second. */
5561 if (no_overlap)
5562 in_p = 1, low = low1, high = high1;
5563 else if (subset || highequal)
5564 in_p = 0, low = high = 0;
5565 else
5566 {
5567 low = range_successor (high0);
5568 high = high1;
5569 in_p = 1;
5570 if (low == 0)
5571 {
5572 /* high1 > high0 but high0 has no successor. Punt. */
5573 return 0;
5574 }
5575 }
5576 }
5577
5578 else
5579 {
5580 /* The case where we are excluding both ranges. Here the complex case
5581 is if they don't overlap. In that case, the only time we have a
5582 range is if they are adjacent. If the second is a subset of the
5583 first, the result is the first. Otherwise, the range to exclude
5584 starts at the beginning of the first range and ends at the end of the
5585 second. */
5586 if (no_overlap)
5587 {
5588 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5589 range_successor (high0),
5590 1, low1, 0)))
5591 in_p = 0, low = low0, high = high1;
5592 else
5593 {
5594 /* Canonicalize - [min, x] into - [-, x]. */
5595 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5596 switch (TREE_CODE (TREE_TYPE (low0)))
5597 {
5598 case ENUMERAL_TYPE:
5599 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5600 GET_MODE_BITSIZE
5601 (TYPE_MODE (TREE_TYPE (low0)))))
5602 break;
5603 /* FALLTHROUGH */
5604 case INTEGER_TYPE:
5605 if (tree_int_cst_equal (low0,
5606 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5607 low0 = 0;
5608 break;
5609 case POINTER_TYPE:
5610 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5611 && integer_zerop (low0))
5612 low0 = 0;
5613 break;
5614 default:
5615 break;
5616 }
5617
5618 /* Canonicalize - [x, max] into - [x, -]. */
5619 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5620 switch (TREE_CODE (TREE_TYPE (high1)))
5621 {
5622 case ENUMERAL_TYPE:
5623 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5624 GET_MODE_BITSIZE
5625 (TYPE_MODE (TREE_TYPE (high1)))))
5626 break;
5627 /* FALLTHROUGH */
5628 case INTEGER_TYPE:
5629 if (tree_int_cst_equal (high1,
5630 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5631 high1 = 0;
5632 break;
5633 case POINTER_TYPE:
5634 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5635 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5636 high1, 1,
5637 build_int_cst (TREE_TYPE (high1), 1),
5638 1)))
5639 high1 = 0;
5640 break;
5641 default:
5642 break;
5643 }
5644
5645 /* The ranges might be also adjacent between the maximum and
5646 minimum values of the given type. For
5647 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5648 return + [x + 1, y - 1]. */
5649 if (low0 == 0 && high1 == 0)
5650 {
5651 low = range_successor (high0);
5652 high = range_predecessor (low1);
5653 if (low == 0 || high == 0)
5654 return 0;
5655
5656 in_p = 1;
5657 }
5658 else
5659 return 0;
5660 }
5661 }
5662 else if (subset)
5663 in_p = 0, low = low0, high = high0;
5664 else
5665 in_p = 0, low = low0, high = high1;
5666 }
5667
5668 *pin_p = in_p, *plow = low, *phigh = high;
5669 return 1;
5670 }
5671 \f
5672
5673 /* Subroutine of fold, looking inside expressions of the form
5674 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5675 of the COND_EXPR. This function is being used also to optimize
5676 A op B ? C : A, by reversing the comparison first.
5677
5678 Return a folded expression whose code is not a COND_EXPR
5679 anymore, or NULL_TREE if no folding opportunity is found. */
5680
5681 static tree
5682 fold_cond_expr_with_comparison (location_t loc, tree type,
5683 tree arg0, tree arg1, tree arg2)
5684 {
5685 enum tree_code comp_code = TREE_CODE (arg0);
5686 tree arg00 = TREE_OPERAND (arg0, 0);
5687 tree arg01 = TREE_OPERAND (arg0, 1);
5688 tree arg1_type = TREE_TYPE (arg1);
5689 tree tem;
5690
5691 STRIP_NOPS (arg1);
5692 STRIP_NOPS (arg2);
5693
5694 /* If we have A op 0 ? A : -A, consider applying the following
5695 transformations:
5696
5697 A == 0? A : -A same as -A
5698 A != 0? A : -A same as A
5699 A >= 0? A : -A same as abs (A)
5700 A > 0? A : -A same as abs (A)
5701 A <= 0? A : -A same as -abs (A)
5702 A < 0? A : -A same as -abs (A)
5703
5704 None of these transformations work for modes with signed
5705 zeros. If A is +/-0, the first two transformations will
5706 change the sign of the result (from +0 to -0, or vice
5707 versa). The last four will fix the sign of the result,
5708 even though the original expressions could be positive or
5709 negative, depending on the sign of A.
5710
5711 Note that all these transformations are correct if A is
5712 NaN, since the two alternatives (A and -A) are also NaNs. */
5713 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5714 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5715 ? real_zerop (arg01)
5716 : integer_zerop (arg01))
5717 && ((TREE_CODE (arg2) == NEGATE_EXPR
5718 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5719 /* In the case that A is of the form X-Y, '-A' (arg2) may
5720 have already been folded to Y-X; check for that. */
5721 || (TREE_CODE (arg1) == MINUS_EXPR
5722 && TREE_CODE (arg2) == MINUS_EXPR
5723 && operand_equal_p (TREE_OPERAND (arg1, 0),
5724 TREE_OPERAND (arg2, 1), 0)
5725 && operand_equal_p (TREE_OPERAND (arg1, 1),
5726 TREE_OPERAND (arg2, 0), 0))))
5727 switch (comp_code)
5728 {
5729 case EQ_EXPR:
5730 case UNEQ_EXPR:
5731 tem = fold_convert_loc (loc, arg1_type, arg1);
5732 return fold_convert_loc (loc, type, negate_expr (tem));
5733 case NE_EXPR:
5734 case LTGT_EXPR:
5735 return fold_convert_loc (loc, type, arg1);
5736 case UNGE_EXPR:
5737 case UNGT_EXPR:
5738 if (flag_trapping_math)
5739 break;
5740 /* Fall through. */
5741 case GE_EXPR:
5742 case GT_EXPR:
5743 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5744 break;
5745 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5746 return fold_convert_loc (loc, type, tem);
5747 case UNLE_EXPR:
5748 case UNLT_EXPR:
5749 if (flag_trapping_math)
5750 break;
5751 /* FALLTHRU */
5752 case LE_EXPR:
5753 case LT_EXPR:
5754 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5755 break;
5756 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5757 return negate_expr (fold_convert_loc (loc, type, tem));
5758 default:
5759 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5760 break;
5761 }
5762
5763 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5764 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5765 both transformations are correct when A is NaN: A != 0
5766 is then true, and A == 0 is false. */
5767
5768 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5769 && integer_zerop (arg01) && integer_zerop (arg2))
5770 {
5771 if (comp_code == NE_EXPR)
5772 return fold_convert_loc (loc, type, arg1);
5773 else if (comp_code == EQ_EXPR)
5774 return build_zero_cst (type);
5775 }
5776
5777 /* Try some transformations of A op B ? A : B.
5778
5779 A == B? A : B same as B
5780 A != B? A : B same as A
5781 A >= B? A : B same as max (A, B)
5782 A > B? A : B same as max (B, A)
5783 A <= B? A : B same as min (A, B)
5784 A < B? A : B same as min (B, A)
5785
5786 As above, these transformations don't work in the presence
5787 of signed zeros. For example, if A and B are zeros of
5788 opposite sign, the first two transformations will change
5789 the sign of the result. In the last four, the original
5790 expressions give different results for (A=+0, B=-0) and
5791 (A=-0, B=+0), but the transformed expressions do not.
5792
5793 The first two transformations are correct if either A or B
5794 is a NaN. In the first transformation, the condition will
5795 be false, and B will indeed be chosen. In the case of the
5796 second transformation, the condition A != B will be true,
5797 and A will be chosen.
5798
5799 The conversions to max() and min() are not correct if B is
5800 a number and A is not. The conditions in the original
5801 expressions will be false, so all four give B. The min()
5802 and max() versions would give a NaN instead. */
5803 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5804 && operand_equal_for_comparison_p (arg01, arg2)
5805 /* Avoid these transformations if the COND_EXPR may be used
5806 as an lvalue in the C++ front-end. PR c++/19199. */
5807 && (in_gimple_form
5808 || VECTOR_TYPE_P (type)
5809 || (! lang_GNU_CXX ()
5810 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5811 || ! maybe_lvalue_p (arg1)
5812 || ! maybe_lvalue_p (arg2)))
5813 {
5814 tree comp_op0 = arg00;
5815 tree comp_op1 = arg01;
5816 tree comp_type = TREE_TYPE (comp_op0);
5817
5818 switch (comp_code)
5819 {
5820 case EQ_EXPR:
5821 return fold_convert_loc (loc, type, arg2);
5822 case NE_EXPR:
5823 return fold_convert_loc (loc, type, arg1);
5824 case LE_EXPR:
5825 case LT_EXPR:
5826 case UNLE_EXPR:
5827 case UNLT_EXPR:
5828 /* In C++ a ?: expression can be an lvalue, so put the
5829 operand which will be used if they are equal first
5830 so that we can convert this back to the
5831 corresponding COND_EXPR. */
5832 if (!HONOR_NANS (arg1))
5833 {
5834 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5835 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5836 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5837 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5838 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5839 comp_op1, comp_op0);
5840 return fold_convert_loc (loc, type, tem);
5841 }
5842 break;
5843 case GE_EXPR:
5844 case GT_EXPR:
5845 case UNGE_EXPR:
5846 case UNGT_EXPR:
5847 if (!HONOR_NANS (arg1))
5848 {
5849 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5850 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5851 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5852 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5853 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5854 comp_op1, comp_op0);
5855 return fold_convert_loc (loc, type, tem);
5856 }
5857 break;
5858 case UNEQ_EXPR:
5859 if (!HONOR_NANS (arg1))
5860 return fold_convert_loc (loc, type, arg2);
5861 break;
5862 case LTGT_EXPR:
5863 if (!HONOR_NANS (arg1))
5864 return fold_convert_loc (loc, type, arg1);
5865 break;
5866 default:
5867 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5868 break;
5869 }
5870 }
5871
5872 return NULL_TREE;
5873 }
5874
5875
5876 \f
5877 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5878 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5879 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5880 false) >= 2)
5881 #endif
5882
5883 /* EXP is some logical combination of boolean tests. See if we can
5884 merge it into some range test. Return the new tree if so. */
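 For instance, given CH >= '0' && CH <= '9', the two comparisons are
 turned into the ranges +['0', -] and +[-, '9'], merged into
 +['0', '9'], and then rebuilt as a single range check such as
 (unsigned char) (CH - '0') <= 9. */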
5885
5886 static tree
5887 fold_range_test (location_t loc, enum tree_code code, tree type,
5888 tree op0, tree op1)
5889 {
5890 int or_op = (code == TRUTH_ORIF_EXPR
5891 || code == TRUTH_OR_EXPR);
5892 int in0_p, in1_p, in_p;
5893 tree low0, low1, low, high0, high1, high;
5894 bool strict_overflow_p = false;
5895 tree tem, lhs, rhs;
5896 const char * const warnmsg = G_("assuming signed overflow does not occur "
5897 "when simplifying range test");
5898
5899 if (!INTEGRAL_TYPE_P (type))
5900 return 0;
5901
5902 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5903 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5904
5905 /* If this is an OR operation, invert both sides; we will invert
5906 again at the end. */
5907 if (or_op)
5908 in0_p = ! in0_p, in1_p = ! in1_p;
5909
5910 /* If both expressions are the same, if we can merge the ranges, and we
5911 can build the range test, return it or it inverted. If one of the
5912 ranges is always true or always false, consider it to be the same
5913 expression as the other. */
5914 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5915 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5916 in1_p, low1, high1)
5917 && (tem = (build_range_check (loc, type,
5918 lhs != 0 ? lhs
5919 : rhs != 0 ? rhs : integer_zero_node,
5920 in_p, low, high))) != 0)
5921 {
5922 if (strict_overflow_p)
5923 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5924 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5925 }
5926
5927 /* On machines where the branch cost is expensive, if this is a
5928 short-circuited branch and the underlying object on both sides
5929 is the same, make a non-short-circuit operation. */
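 /* E.g. when the ranges of A != 0 && A != 5 cannot be merged, the
 TRUTH_ANDIF_EXPR can still be rewritten as a TRUTH_AND_EXPR that
 evaluates both tests unconditionally, since each side only reads A. */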
5930 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
5931 if (param_logical_op_non_short_circuit != -1)
5932 logical_op_non_short_circuit
5933 = param_logical_op_non_short_circuit;
5934 if (logical_op_non_short_circuit
5935 && !flag_sanitize_coverage
5936 && lhs != 0 && rhs != 0
5937 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5938 && operand_equal_p (lhs, rhs, 0))
5939 {
5940 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5941 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5942 which cases we can't do this. */
5943 if (simple_operand_p (lhs))
5944 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5945 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5946 type, op0, op1);
5947
5948 else if (!lang_hooks.decls.global_bindings_p ()
5949 && !CONTAINS_PLACEHOLDER_P (lhs))
5950 {
5951 tree common = save_expr (lhs);
5952
5953 if ((lhs = build_range_check (loc, type, common,
5954 or_op ? ! in0_p : in0_p,
5955 low0, high0)) != 0
5956 && (rhs = build_range_check (loc, type, common,
5957 or_op ? ! in1_p : in1_p,
5958 low1, high1)) != 0)
5959 {
5960 if (strict_overflow_p)
5961 fold_overflow_warning (warnmsg,
5962 WARN_STRICT_OVERFLOW_COMPARISON);
5963 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5964 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5965 type, lhs, rhs);
5966 }
5967 }
5968 }
5969
5970 return 0;
5971 }
5972 \f
5973 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5974 bit value. Arrange things so the extra bits will be set to zero if and
5975 only if C is sign-extended to its full width. If MASK is nonzero,
5976 it is an INTEGER_CST that should be AND'ed with the extra bits. */
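 For instance, with P == 8 in a 32-bit type and no MASK,
 C == 0xffffff80 (already sign-extended) becomes 0x00000080, whereas
 C == 0x00000080 becomes 0xffffff80, so the extra bits are nonzero
 exactly when C was not sign-extended. */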
5977
5978 static tree
5979 unextend (tree c, int p, int unsignedp, tree mask)
5980 {
5981 tree type = TREE_TYPE (c);
5982 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5983 tree temp;
5984
5985 if (p == modesize || unsignedp)
5986 return c;
5987
5988 /* We work by getting just the sign bit into the low-order bit, then
5989 into the high-order bit, then sign-extending. We then XOR that value
5990 with C. */
5991 temp = build_int_cst (TREE_TYPE (c),
5992 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5993
5994 /* We must use a signed type in order to get an arithmetic right shift.
5995 However, we must also avoid introducing accidental overflows, so that
5996 a subsequent call to integer_zerop will work. Hence we must
5997 do the type conversion here. At this point, the constant is either
5998 zero or one, and the conversion to a signed type can never overflow.
5999 We could get an overflow if this conversion is done anywhere else. */
6000 if (TYPE_UNSIGNED (type))
6001 temp = fold_convert (signed_type_for (type), temp);
6002
6003 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6004 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6005 if (mask != 0)
6006 temp = const_binop (BIT_AND_EXPR, temp,
6007 fold_convert (TREE_TYPE (c), mask));
6008 /* If necessary, convert the type back to match the type of C. */
6009 if (TYPE_UNSIGNED (type))
6010 temp = fold_convert (type, temp);
6011
6012 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6013 }
6014 \f
6015 /* For an expression that has the form
6016 (A && B) || ~B
6017 or
6018 (A || B) && ~B,
6019 we can drop one of the inner expressions and simplify to
6020 A || ~B
6021 or
6022 A && ~B
6023 LOC is the location of the resulting expression. OP is the inner
6024 logical operation; the left-hand side in the examples above, while CMPOP
6025 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6026 removing a condition that guards another, as in
6027 (A != NULL && A->...) || A == NULL
6028 which we must not transform. If RHS_ONLY is true, only eliminate the
6029 right-most operand of the inner logical operation. */
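 For example, for integer operands, (A > 0 && B > 0) || B <= 0
 simplifies to A > 0 || B <= 0, because B > 0 is the inverse of the
 right-hand comparison B <= 0. */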
6030
6031 static tree
6032 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6033 bool rhs_only)
6034 {
6035 tree type = TREE_TYPE (cmpop);
6036 enum tree_code code = TREE_CODE (cmpop);
6037 enum tree_code truthop_code = TREE_CODE (op);
6038 tree lhs = TREE_OPERAND (op, 0);
6039 tree rhs = TREE_OPERAND (op, 1);
6040 tree orig_lhs = lhs, orig_rhs = rhs;
6041 enum tree_code rhs_code = TREE_CODE (rhs);
6042 enum tree_code lhs_code = TREE_CODE (lhs);
6043 enum tree_code inv_code;
6044
6045 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6046 return NULL_TREE;
6047
6048 if (TREE_CODE_CLASS (code) != tcc_comparison)
6049 return NULL_TREE;
6050
6051 if (rhs_code == truthop_code)
6052 {
6053 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6054 if (newrhs != NULL_TREE)
6055 {
6056 rhs = newrhs;
6057 rhs_code = TREE_CODE (rhs);
6058 }
6059 }
6060 if (lhs_code == truthop_code && !rhs_only)
6061 {
6062 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6063 if (newlhs != NULL_TREE)
6064 {
6065 lhs = newlhs;
6066 lhs_code = TREE_CODE (lhs);
6067 }
6068 }
6069
6070 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6071 if (inv_code == rhs_code
6072 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6073 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6074 return lhs;
6075 if (!rhs_only && inv_code == lhs_code
6076 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6077 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6078 return rhs;
6079 if (rhs != orig_rhs || lhs != orig_lhs)
6080 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6081 lhs, rhs);
6082 return NULL_TREE;
6083 }
6084
6085 /* Find ways of folding logical expressions of LHS and RHS:
6086 Try to merge two comparisons to the same innermost item.
6087 Look for range tests like "ch >= '0' && ch <= '9'".
6088 Look for combinations of simple terms on machines with expensive branches
6089 and evaluate the RHS unconditionally.
6090
6091 For example, if we have p->a == 2 && p->b == 4 and we can make an
6092 object large enough to span both A and B, we can do this with a comparison
6093 against the object ANDed with the a mask.
6094
6095 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6096 operations to do this with one comparison.
6097
6098 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6099 function and the one above.
6100
6101 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6102 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6103
6104 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6105 two operands.
6106
6107 We return the simplified tree or 0 if no optimization is possible. */
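 As an illustration, for
 struct S { unsigned a : 4; unsigned b : 4; } s;
 the test s.a == 2 && s.b == 3 can be merged into a single load,
 mask and compare of the containing byte, conceptually
 (*(unsigned char *) &s & 0xff) == 0x32; the exact mask, shift and
 constant depend on the target's bit ordering and endianness. */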
6108
6109 static tree
6110 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6111 tree lhs, tree rhs)
6112 {
6113 /* If this is the "or" of two comparisons, we can do something if
6114 the comparisons are NE_EXPR. If this is the "and", we can do something
6115 if the comparisons are EQ_EXPR. I.e.,
6116 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6117
6118 WANTED_CODE is this operation code. For single bit fields, we can
6119 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6120 comparison for one-bit fields. */
6121
6122 enum tree_code wanted_code;
6123 enum tree_code lcode, rcode;
6124 tree ll_arg, lr_arg, rl_arg, rr_arg;
6125 tree ll_inner, lr_inner, rl_inner, rr_inner;
6126 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6127 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6128 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6129 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6130 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6131 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6132 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6133 scalar_int_mode lnmode, rnmode;
6134 tree ll_mask, lr_mask, rl_mask, rr_mask;
6135 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6136 tree l_const, r_const;
6137 tree lntype, rntype, result;
6138 HOST_WIDE_INT first_bit, end_bit;
6139 int volatilep;
6140
6141 /* Start by getting the comparison codes. Fail if anything is volatile.
6142 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6143 it were surrounded with a NE_EXPR. */
6144
6145 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6146 return 0;
6147
6148 lcode = TREE_CODE (lhs);
6149 rcode = TREE_CODE (rhs);
6150
6151 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6152 {
6153 lhs = build2 (NE_EXPR, truth_type, lhs,
6154 build_int_cst (TREE_TYPE (lhs), 0));
6155 lcode = NE_EXPR;
6156 }
6157
6158 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6159 {
6160 rhs = build2 (NE_EXPR, truth_type, rhs,
6161 build_int_cst (TREE_TYPE (rhs), 0));
6162 rcode = NE_EXPR;
6163 }
6164
6165 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6166 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6167 return 0;
6168
6169 ll_arg = TREE_OPERAND (lhs, 0);
6170 lr_arg = TREE_OPERAND (lhs, 1);
6171 rl_arg = TREE_OPERAND (rhs, 0);
6172 rr_arg = TREE_OPERAND (rhs, 1);
6173
6174 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6175 if (simple_operand_p (ll_arg)
6176 && simple_operand_p (lr_arg))
6177 {
6178 if (operand_equal_p (ll_arg, rl_arg, 0)
6179 && operand_equal_p (lr_arg, rr_arg, 0))
6180 {
6181 result = combine_comparisons (loc, code, lcode, rcode,
6182 truth_type, ll_arg, lr_arg);
6183 if (result)
6184 return result;
6185 }
6186 else if (operand_equal_p (ll_arg, rr_arg, 0)
6187 && operand_equal_p (lr_arg, rl_arg, 0))
6188 {
6189 result = combine_comparisons (loc, code, lcode,
6190 swap_tree_comparison (rcode),
6191 truth_type, ll_arg, lr_arg);
6192 if (result)
6193 return result;
6194 }
6195 }
6196
6197 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6198 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6199
6200 /* If the RHS can be evaluated unconditionally and its operands are
6201 simple, it wins to evaluate the RHS unconditionally on machines
6202 with expensive branches. In this case, this isn't a comparison
6203 that can be merged. */
6204
6205 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6206 false) >= 2
6207 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6208 && simple_operand_p (rl_arg)
6209 && simple_operand_p (rr_arg))
6210 {
6211 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6212 if (code == TRUTH_OR_EXPR
6213 && lcode == NE_EXPR && integer_zerop (lr_arg)
6214 && rcode == NE_EXPR && integer_zerop (rr_arg)
6215 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6216 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6217 return build2_loc (loc, NE_EXPR, truth_type,
6218 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6219 ll_arg, rl_arg),
6220 build_int_cst (TREE_TYPE (ll_arg), 0));
6221
6222 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6223 if (code == TRUTH_AND_EXPR
6224 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6225 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6226 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6227 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6228 return build2_loc (loc, EQ_EXPR, truth_type,
6229 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6230 ll_arg, rl_arg),
6231 build_int_cst (TREE_TYPE (ll_arg), 0));
6232 }
6233
6234 /* See if the comparisons can be merged. Then get all the parameters for
6235 each side. */
6236
6237 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6238 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6239 return 0;
6240
6241 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6242 volatilep = 0;
6243 ll_inner = decode_field_reference (loc, &ll_arg,
6244 &ll_bitsize, &ll_bitpos, &ll_mode,
6245 &ll_unsignedp, &ll_reversep, &volatilep,
6246 &ll_mask, &ll_and_mask);
6247 lr_inner = decode_field_reference (loc, &lr_arg,
6248 &lr_bitsize, &lr_bitpos, &lr_mode,
6249 &lr_unsignedp, &lr_reversep, &volatilep,
6250 &lr_mask, &lr_and_mask);
6251 rl_inner = decode_field_reference (loc, &rl_arg,
6252 &rl_bitsize, &rl_bitpos, &rl_mode,
6253 &rl_unsignedp, &rl_reversep, &volatilep,
6254 &rl_mask, &rl_and_mask);
6255 rr_inner = decode_field_reference (loc, &rr_arg,
6256 &rr_bitsize, &rr_bitpos, &rr_mode,
6257 &rr_unsignedp, &rr_reversep, &volatilep,
6258 &rr_mask, &rr_and_mask);
6259
6260 /* The inner operation on the lhs of each comparison must be the same
6261 if we are to be able to do anything.
6262 Then see if we have constants. If not, the same must be true for
6263 the rhs's. */
6264 if (volatilep
6265 || ll_reversep != rl_reversep
6266 || ll_inner == 0 || rl_inner == 0
6267 || ! operand_equal_p (ll_inner, rl_inner, 0))
6268 return 0;
6269
6270 if (TREE_CODE (lr_arg) == INTEGER_CST
6271 && TREE_CODE (rr_arg) == INTEGER_CST)
6272 {
6273 l_const = lr_arg, r_const = rr_arg;
6274 lr_reversep = ll_reversep;
6275 }
6276 else if (lr_reversep != rr_reversep
6277 || lr_inner == 0 || rr_inner == 0
6278 || ! operand_equal_p (lr_inner, rr_inner, 0))
6279 return 0;
6280 else
6281 l_const = r_const = 0;
6282
6283 /* If either comparison code is not correct for our logical operation,
6284 fail. However, we can convert a one-bit comparison against zero into
6285 the opposite comparison against that bit being set in the field. */
6286
6287 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6288 if (lcode != wanted_code)
6289 {
6290 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6291 {
6292 /* Make the left operand unsigned, since we are only interested
6293 in the value of one bit. Otherwise we are doing the wrong
6294 thing below. */
6295 ll_unsignedp = 1;
6296 l_const = ll_mask;
6297 }
6298 else
6299 return 0;
6300 }
6301
6302 /* This is analogous to the code for l_const above. */
6303 if (rcode != wanted_code)
6304 {
6305 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6306 {
6307 rl_unsignedp = 1;
6308 r_const = rl_mask;
6309 }
6310 else
6311 return 0;
6312 }
6313
6314 /* See if we can find a mode that contains both fields being compared on
6315 the left. If we can't, fail. Otherwise, update all constants and masks
6316 to be relative to a field of that size. */
6317 first_bit = MIN (ll_bitpos, rl_bitpos);
6318 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6319 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6320 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6321 volatilep, &lnmode))
6322 return 0;
6323
6324 lnbitsize = GET_MODE_BITSIZE (lnmode);
6325 lnbitpos = first_bit & ~ (lnbitsize - 1);
6326 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6327 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6328
6329 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6330 {
6331 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6332 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6333 }
6334
6335 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6336 size_int (xll_bitpos));
6337 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6338 size_int (xrl_bitpos));
6339
6340 if (l_const)
6341 {
6342 l_const = fold_convert_loc (loc, lntype, l_const);
6343 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6344 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6345 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6346 fold_build1_loc (loc, BIT_NOT_EXPR,
6347 lntype, ll_mask))))
6348 {
6349 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6350
6351 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6352 }
6353 }
6354 if (r_const)
6355 {
6356 r_const = fold_convert_loc (loc, lntype, r_const);
6357 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6358 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6359 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6360 fold_build1_loc (loc, BIT_NOT_EXPR,
6361 lntype, rl_mask))))
6362 {
6363 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6364
6365 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6366 }
6367 }
6368
6369 /* If the right sides are not constant, do the same for them. Also,
6370 disallow this optimization if a size, signedness or storage order
6371 mismatch occurs between the left and right sides. */
6372 if (l_const == 0)
6373 {
6374 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6375 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6376 || ll_reversep != lr_reversep
6377 /* Make sure the two fields on the right
6378 correspond to the left without being swapped. */
6379 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6380 return 0;
6381
6382 first_bit = MIN (lr_bitpos, rr_bitpos);
6383 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6384 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6385 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6386 volatilep, &rnmode))
6387 return 0;
6388
6389 rnbitsize = GET_MODE_BITSIZE (rnmode);
6390 rnbitpos = first_bit & ~ (rnbitsize - 1);
6391 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6392 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6393
6394 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6395 {
6396 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6397 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6398 }
6399
6400 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6401 rntype, lr_mask),
6402 size_int (xlr_bitpos));
6403 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6404 rntype, rr_mask),
6405 size_int (xrr_bitpos));
6406
6407 /* Make a mask that corresponds to both fields being compared.
6408 Do this for both items being compared. If the operands are the
6409 same size and the bits being compared are in the same position
6410 then we can do this by masking both and comparing the masked
6411 results. */
6412 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6413 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6414 if (lnbitsize == rnbitsize
6415 && xll_bitpos == xlr_bitpos
6416 && lnbitpos >= 0
6417 && rnbitpos >= 0)
6418 {
6419 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6420 lntype, lnbitsize, lnbitpos,
6421 ll_unsignedp || rl_unsignedp, ll_reversep);
6422 if (! all_ones_mask_p (ll_mask, lnbitsize))
6423 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6424
6425 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6426 rntype, rnbitsize, rnbitpos,
6427 lr_unsignedp || rr_unsignedp, lr_reversep);
6428 if (! all_ones_mask_p (lr_mask, rnbitsize))
6429 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6430
6431 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6432 }
6433
6434 /* There is still another way we can do something: If both pairs of
6435 fields being compared are adjacent, we may be able to make a wider
6436 field containing them both.
6437
6438 Note that we still must mask the lhs/rhs expressions. Furthermore,
6439 the mask must be shifted to account for the shift done by
6440 make_bit_field_ref. */
6441 if (((ll_bitsize + ll_bitpos == rl_bitpos
6442 && lr_bitsize + lr_bitpos == rr_bitpos)
6443 || (ll_bitpos == rl_bitpos + rl_bitsize
6444 && lr_bitpos == rr_bitpos + rr_bitsize))
6445 && ll_bitpos >= 0
6446 && rl_bitpos >= 0
6447 && lr_bitpos >= 0
6448 && rr_bitpos >= 0)
6449 {
6450 tree type;
6451
6452 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6453 ll_bitsize + rl_bitsize,
6454 MIN (ll_bitpos, rl_bitpos),
6455 ll_unsignedp, ll_reversep);
6456 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6457 lr_bitsize + rr_bitsize,
6458 MIN (lr_bitpos, rr_bitpos),
6459 lr_unsignedp, lr_reversep);
6460
6461 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6462 size_int (MIN (xll_bitpos, xrl_bitpos)));
6463 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6464 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6465
6466 /* Convert to the smaller type before masking out unwanted bits. */
6467 type = lntype;
6468 if (lntype != rntype)
6469 {
6470 if (lnbitsize > rnbitsize)
6471 {
6472 lhs = fold_convert_loc (loc, rntype, lhs);
6473 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6474 type = rntype;
6475 }
6476 else if (lnbitsize < rnbitsize)
6477 {
6478 rhs = fold_convert_loc (loc, lntype, rhs);
6479 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6480 type = lntype;
6481 }
6482 }
6483
6484 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6485 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6486
6487 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6488 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6489
6490 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6491 }
6492
6493 return 0;
6494 }
6495
6496 /* Handle the case of comparisons with constants. If there is something in
6497 common between the masks, those bits of the constants must be the same.
6498 If not, the condition is always false. Test for this to avoid generating
6499 incorrect code below. */
6500 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6501 if (! integer_zerop (result)
6502 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6503 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6504 {
6505 if (wanted_code == NE_EXPR)
6506 {
6507 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6508 return constant_boolean_node (true, truth_type);
6509 }
6510 else
6511 {
6512 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6513 return constant_boolean_node (false, truth_type);
6514 }
6515 }
6516
6517 if (lnbitpos < 0)
6518 return 0;
6519
6520 /* Construct the expression we will return. First get the component
6521 reference we will make. Unless the mask is all ones the width of
6522 that field, perform the mask operation. Then compare with the
6523 merged constant. */
6524 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6525 lntype, lnbitsize, lnbitpos,
6526 ll_unsignedp || rl_unsignedp, ll_reversep);
6527
6528 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6529 if (! all_ones_mask_p (ll_mask, lnbitsize))
6530 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6531
6532 return build2_loc (loc, wanted_code, truth_type, result,
6533 const_binop (BIT_IOR_EXPR, l_const, r_const));
6534 }
6535 \f
6536 /* T is an integer expression that is being multiplied, divided, or taken a
6537 modulus (CODE says which and what kind of divide or modulus) by a
6538 constant C. See if we can eliminate that operation by folding it with
6539 other operations already in T. WIDE_TYPE, if non-null, is a type that
6540 should be used for the computation if wider than our type.
6541
6542 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6543 (X * 2) + (Y * 4). We must, however, be assured that either the original
6544 expression would not overflow or that overflow is undefined for the type
6545 in the language in question.
6546
6547 If we return a non-null expression, it is an equivalent form of the
6548 original computation, but need not be in the original type.
6549
6550 We set *STRICT_OVERFLOW_P to true if the return value depends on
6551 signed overflow being undefined. Otherwise we do not change
6552 *STRICT_OVERFLOW_P. */
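 As an example, for signed int X the expression (X * 8) / 4 can be
 rewritten as X * 2; this is only valid because signed overflow is
 undefined, so *STRICT_OVERFLOW_P is set to true in that case. */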
6553
6554 static tree
6555 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6556 bool *strict_overflow_p)
6557 {
6558 /* To avoid exponential search depth, refuse to allow recursion past
6559 three levels. Beyond that (1) it's highly unlikely that we'll find
6560 something interesting and (2) we've probably processed it before
6561 when we built the inner expression. */
6562
6563 static int depth;
6564 tree ret;
6565
6566 if (depth > 3)
6567 return NULL;
6568
6569 depth++;
6570 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6571 depth--;
6572
6573 return ret;
6574 }
6575
6576 static tree
6577 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6578 bool *strict_overflow_p)
6579 {
6580 tree type = TREE_TYPE (t);
6581 enum tree_code tcode = TREE_CODE (t);
6582 tree ctype = (wide_type != 0
6583 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6584 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6585 ? wide_type : type);
6586 tree t1, t2;
6587 int same_p = tcode == code;
6588 tree op0 = NULL_TREE, op1 = NULL_TREE;
6589 bool sub_strict_overflow_p;
6590
6591 /* Don't deal with constants of zero here; they confuse the code below. */
6592 if (integer_zerop (c))
6593 return NULL_TREE;
6594
6595 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6596 op0 = TREE_OPERAND (t, 0);
6597
6598 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6599 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6600
6601 /* Note that we need not handle conditional operations here since fold
6602 already handles those cases. So just do arithmetic here. */
6603 switch (tcode)
6604 {
6605 case INTEGER_CST:
6606 /* For a constant, we can always simplify if we are a multiply
6607 or (for divide and modulus) if it is a multiple of our constant. */
6608 if (code == MULT_EXPR
6609 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6610 TYPE_SIGN (type)))
6611 {
6612 tree tem = const_binop (code, fold_convert (ctype, t),
6613 fold_convert (ctype, c));
6614 /* If the multiplication overflowed, we lost information on it.
6615 See PR68142 and PR69845. */
6616 if (TREE_OVERFLOW (tem))
6617 return NULL_TREE;
6618 return tem;
6619 }
6620 break;
6621
6622 CASE_CONVERT: case NON_LVALUE_EXPR:
6623 /* If op0 is an expression ... */
6624 if ((COMPARISON_CLASS_P (op0)
6625 || UNARY_CLASS_P (op0)
6626 || BINARY_CLASS_P (op0)
6627 || VL_EXP_CLASS_P (op0)
6628 || EXPRESSION_CLASS_P (op0))
6629 /* ... and has wrapping overflow, and its type is smaller
6630 than ctype, then we cannot pass through as widening. */
6631 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6632 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6633 && (TYPE_PRECISION (ctype)
6634 > TYPE_PRECISION (TREE_TYPE (op0))))
6635 /* ... or this is a truncation (t is narrower than op0),
6636 then we cannot pass through this narrowing. */
6637 || (TYPE_PRECISION (type)
6638 < TYPE_PRECISION (TREE_TYPE (op0)))
6639 /* ... or signedness changes for division or modulus,
6640 then we cannot pass through this conversion. */
6641 || (code != MULT_EXPR
6642 && (TYPE_UNSIGNED (ctype)
6643 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6644 /* ... or has undefined overflow while the converted to
6645 type has not, we cannot do the operation in the inner type
6646 as that would introduce undefined overflow. */
6647 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6648 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6649 && !TYPE_OVERFLOW_UNDEFINED (type))))
6650 break;
6651
6652 /* Pass the constant down and see if we can make a simplification. If
6653 we can, replace this expression with the inner simplification for
6654 possible later conversion to our or some other type. */
6655 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6656 && TREE_CODE (t2) == INTEGER_CST
6657 && !TREE_OVERFLOW (t2)
6658 && (t1 = extract_muldiv (op0, t2, code,
6659 code == MULT_EXPR ? ctype : NULL_TREE,
6660 strict_overflow_p)) != 0)
6661 return t1;
6662 break;
6663
6664 case ABS_EXPR:
6665 /* If widening the type changes it from signed to unsigned, then we
6666 must avoid building ABS_EXPR itself as unsigned. */
6667 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6668 {
6669 tree cstype = (*signed_type_for) (ctype);
6670 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6671 != 0)
6672 {
6673 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6674 return fold_convert (ctype, t1);
6675 }
6676 break;
6677 }
6678 /* If the constant is negative, we cannot simplify this. */
6679 if (tree_int_cst_sgn (c) == -1)
6680 break;
6681 /* FALLTHROUGH */
6682 case NEGATE_EXPR:
6683 /* For division and modulus, type can't be unsigned, as e.g.
6684 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6685 For signed types, even with wrapping overflow, this is fine. */
6686 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6687 break;
6688 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6689 != 0)
6690 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6691 break;
6692
6693 case MIN_EXPR: case MAX_EXPR:
6694 /* If widening the type changes the signedness, then we can't perform
6695 this optimization as that changes the result. */
6696 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6697 break;
6698
6699 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6700 sub_strict_overflow_p = false;
6701 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6702 &sub_strict_overflow_p)) != 0
6703 && (t2 = extract_muldiv (op1, c, code, wide_type,
6704 &sub_strict_overflow_p)) != 0)
6705 {
6706 if (tree_int_cst_sgn (c) < 0)
6707 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6708 if (sub_strict_overflow_p)
6709 *strict_overflow_p = true;
6710 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6711 fold_convert (ctype, t2));
6712 }
6713 break;
6714
6715 case LSHIFT_EXPR: case RSHIFT_EXPR:
6716 /* If the second operand is constant, this is a multiplication
6717 or floor division, by a power of two, so we can treat it that
6718 way unless the multiplier or divisor overflows. Signed
6719 left-shift overflow is implementation-defined rather than
6720 undefined in C90, so do not convert signed left shift into
6721 multiplication. */
6722 if (TREE_CODE (op1) == INTEGER_CST
6723 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6724 /* const_binop may not detect overflow correctly,
6725 so check for it explicitly here. */
6726 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6727 wi::to_wide (op1))
6728 && (t1 = fold_convert (ctype,
6729 const_binop (LSHIFT_EXPR, size_one_node,
6730 op1))) != 0
6731 && !TREE_OVERFLOW (t1))
6732 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6733 ? MULT_EXPR : FLOOR_DIV_EXPR,
6734 ctype,
6735 fold_convert (ctype, op0),
6736 t1),
6737 c, code, wide_type, strict_overflow_p);
6738 break;
6739
6740 case PLUS_EXPR: case MINUS_EXPR:
6741 /* See if we can eliminate the operation on both sides. If we can, we
6742 can return a new PLUS or MINUS. If we can't, the only remaining
6743 cases where we can do anything are if the second operand is a
6744 constant. */
6745 sub_strict_overflow_p = false;
6746 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6747 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6748 if (t1 != 0 && t2 != 0
6749 && TYPE_OVERFLOW_WRAPS (ctype)
6750 && (code == MULT_EXPR
6751 /* If not multiplication, we can only do this if both operands
6752 are divisible by c. */
6753 || (multiple_of_p (ctype, op0, c)
6754 && multiple_of_p (ctype, op1, c))))
6755 {
6756 if (sub_strict_overflow_p)
6757 *strict_overflow_p = true;
6758 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6759 fold_convert (ctype, t2));
6760 }
6761
6762 /* If this was a subtraction, negate OP1 and set it to be an addition.
6763 This simplifies the logic below. */
6764 if (tcode == MINUS_EXPR)
6765 {
6766 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6767 /* If OP1 was not easily negatable, the constant may be OP0. */
6768 if (TREE_CODE (op0) == INTEGER_CST)
6769 {
6770 std::swap (op0, op1);
6771 std::swap (t1, t2);
6772 }
6773 }
6774
6775 if (TREE_CODE (op1) != INTEGER_CST)
6776 break;
6777
6778 /* If either OP1 or C are negative, this optimization is not safe for
6779 some of the division and remainder types while for others we need
6780 to change the code. */
6781 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6782 {
6783 if (code == CEIL_DIV_EXPR)
6784 code = FLOOR_DIV_EXPR;
6785 else if (code == FLOOR_DIV_EXPR)
6786 code = CEIL_DIV_EXPR;
6787 else if (code != MULT_EXPR
6788 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6789 break;
6790 }
6791
6792 /* If it's a multiply or a division/modulus operation of a multiple
6793 of our constant, do the operation and verify it doesn't overflow. */
6794 if (code == MULT_EXPR
6795 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6796 TYPE_SIGN (type)))
6797 {
6798 op1 = const_binop (code, fold_convert (ctype, op1),
6799 fold_convert (ctype, c));
6800 /* We allow the constant to overflow with wrapping semantics. */
6801 if (op1 == 0
6802 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6803 break;
6804 }
6805 else
6806 break;
6807
6808 /* If we have an unsigned type, we cannot widen the operation since it
6809 will change the result if the original computation overflowed. */
6810 if (TYPE_UNSIGNED (ctype) && ctype != type)
6811 break;
6812
6813 /* The last case is if we are a multiply. In that case, we can
6814 apply the distributive law to commute the multiply and addition
6815 if the multiplication of the constants doesn't overflow
6816 and overflow is defined. With undefined overflow
6817 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6818 But fold_plusminus_mult_expr would factor back any power-of-two
6819 value so do not distribute in the first place in this case. */
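 /* E.g. for unsigned X, (X + 3) * 6 distributes to X * 6 + 18,
 whereas (X + 3) * 4 is left alone here because the power-of-two
 factor would just be factored back again. */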
6820 if (code == MULT_EXPR
6821 && TYPE_OVERFLOW_WRAPS (ctype)
6822 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6823 return fold_build2 (tcode, ctype,
6824 fold_build2 (code, ctype,
6825 fold_convert (ctype, op0),
6826 fold_convert (ctype, c)),
6827 op1);
6828
6829 break;
6830
6831 case MULT_EXPR:
6832 /* We have a special case here if we are doing something like
6833 (C * 8) % 4 since we know that's zero. */
6834 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6835 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6836 /* If the multiplication can overflow we cannot optimize this. */
6837 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6838 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6839 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6840 TYPE_SIGN (type)))
6841 {
6842 *strict_overflow_p = true;
6843 return omit_one_operand (type, integer_zero_node, op0);
6844 }
6845
6846 /* ... fall through ... */
6847
6848 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6849 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6850 /* If we can extract our operation from the LHS, do so and return a
6851 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6852 do something only if the second operand is a constant. */
6853 if (same_p
6854 && TYPE_OVERFLOW_WRAPS (ctype)
6855 && (t1 = extract_muldiv (op0, c, code, wide_type,
6856 strict_overflow_p)) != 0)
6857 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6858 fold_convert (ctype, op1));
6859 else if (tcode == MULT_EXPR && code == MULT_EXPR
6860 && TYPE_OVERFLOW_WRAPS (ctype)
6861 && (t1 = extract_muldiv (op1, c, code, wide_type,
6862 strict_overflow_p)) != 0)
6863 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6864 fold_convert (ctype, t1));
6865 else if (TREE_CODE (op1) != INTEGER_CST)
6866 return 0;
6867
6868 /* If these are the same operation types, we can associate them
6869 assuming no overflow. */
6870 if (tcode == code)
6871 {
6872 bool overflow_p = false;
6873 wi::overflow_type overflow_mul;
6874 signop sign = TYPE_SIGN (ctype);
6875 unsigned prec = TYPE_PRECISION (ctype);
6876 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6877 wi::to_wide (c, prec),
6878 sign, &overflow_mul);
6879 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6880 if (overflow_mul
6881 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6882 overflow_p = true;
6883 if (!overflow_p)
6884 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6885 wide_int_to_tree (ctype, mul));
6886 }
6887
6888 /* If these operations "cancel" each other, we have the main
6889 optimizations of this pass, which occur when either constant is a
6890 multiple of the other, in which case we replace this with an
6891 operation of either CODE or TCODE.
6892
6893 If we have an unsigned type, we cannot do this since it will change
6894 the result if the original computation overflowed. */
6895 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6896 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6897 || (tcode == MULT_EXPR
6898 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6899 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6900 && code != MULT_EXPR)))
6901 {
6902 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6903 TYPE_SIGN (type)))
6904 {
6905 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6906 *strict_overflow_p = true;
6907 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6908 fold_convert (ctype,
6909 const_binop (TRUNC_DIV_EXPR,
6910 op1, c)));
6911 }
6912 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6913 TYPE_SIGN (type)))
6914 {
6915 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6916 *strict_overflow_p = true;
6917 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6918 fold_convert (ctype,
6919 const_binop (TRUNC_DIV_EXPR,
6920 c, op1)));
6921 }
6922 }
6923 break;
6924
6925 default:
6926 break;
6927 }
6928
6929 return 0;
6930 }
6931 \f
6932 /* Return a node which has the indicated constant VALUE (either 0 or
6933 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6934 and is of the indicated TYPE. */
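 E.g. for a vector type with four int elements, VALUE == true yields
 the constant { -1, -1, -1, -1 }, matching the all-ones masks that
 vector comparisons produce. */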
6935
6936 tree
6937 constant_boolean_node (bool value, tree type)
6938 {
6939 if (type == integer_type_node)
6940 return value ? integer_one_node : integer_zero_node;
6941 else if (type == boolean_type_node)
6942 return value ? boolean_true_node : boolean_false_node;
6943 else if (TREE_CODE (type) == VECTOR_TYPE)
6944 return build_vector_from_val (type,
6945 build_int_cst (TREE_TYPE (type),
6946 value ? -1 : 0));
6947 else
6948 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6949 }
6950
6951
6952 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6953 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6954 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6955 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6956 COND is the first argument to CODE; otherwise (as in the example
6957 given here), it is the second argument. TYPE is the type of the
6958 original expression. Return NULL_TREE if no simplification is
6959 possible. */
6960
6961 static tree
6962 fold_binary_op_with_conditional_arg (location_t loc,
6963 enum tree_code code,
6964 tree type, tree op0, tree op1,
6965 tree cond, tree arg, int cond_first_p)
6966 {
6967 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6968 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6969 tree test, true_value, false_value;
6970 tree lhs = NULL_TREE;
6971 tree rhs = NULL_TREE;
6972 enum tree_code cond_code = COND_EXPR;
6973
6974 /* Do not move possibly trapping operations into the conditional as this
6975 pessimizes code and causes gimplification issues when applied late. */
6976 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
6977 ANY_INTEGRAL_TYPE_P (type)
6978 && TYPE_OVERFLOW_TRAPS (type), op1))
6979 return NULL_TREE;
6980
6981 if (TREE_CODE (cond) == COND_EXPR
6982 || TREE_CODE (cond) == VEC_COND_EXPR)
6983 {
6984 test = TREE_OPERAND (cond, 0);
6985 true_value = TREE_OPERAND (cond, 1);
6986 false_value = TREE_OPERAND (cond, 2);
6987 	 /* If this operand is a throw expression (its type is void), then
6988 	    it does not make sense to try to perform a logical or arithmetic
6989 	    operation involving it.  */
6990 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6991 lhs = true_value;
6992 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6993 rhs = false_value;
6994 }
6995 else if (!(TREE_CODE (type) != VECTOR_TYPE
6996 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6997 {
6998 tree testtype = TREE_TYPE (cond);
6999 test = cond;
7000 true_value = constant_boolean_node (true, testtype);
7001 false_value = constant_boolean_node (false, testtype);
7002 }
7003 else
7004 /* Detect the case of mixing vector and scalar types - bail out. */
7005 return NULL_TREE;
7006
7007 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7008 cond_code = VEC_COND_EXPR;
7009
7010 /* This transformation is only worthwhile if we don't have to wrap ARG
7011 in a SAVE_EXPR and the operation can be simplified without recursing
7012      on at least one of the branches once it's pushed inside the COND_EXPR.  */
7013 if (!TREE_CONSTANT (arg)
7014 && (TREE_SIDE_EFFECTS (arg)
7015 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7016 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7017 return NULL_TREE;
7018
7019 arg = fold_convert_loc (loc, arg_type, arg);
7020 if (lhs == 0)
7021 {
7022 true_value = fold_convert_loc (loc, cond_type, true_value);
7023 if (cond_first_p)
7024 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7025 else
7026 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7027 }
7028 if (rhs == 0)
7029 {
7030 false_value = fold_convert_loc (loc, cond_type, false_value);
7031 if (cond_first_p)
7032 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7033 else
7034 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7035 }
7036
7037 /* Check that we have simplified at least one of the branches. */
7038 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7039 return NULL_TREE;
7040
7041 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7042 }
7043
7044 \f
7045 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7046
7047 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7048 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
7049 ADDEND is the same as X.
7050
7051 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7052 and finite. The problematic cases are when X is zero, and its mode
7053 has signed zeros. In the case of rounding towards -infinity,
7054 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7055 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7056
7057 bool
7058 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7059 {
7060 if (!real_zerop (addend))
7061 return false;
7062
7063 /* Don't allow the fold with -fsignaling-nans. */
7064 if (HONOR_SNANS (type))
7065 return false;
7066
7067 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7068 if (!HONOR_SIGNED_ZEROS (type))
7069 return true;
7070
7071 /* There is no case that is safe for all rounding modes. */
7072 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7073 return false;
7074
7075 /* In a vector or complex, we would need to check the sign of all zeros. */
7076 if (TREE_CODE (addend) == VECTOR_CST)
7077 addend = uniform_vector_p (addend);
7078 if (!addend || TREE_CODE (addend) != REAL_CST)
7079 return false;
7080
7081 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7082 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7083 negate = !negate;
7084
7085 /* The mode has signed zeros, and we have to honor their sign.
7086 In this situation, there is only one case we can return true for.
7087 X - 0 is the same as X with default rounding. */
7088 return negate;
7089 }
7090
7091 /* Subroutine of match.pd that optimizes comparisons of a division by
7092 a nonzero integer constant against an integer constant, i.e.
7093 X/C1 op C2.
7094
7095 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7096    GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
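/* For example, for unsigned X and the comparison X / 4 == 3, C1 is 4 and
   C2 is 3, so PROD is 12 and TMP is 3; the function stores 12 in *LO and
   15 in *HI, i.e. the comparison is equivalent to 12 <= X && X <= 15.  */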
7097
7098 enum tree_code
7099 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7100 tree *hi, bool *neg_overflow)
7101 {
7102 tree prod, tmp, type = TREE_TYPE (c1);
7103 signop sign = TYPE_SIGN (type);
7104 wi::overflow_type overflow;
7105
7106 /* We have to do this the hard way to detect unsigned overflow.
7107 prod = int_const_binop (MULT_EXPR, c1, c2); */
7108 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7109 prod = force_fit_type (type, val, -1, overflow);
7110 *neg_overflow = false;
7111
7112 if (sign == UNSIGNED)
7113 {
7114 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7115 *lo = prod;
7116
7117 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7118 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7119 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7120 }
7121 else if (tree_int_cst_sgn (c1) >= 0)
7122 {
7123 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7124 switch (tree_int_cst_sgn (c2))
7125 {
7126 case -1:
7127 *neg_overflow = true;
7128 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7129 *hi = prod;
7130 break;
7131
7132 case 0:
7133 *lo = fold_negate_const (tmp, type);
7134 *hi = tmp;
7135 break;
7136
7137 case 1:
7138 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7139 *lo = prod;
7140 break;
7141
7142 default:
7143 gcc_unreachable ();
7144 }
7145 }
7146 else
7147 {
7148 /* A negative divisor reverses the relational operators. */
7149 code = swap_tree_comparison (code);
7150
7151 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7152 switch (tree_int_cst_sgn (c2))
7153 {
7154 case -1:
7155 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7156 *lo = prod;
7157 break;
7158
7159 case 0:
7160 *hi = fold_negate_const (tmp, type);
7161 *lo = tmp;
7162 break;
7163
7164 case 1:
7165 *neg_overflow = true;
7166 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7167 *hi = prod;
7168 break;
7169
7170 default:
7171 gcc_unreachable ();
7172 }
7173 }
7174
7175 if (code != EQ_EXPR && code != NE_EXPR)
7176 return code;
7177
7178 if (TREE_OVERFLOW (*lo)
7179 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7180 *lo = NULL_TREE;
7181 if (TREE_OVERFLOW (*hi)
7182 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7183 *hi = NULL_TREE;
7184
7185 return code;
7186 }
7187
7188
7189 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7190 equality/inequality test, then return a simplified form of the test
7191    using a sign test.  Otherwise return NULL.  TYPE is the desired
7192 result type. */
7193
7194 static tree
7195 fold_single_bit_test_into_sign_test (location_t loc,
7196 enum tree_code code, tree arg0, tree arg1,
7197 tree result_type)
7198 {
7199 /* If this is testing a single bit, we can optimize the test. */
7200 if ((code == NE_EXPR || code == EQ_EXPR)
7201 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7202 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7203 {
7204 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7205 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7206 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7207
7208 if (arg00 != NULL_TREE
7209 /* This is only a win if casting to a signed type is cheap,
7210 i.e. when arg00's type is not a partial mode. */
7211 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7212 {
7213 tree stype = signed_type_for (TREE_TYPE (arg00));
7214 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7215 result_type,
7216 fold_convert_loc (loc, stype, arg00),
7217 build_int_cst (stype, 0));
7218 }
7219 }
7220
7221 return NULL_TREE;
7222 }
7223
7224 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7225 equality/inequality test, then return a simplified form of
7226 the test using shifts and logical operations. Otherwise return
7227 NULL. TYPE is the desired result type. */
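/* Illustratively, (A & 8) != 0 becomes ((A >> 3) & 1) and (A & 8) == 0
   becomes (((A >> 3) ^ 1) & 1), modulo the intermediate-type conversions
   below.  When the tested bit is the sign bit,
   fold_single_bit_test_into_sign_test above instead produces a comparison
   against zero, e.g. (A & 0x80000000) != 0 becomes (int) A < 0 for a
   32-bit A.  */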
7228
7229 tree
7230 fold_single_bit_test (location_t loc, enum tree_code code,
7231 tree arg0, tree arg1, tree result_type)
7232 {
7233 /* If this is testing a single bit, we can optimize the test. */
7234 if ((code == NE_EXPR || code == EQ_EXPR)
7235 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7236 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7237 {
7238 tree inner = TREE_OPERAND (arg0, 0);
7239 tree type = TREE_TYPE (arg0);
7240 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7241 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7242 int ops_unsigned;
7243 tree signed_type, unsigned_type, intermediate_type;
7244 tree tem, one;
7245
7246 /* First, see if we can fold the single bit test into a sign-bit
7247 test. */
7248 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7249 result_type);
7250 if (tem)
7251 return tem;
7252
7253 /* Otherwise we have (A & C) != 0 where C is a single bit,
7254 	 convert that into ((A >> C2) & 1), where C2 = log2(C).
7255 Similarly for (A & C) == 0. */
7256
7257 /* If INNER is a right shift of a constant and it plus BITNUM does
7258 not overflow, adjust BITNUM and INNER. */
7259 if (TREE_CODE (inner) == RSHIFT_EXPR
7260 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7261 && bitnum < TYPE_PRECISION (type)
7262 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7263 TYPE_PRECISION (type) - bitnum))
7264 {
7265 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7266 inner = TREE_OPERAND (inner, 0);
7267 }
7268
7269 /* If we are going to be able to omit the AND below, we must do our
7270 operations as unsigned. If we must use the AND, we have a choice.
7271 Normally unsigned is faster, but for some machines signed is. */
7272 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7273 && !flag_syntax_only) ? 0 : 1;
7274
7275 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7276 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7277 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7278 inner = fold_convert_loc (loc, intermediate_type, inner);
7279
7280 if (bitnum != 0)
7281 inner = build2 (RSHIFT_EXPR, intermediate_type,
7282 inner, size_int (bitnum));
7283
7284 one = build_int_cst (intermediate_type, 1);
7285
7286 if (code == EQ_EXPR)
7287 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7288
7289 /* Put the AND last so it can combine with more things. */
7290 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7291
7292 /* Make sure to return the proper type. */
7293 inner = fold_convert_loc (loc, result_type, inner);
7294
7295 return inner;
7296 }
7297 return NULL_TREE;
7298 }
7299
7300 /* Test whether it is preferable to swap two operands, ARG0 and
7301 ARG1, for example because ARG0 is an integer constant and ARG1
7302 isn't. */
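/* For instance, for the expression 5 + x this returns true, because ARG0
   is an INTEGER_CST and ARG1 isn't, so callers canonicalize the
   expression as x + 5.  */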
7303
7304 bool
7305 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7306 {
7307 if (CONSTANT_CLASS_P (arg1))
7308 return 0;
7309 if (CONSTANT_CLASS_P (arg0))
7310 return 1;
7311
7312 STRIP_NOPS (arg0);
7313 STRIP_NOPS (arg1);
7314
7315 if (TREE_CONSTANT (arg1))
7316 return 0;
7317 if (TREE_CONSTANT (arg0))
7318 return 1;
7319
7320 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7321 for commutative and comparison operators. Ensuring a canonical
7322 form allows the optimizers to find additional redundancies without
7323 having to explicitly check for both orderings. */
7324 if (TREE_CODE (arg0) == SSA_NAME
7325 && TREE_CODE (arg1) == SSA_NAME
7326 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7327 return 1;
7328
7329 /* Put SSA_NAMEs last. */
7330 if (TREE_CODE (arg1) == SSA_NAME)
7331 return 0;
7332 if (TREE_CODE (arg0) == SSA_NAME)
7333 return 1;
7334
7335 /* Put variables last. */
7336 if (DECL_P (arg1))
7337 return 0;
7338 if (DECL_P (arg0))
7339 return 1;
7340
7341 return 0;
7342 }
7343
7344
7345 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7346 means A >= Y && A != MAX, but in this case we know that
7347 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7348
7349 static tree
7350 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7351 {
7352 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7353
7354 if (TREE_CODE (bound) == LT_EXPR)
7355 a = TREE_OPERAND (bound, 0);
7356 else if (TREE_CODE (bound) == GT_EXPR)
7357 a = TREE_OPERAND (bound, 1);
7358 else
7359 return NULL_TREE;
7360
7361 typea = TREE_TYPE (a);
7362 if (!INTEGRAL_TYPE_P (typea)
7363 && !POINTER_TYPE_P (typea))
7364 return NULL_TREE;
7365
7366 if (TREE_CODE (ineq) == LT_EXPR)
7367 {
7368 a1 = TREE_OPERAND (ineq, 1);
7369 y = TREE_OPERAND (ineq, 0);
7370 }
7371 else if (TREE_CODE (ineq) == GT_EXPR)
7372 {
7373 a1 = TREE_OPERAND (ineq, 0);
7374 y = TREE_OPERAND (ineq, 1);
7375 }
7376 else
7377 return NULL_TREE;
7378
7379 if (TREE_TYPE (a1) != typea)
7380 return NULL_TREE;
7381
7382 if (POINTER_TYPE_P (typea))
7383 {
7384 /* Convert the pointer types into integer before taking the difference. */
7385 tree ta = fold_convert_loc (loc, ssizetype, a);
7386 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7387 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7388 }
7389 else
7390 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7391
7392 if (!diff || !integer_onep (diff))
7393 return NULL_TREE;
7394
7395 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7396 }
7397
7398 /* Fold a sum or difference of at least one multiplication.
7399 Returns the folded tree or NULL if no simplification could be made. */
7400
7401 static tree
7402 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7403 tree arg0, tree arg1)
7404 {
7405 tree arg00, arg01, arg10, arg11;
7406 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7407
7408 /* (A * C) +- (B * C) -> (A+-B) * C.
7409 (A * C) +- A -> A * (C+-1).
7410 We are most concerned about the case where C is a constant,
7411 but other combinations show up during loop reduction. Since
7412 it is not difficult, try all four possibilities. */
7413
7414 if (TREE_CODE (arg0) == MULT_EXPR)
7415 {
7416 arg00 = TREE_OPERAND (arg0, 0);
7417 arg01 = TREE_OPERAND (arg0, 1);
7418 }
7419 else if (TREE_CODE (arg0) == INTEGER_CST)
7420 {
7421 arg00 = build_one_cst (type);
7422 arg01 = arg0;
7423 }
7424 else
7425 {
7426 /* We cannot generate constant 1 for fract. */
7427 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7428 return NULL_TREE;
7429 arg00 = arg0;
7430 arg01 = build_one_cst (type);
7431 }
7432 if (TREE_CODE (arg1) == MULT_EXPR)
7433 {
7434 arg10 = TREE_OPERAND (arg1, 0);
7435 arg11 = TREE_OPERAND (arg1, 1);
7436 }
7437 else if (TREE_CODE (arg1) == INTEGER_CST)
7438 {
7439 arg10 = build_one_cst (type);
7440       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7441 the purpose of this canonicalization. */
7442 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7443 && negate_expr_p (arg1)
7444 && code == PLUS_EXPR)
7445 {
7446 arg11 = negate_expr (arg1);
7447 code = MINUS_EXPR;
7448 }
7449 else
7450 arg11 = arg1;
7451 }
7452 else
7453 {
7454 /* We cannot generate constant 1 for fract. */
7455 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7456 return NULL_TREE;
7457 arg10 = arg1;
7458 arg11 = build_one_cst (type);
7459 }
7460 same = NULL_TREE;
7461
7462 /* Prefer factoring a common non-constant. */
7463 if (operand_equal_p (arg00, arg10, 0))
7464 same = arg00, alt0 = arg01, alt1 = arg11;
7465 else if (operand_equal_p (arg01, arg11, 0))
7466 same = arg01, alt0 = arg00, alt1 = arg10;
7467 else if (operand_equal_p (arg00, arg11, 0))
7468 same = arg00, alt0 = arg01, alt1 = arg10;
7469 else if (operand_equal_p (arg01, arg10, 0))
7470 same = arg01, alt0 = arg00, alt1 = arg11;
7471
7472 /* No identical multiplicands; see if we can find a common
7473 power-of-two factor in non-power-of-two multiplies. This
7474 can help in multi-dimensional array access. */
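  /* For example, i * 12 + j * 4 has no identical multiplicand, but 4 is a
     power of two that divides 12, so the code below rewrites the sum as
     (i * 3 + j) * 4.  */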
7475 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7476 {
7477 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7478 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7479 HOST_WIDE_INT tmp;
7480 bool swap = false;
7481 tree maybe_same;
7482
7483 /* Move min of absolute values to int11. */
7484 if (absu_hwi (int01) < absu_hwi (int11))
7485 {
7486 tmp = int01, int01 = int11, int11 = tmp;
7487 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7488 maybe_same = arg01;
7489 swap = true;
7490 }
7491 else
7492 maybe_same = arg11;
7493
7494 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7495 if (factor > 1
7496 && pow2p_hwi (factor)
7497 && (int01 & (factor - 1)) == 0
7498 /* The remainder should not be a constant, otherwise we
7499 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7500 	     increase the number of multiplications needed.  */
7501 && TREE_CODE (arg10) != INTEGER_CST)
7502 {
7503 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7504 build_int_cst (TREE_TYPE (arg00),
7505 int01 / int11));
7506 alt1 = arg10;
7507 same = maybe_same;
7508 if (swap)
7509 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7510 }
7511 }
7512
7513 if (!same)
7514 return NULL_TREE;
7515
7516 if (! ANY_INTEGRAL_TYPE_P (type)
7517 || TYPE_OVERFLOW_WRAPS (type)
7518 /* We are neither factoring zero nor minus one. */
7519 || TREE_CODE (same) == INTEGER_CST)
7520 return fold_build2_loc (loc, MULT_EXPR, type,
7521 fold_build2_loc (loc, code, type,
7522 fold_convert_loc (loc, type, alt0),
7523 fold_convert_loc (loc, type, alt1)),
7524 fold_convert_loc (loc, type, same));
7525
7526 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7527 same may be minus one and thus the multiplication may overflow. Perform
7528 the sum operation in an unsigned type. */
7529 tree utype = unsigned_type_for (type);
7530 tree tem = fold_build2_loc (loc, code, utype,
7531 fold_convert_loc (loc, utype, alt0),
7532 fold_convert_loc (loc, utype, alt1));
7533   /* If the sum evaluated to a constant that is not -INF, the multiplication
7534      cannot overflow.  */
7535 if (TREE_CODE (tem) == INTEGER_CST
7536 && (wi::to_wide (tem)
7537 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7538 return fold_build2_loc (loc, MULT_EXPR, type,
7539 fold_convert (type, tem), same);
7540
7541 /* Do not resort to unsigned multiplication because
7542 we lose the no-overflow property of the expression. */
7543 return NULL_TREE;
7544 }
7545
7546 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7547 specified by EXPR into the buffer PTR of length LEN bytes.
7548 Return the number of bytes placed in the buffer, or zero
7549 upon failure. */
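/* As an illustration, assuming a little-endian target with 8-bit bytes
   and a word size of at least four bytes: encoding the 32-bit INTEGER_CST
   0x11223344 stores the bytes 0x44 0x33 0x22 0x11 into PTR, while a
   big-endian target stores them in the opposite order.  */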
7550
7551 static int
7552 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7553 {
7554 tree type = TREE_TYPE (expr);
7555 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7556 int byte, offset, word, words;
7557 unsigned char value;
7558
7559 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7560 return 0;
7561 if (off == -1)
7562 off = 0;
7563
7564 if (ptr == NULL)
7565 /* Dry run. */
7566 return MIN (len, total_bytes - off);
7567
7568 words = total_bytes / UNITS_PER_WORD;
7569
7570 for (byte = 0; byte < total_bytes; byte++)
7571 {
7572 int bitpos = byte * BITS_PER_UNIT;
7573 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7574 number of bytes. */
7575 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7576
7577 if (total_bytes > UNITS_PER_WORD)
7578 {
7579 word = byte / UNITS_PER_WORD;
7580 if (WORDS_BIG_ENDIAN)
7581 word = (words - 1) - word;
7582 offset = word * UNITS_PER_WORD;
7583 if (BYTES_BIG_ENDIAN)
7584 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7585 else
7586 offset += byte % UNITS_PER_WORD;
7587 }
7588 else
7589 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7590 if (offset >= off && offset - off < len)
7591 ptr[offset - off] = value;
7592 }
7593 return MIN (len, total_bytes - off);
7594 }
7595
7596
7597 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7598 specified by EXPR into the buffer PTR of length LEN bytes.
7599 Return the number of bytes placed in the buffer, or zero
7600 upon failure. */
7601
7602 static int
7603 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7604 {
7605 tree type = TREE_TYPE (expr);
7606 scalar_mode mode = SCALAR_TYPE_MODE (type);
7607 int total_bytes = GET_MODE_SIZE (mode);
7608 FIXED_VALUE_TYPE value;
7609 tree i_value, i_type;
7610
7611 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7612 return 0;
7613
7614 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7615
7616   if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7617 return 0;
7618
7619 value = TREE_FIXED_CST (expr);
7620 i_value = double_int_to_tree (i_type, value.data);
7621
7622 return native_encode_int (i_value, ptr, len, off);
7623 }
7624
7625
7626 /* Subroutine of native_encode_expr. Encode the REAL_CST
7627 specified by EXPR into the buffer PTR of length LEN bytes.
7628 Return the number of bytes placed in the buffer, or zero
7629 upon failure. */
7630
7631 static int
7632 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7633 {
7634 tree type = TREE_TYPE (expr);
7635 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7636 int byte, offset, word, words, bitpos;
7637 unsigned char value;
7638
7639 /* There are always 32 bits in each long, no matter the size of
7640      the host's long.  We handle floating point representations with
7641 up to 192 bits. */
7642 long tmp[6];
7643
7644 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7645 return 0;
7646 if (off == -1)
7647 off = 0;
7648
7649 if (ptr == NULL)
7650 /* Dry run. */
7651 return MIN (len, total_bytes - off);
7652
7653 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7654
7655 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7656
7657 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7658 bitpos += BITS_PER_UNIT)
7659 {
7660 byte = (bitpos / BITS_PER_UNIT) & 3;
7661 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7662
7663 if (UNITS_PER_WORD < 4)
7664 {
7665 word = byte / UNITS_PER_WORD;
7666 if (WORDS_BIG_ENDIAN)
7667 word = (words - 1) - word;
7668 offset = word * UNITS_PER_WORD;
7669 if (BYTES_BIG_ENDIAN)
7670 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7671 else
7672 offset += byte % UNITS_PER_WORD;
7673 }
7674 else
7675 {
7676 offset = byte;
7677 if (BYTES_BIG_ENDIAN)
7678 {
7679 /* Reverse bytes within each long, or within the entire float
7680 if it's smaller than a long (for HFmode). */
7681 offset = MIN (3, total_bytes - 1) - offset;
7682 gcc_assert (offset >= 0);
7683 }
7684 }
7685 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7686 if (offset >= off
7687 && offset - off < len)
7688 ptr[offset - off] = value;
7689 }
7690 return MIN (len, total_bytes - off);
7691 }
7692
7693 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7694 specified by EXPR into the buffer PTR of length LEN bytes.
7695 Return the number of bytes placed in the buffer, or zero
7696 upon failure. */
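/* The encoding is simply the real part followed by the imaginary part,
   e.g. a _Complex double constant 1.0 + 2.0i is the eight bytes of 1.0
   followed by the eight bytes of 2.0, assuming a 64-bit double.  */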
7697
7698 static int
7699 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7700 {
7701 int rsize, isize;
7702 tree part;
7703
7704 part = TREE_REALPART (expr);
7705 rsize = native_encode_expr (part, ptr, len, off);
7706 if (off == -1 && rsize == 0)
7707 return 0;
7708 part = TREE_IMAGPART (expr);
7709 if (off != -1)
7710 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7711 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7712 len - rsize, off);
7713 if (off == -1 && isize != rsize)
7714 return 0;
7715 return rsize + isize;
7716 }
7717
7718 /* Like native_encode_vector, but only encode the first COUNT elements.
7719 The other arguments are as for native_encode_vector. */
7720
7721 static int
7722 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7723 int off, unsigned HOST_WIDE_INT count)
7724 {
7725 unsigned HOST_WIDE_INT i;
7726 int size, offset;
7727 tree itype, elem;
7728
7729 offset = 0;
7730 itype = TREE_TYPE (TREE_TYPE (expr));
7731 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7732 for (i = 0; i < count; i++)
7733 {
7734 if (off >= size)
7735 {
7736 off -= size;
7737 continue;
7738 }
7739 elem = VECTOR_CST_ELT (expr, i);
7740 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7741 len - offset, off);
7742 if ((off == -1 && res != size) || res == 0)
7743 return 0;
7744 offset += res;
7745 if (offset >= len)
7746 return (off == -1 && i < count - 1) ? 0 : offset;
7747 if (off != -1)
7748 off = 0;
7749 }
7750 return offset;
7751 }
7752
7753 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7754 specified by EXPR into the buffer PTR of length LEN bytes.
7755 Return the number of bytes placed in the buffer, or zero
7756 upon failure. */
7757
7758 static int
7759 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7760 {
7761 unsigned HOST_WIDE_INT count;
7762 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7763 return 0;
7764 return native_encode_vector_part (expr, ptr, len, off, count);
7765 }
7766
7767
7768 /* Subroutine of native_encode_expr. Encode the STRING_CST
7769 specified by EXPR into the buffer PTR of length LEN bytes.
7770 Return the number of bytes placed in the buffer, or zero
7771 upon failure. */
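/* For instance, for a STRING_CST whose type is char[8] but whose
   TREE_STRING_LENGTH is only 4 (as can happen for char s[8] = "abc"),
   the four bytes 'a' 'b' 'c' '\0' are copied and the remaining four
   bytes are zero-filled.  */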
7772
7773 static int
7774 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7775 {
7776 tree type = TREE_TYPE (expr);
7777
7778   /* Wide-char strings are encoded in target byte order, so natively
7779      encoding them is trivial.  */
7780 if (BITS_PER_UNIT != CHAR_BIT
7781 || TREE_CODE (type) != ARRAY_TYPE
7782 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7783 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7784 return 0;
7785
7786 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7787 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7788 return 0;
7789 if (off == -1)
7790 off = 0;
7791 if (ptr == NULL)
7792 /* Dry run. */;
7793 else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7794 {
7795 int written = 0;
7796 if (off < TREE_STRING_LENGTH (expr))
7797 {
7798 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7799 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7800 }
7801 memset (ptr + written, 0,
7802 MIN (total_bytes - written, len - written));
7803 }
7804 else
7805 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7806 return MIN (total_bytes - off, len);
7807 }
7808
7809
7810 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7811 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7812 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7813 anything, just do a dry run. If OFF is not -1 then start
7814 the encoding at byte offset OFF and encode at most LEN bytes.
7815 Return the number of bytes placed in the buffer, or zero upon failure. */
7816
7817 int
7818 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7819 {
7820   /* We don't support starting at a negative offset, and -1 is special.  */
7821 if (off < -1)
7822 return 0;
7823
7824 switch (TREE_CODE (expr))
7825 {
7826 case INTEGER_CST:
7827 return native_encode_int (expr, ptr, len, off);
7828
7829 case REAL_CST:
7830 return native_encode_real (expr, ptr, len, off);
7831
7832 case FIXED_CST:
7833 return native_encode_fixed (expr, ptr, len, off);
7834
7835 case COMPLEX_CST:
7836 return native_encode_complex (expr, ptr, len, off);
7837
7838 case VECTOR_CST:
7839 return native_encode_vector (expr, ptr, len, off);
7840
7841 case STRING_CST:
7842 return native_encode_string (expr, ptr, len, off);
7843
7844 default:
7845 return 0;
7846 }
7847 }
7848
7849
7850 /* Subroutine of native_interpret_expr. Interpret the contents of
7851 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7852 If the buffer cannot be interpreted, return NULL_TREE. */
7853
7854 static tree
7855 native_interpret_int (tree type, const unsigned char *ptr, int len)
7856 {
7857 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7858
7859 if (total_bytes > len
7860 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7861 return NULL_TREE;
7862
7863 wide_int result = wi::from_buffer (ptr, total_bytes);
7864
7865 return wide_int_to_tree (type, result);
7866 }
7867
7868
7869 /* Subroutine of native_interpret_expr. Interpret the contents of
7870 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7871 If the buffer cannot be interpreted, return NULL_TREE. */
7872
7873 static tree
7874 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7875 {
7876 scalar_mode mode = SCALAR_TYPE_MODE (type);
7877 int total_bytes = GET_MODE_SIZE (mode);
7878 double_int result;
7879 FIXED_VALUE_TYPE fixed_value;
7880
7881 if (total_bytes > len
7882 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7883 return NULL_TREE;
7884
7885 result = double_int::from_buffer (ptr, total_bytes);
7886 fixed_value = fixed_from_double_int (result, mode);
7887
7888 return build_fixed (type, fixed_value);
7889 }
7890
7891
7892 /* Subroutine of native_interpret_expr. Interpret the contents of
7893 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7894 If the buffer cannot be interpreted, return NULL_TREE. */
7895
7896 static tree
7897 native_interpret_real (tree type, const unsigned char *ptr, int len)
7898 {
7899 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7900 int total_bytes = GET_MODE_SIZE (mode);
7901 unsigned char value;
7902 /* There are always 32 bits in each long, no matter the size of
7903      the host's long.  We handle floating point representations with
7904 up to 192 bits. */
7905 REAL_VALUE_TYPE r;
7906 long tmp[6];
7907
7908 if (total_bytes > len || total_bytes > 24)
7909 return NULL_TREE;
7910 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7911
7912 memset (tmp, 0, sizeof (tmp));
7913 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7914 bitpos += BITS_PER_UNIT)
7915 {
7916 /* Both OFFSET and BYTE index within a long;
7917 bitpos indexes the whole float. */
7918 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7919 if (UNITS_PER_WORD < 4)
7920 {
7921 int word = byte / UNITS_PER_WORD;
7922 if (WORDS_BIG_ENDIAN)
7923 word = (words - 1) - word;
7924 offset = word * UNITS_PER_WORD;
7925 if (BYTES_BIG_ENDIAN)
7926 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7927 else
7928 offset += byte % UNITS_PER_WORD;
7929 }
7930 else
7931 {
7932 offset = byte;
7933 if (BYTES_BIG_ENDIAN)
7934 {
7935 /* Reverse bytes within each long, or within the entire float
7936 if it's smaller than a long (for HFmode). */
7937 offset = MIN (3, total_bytes - 1) - offset;
7938 gcc_assert (offset >= 0);
7939 }
7940 }
7941 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7942
7943 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7944 }
7945
7946 real_from_target (&r, tmp, mode);
7947 return build_real (type, r);
7948 }
7949
7950
7951 /* Subroutine of native_interpret_expr. Interpret the contents of
7952 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7953 If the buffer cannot be interpreted, return NULL_TREE. */
7954
7955 static tree
7956 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7957 {
7958 tree etype, rpart, ipart;
7959 int size;
7960
7961 etype = TREE_TYPE (type);
7962 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7963 if (size * 2 > len)
7964 return NULL_TREE;
7965 rpart = native_interpret_expr (etype, ptr, size);
7966 if (!rpart)
7967 return NULL_TREE;
7968 ipart = native_interpret_expr (etype, ptr+size, size);
7969 if (!ipart)
7970 return NULL_TREE;
7971 return build_complex (type, rpart, ipart);
7972 }
7973
7974
7975 /* Subroutine of native_interpret_expr. Interpret the contents of
7976 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7977 If the buffer cannot be interpreted, return NULL_TREE. */
7978
7979 static tree
7980 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
7981 {
7982 tree etype, elem;
7983 unsigned int i, size;
7984 unsigned HOST_WIDE_INT count;
7985
7986 etype = TREE_TYPE (type);
7987 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7988 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
7989 || size * count > len)
7990 return NULL_TREE;
7991
7992 tree_vector_builder elements (type, count, 1);
7993 for (i = 0; i < count; ++i)
7994 {
7995 elem = native_interpret_expr (etype, ptr+(i*size), size);
7996 if (!elem)
7997 return NULL_TREE;
7998 elements.quick_push (elem);
7999 }
8000 return elements.build ();
8001 }
8002
8003
8004 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8005 the buffer PTR of length LEN as a constant of type TYPE. For
8006 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8007 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8008 return NULL_TREE. */
8009
8010 tree
8011 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8012 {
8013 switch (TREE_CODE (type))
8014 {
8015 case INTEGER_TYPE:
8016 case ENUMERAL_TYPE:
8017 case BOOLEAN_TYPE:
8018 case POINTER_TYPE:
8019 case REFERENCE_TYPE:
8020 return native_interpret_int (type, ptr, len);
8021
8022 case REAL_TYPE:
8023 return native_interpret_real (type, ptr, len);
8024
8025 case FIXED_POINT_TYPE:
8026 return native_interpret_fixed (type, ptr, len);
8027
8028 case COMPLEX_TYPE:
8029 return native_interpret_complex (type, ptr, len);
8030
8031 case VECTOR_TYPE:
8032 return native_interpret_vector (type, ptr, len);
8033
8034 default:
8035 return NULL_TREE;
8036 }
8037 }
8038
8039 /* Returns true if we can interpret the contents of a native encoding
8040 as TYPE. */
8041
8042 static bool
8043 can_native_interpret_type_p (tree type)
8044 {
8045 switch (TREE_CODE (type))
8046 {
8047 case INTEGER_TYPE:
8048 case ENUMERAL_TYPE:
8049 case BOOLEAN_TYPE:
8050 case POINTER_TYPE:
8051 case REFERENCE_TYPE:
8052 case FIXED_POINT_TYPE:
8053 case REAL_TYPE:
8054 case COMPLEX_TYPE:
8055 case VECTOR_TYPE:
8056 return true;
8057 default:
8058 return false;
8059 }
8060 }
8061
8062 /* Read a vector of type TYPE from the target memory image given by BYTES,
8063 starting at byte FIRST_BYTE. The vector is known to be encodable using
8064 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each,
8065 and BYTES is known to have enough bytes to supply NPATTERNS *
8066 NELTS_PER_PATTERN vector elements. Each element of BYTES contains
8067 BITS_PER_UNIT bits and the bytes are in target memory order.
8068
8069 Return the vector on success, otherwise return null. */
8070
8071 static tree
8072 native_decode_vector_tree (tree type, vec<unsigned char> bytes,
8073 unsigned int first_byte, unsigned int npatterns,
8074 unsigned int nelts_per_pattern)
8075 {
8076 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8077 tree elt_type = TREE_TYPE (type);
8078 unsigned int elt_bits = tree_to_uhwi (TYPE_SIZE (elt_type));
8079 if (VECTOR_BOOLEAN_TYPE_P (type) && elt_bits <= BITS_PER_UNIT)
8080 {
8081 /* This is the only case in which elements can be smaller than a byte.
8082 Element 0 is always in the lsb of the containing byte. */
8083 elt_bits = TYPE_PRECISION (elt_type);
8084 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8085 {
8086 unsigned int bit_index = first_byte * BITS_PER_UNIT + i * elt_bits;
8087 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8088 unsigned int lsb = bit_index % BITS_PER_UNIT;
8089 builder.quick_push (bytes[byte_index] & (1 << lsb)
8090 ? build_all_ones_cst (elt_type)
8091 : build_zero_cst (elt_type));
8092 }
8093 }
8094 else
8095 {
8096 unsigned int elt_bytes = elt_bits / BITS_PER_UNIT;
8097 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8098 {
8099 tree elt = native_interpret_expr (elt_type, &bytes[first_byte],
8100 elt_bytes);
8101 if (!elt)
8102 return NULL_TREE;
8103 builder.quick_push (elt);
8104 first_byte += elt_bytes;
8105 }
8106 }
8107 return builder.build ();
8108 }
8109
8110 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
8111 directly on the VECTOR_CST encoding, in a way that works for variable-
8112 length vectors. Return the resulting VECTOR_CST on success or null
8113 on failure. */
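/* A sketch of the idea, assuming a little-endian target: view-converting
   a variable-length vector that is a duplicate of the 32-bit integer 1
   (one pattern, one element per pattern) to a vector of 8-bit elements
   encodes one 32-bit element as the bytes 01 00 00 00 and reinterprets
   them as four interleaved byte patterns, giving the duplicated sequence
   { 1, 0, 0, 0, 1, 0, 0, 0, ... }.  */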
8114
8115 static tree
8116 fold_view_convert_vector_encoding (tree type, tree expr)
8117 {
8118 tree expr_type = TREE_TYPE (expr);
8119 poly_uint64 type_bits, expr_bits;
8120 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
8121 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
8122 return NULL_TREE;
8123
8124 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
8125 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
8126 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
8127 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
8128
8129 /* We can only preserve the semantics of a stepped pattern if the new
8130 vector element is an integer of the same size. */
8131 if (VECTOR_CST_STEPPED_P (expr)
8132 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
8133 return NULL_TREE;
8134
8135 /* The number of bits needed to encode one element from every pattern
8136 of the original vector. */
8137 unsigned int expr_sequence_bits
8138 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
8139
8140 /* The number of bits needed to encode one element from every pattern
8141 of the result. */
8142 unsigned int type_sequence_bits
8143 = least_common_multiple (expr_sequence_bits, type_elt_bits);
8144
8145 /* Don't try to read more bytes than are available, which can happen
8146 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
8147 The general VIEW_CONVERT handling can cope with that case, so there's
8148 no point complicating things here. */
8149 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
8150 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
8151 BITS_PER_UNIT);
8152 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
8153 if (known_gt (buffer_bits, expr_bits))
8154 return NULL_TREE;
8155
8156 /* Get enough bytes of EXPR to form the new encoding. */
8157 auto_vec<unsigned char, 128> buffer (buffer_bytes);
8158 buffer.quick_grow (buffer_bytes);
8159 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
8160 buffer_bits / expr_elt_bits)
8161 != (int) buffer_bytes)
8162 return NULL_TREE;
8163
8164 /* Reencode the bytes as TYPE. */
8165 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
8166 return native_decode_vector_tree (type, buffer, 0, type_npatterns,
8167 nelts_per_pattern);
8168 }
8169
8170 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8171 TYPE at compile-time. If we're unable to perform the conversion
8172 return NULL_TREE. */
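/* For example, assuming the target's float is IEEE single precision: a
   VIEW_CONVERT_EXPR from the 32-bit integer constant 0x3f800000 to float
   folds to 1.0f, since the integer is encoded into the buffer and the
   same bytes are then reinterpreted as a REAL_CST.  */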
8173
8174 static tree
8175 fold_view_convert_expr (tree type, tree expr)
8176 {
8177 /* We support up to 512-bit values (for V8DFmode). */
8178 unsigned char buffer[64];
8179 int len;
8180
8181 /* Check that the host and target are sane. */
8182 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8183 return NULL_TREE;
8184
8185 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
8186 if (tree res = fold_view_convert_vector_encoding (type, expr))
8187 return res;
8188
8189 len = native_encode_expr (expr, buffer, sizeof (buffer));
8190 if (len == 0)
8191 return NULL_TREE;
8192
8193 return native_interpret_expr (type, buffer, len);
8194 }
8195
8196 /* Build an expression for the address of T. Folds away INDIRECT_REF
8197 to avoid confusing the gimplify process. */
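/* For instance, taking the address of *p yields p itself (converted to
   PTRTYPE if needed), and the address of a MEM_REF with a zero offset
   likewise folds back to its base pointer.  */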
8198
8199 tree
8200 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8201 {
8202 /* The size of the object is not relevant when talking about its address. */
8203 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8204 t = TREE_OPERAND (t, 0);
8205
8206 if (TREE_CODE (t) == INDIRECT_REF)
8207 {
8208 t = TREE_OPERAND (t, 0);
8209
8210 if (TREE_TYPE (t) != ptrtype)
8211 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
8212 }
8213 else if (TREE_CODE (t) == MEM_REF
8214 && integer_zerop (TREE_OPERAND (t, 1)))
8215 return TREE_OPERAND (t, 0);
8216 else if (TREE_CODE (t) == MEM_REF
8217 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
8218 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
8219 TREE_OPERAND (t, 0),
8220 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
8221 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8222 {
8223 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8224
8225 if (TREE_TYPE (t) != ptrtype)
8226 t = fold_convert_loc (loc, ptrtype, t);
8227 }
8228 else
8229 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
8230
8231 return t;
8232 }
8233
8234 /* Build an expression for the address of T. */
8235
8236 tree
8237 build_fold_addr_expr_loc (location_t loc, tree t)
8238 {
8239 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8240
8241 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8242 }
8243
8244 /* Fold a unary expression of code CODE and type TYPE with operand
8245 OP0. Return the folded expression if folding is successful.
8246 Otherwise, return NULL_TREE. */
8247
8248 tree
8249 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8250 {
8251 tree tem;
8252 tree arg0;
8253 enum tree_code_class kind = TREE_CODE_CLASS (code);
8254
8255 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8256 && TREE_CODE_LENGTH (code) == 1);
8257
8258 arg0 = op0;
8259 if (arg0)
8260 {
8261 if (CONVERT_EXPR_CODE_P (code)
8262 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
8263 {
8264 /* Don't use STRIP_NOPS, because signedness of argument type
8265 matters. */
8266 STRIP_SIGN_NOPS (arg0);
8267 }
8268 else
8269 {
8270 /* Strip any conversions that don't change the mode. This
8271 is safe for every expression, except for a comparison
8272 expression because its signedness is derived from its
8273 operands.
8274
8275 Note that this is done as an internal manipulation within
8276 the constant folder, in order to find the simplest
8277 representation of the arguments so that their form can be
8278 studied. In any cases, the appropriate type conversions
8279 should be put back in the tree that will get out of the
8280 constant folder. */
8281 STRIP_NOPS (arg0);
8282 }
8283
8284 if (CONSTANT_CLASS_P (arg0))
8285 {
8286 tree tem = const_unop (code, type, arg0);
8287 if (tem)
8288 {
8289 if (TREE_TYPE (tem) != type)
8290 tem = fold_convert_loc (loc, type, tem);
8291 return tem;
8292 }
8293 }
8294 }
8295
8296 tem = generic_simplify (loc, code, type, op0);
8297 if (tem)
8298 return tem;
8299
8300 if (TREE_CODE_CLASS (code) == tcc_unary)
8301 {
8302 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8303 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8304 fold_build1_loc (loc, code, type,
8305 fold_convert_loc (loc, TREE_TYPE (op0),
8306 TREE_OPERAND (arg0, 1))));
8307 else if (TREE_CODE (arg0) == COND_EXPR)
8308 {
8309 tree arg01 = TREE_OPERAND (arg0, 1);
8310 tree arg02 = TREE_OPERAND (arg0, 2);
8311 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8312 arg01 = fold_build1_loc (loc, code, type,
8313 fold_convert_loc (loc,
8314 TREE_TYPE (op0), arg01));
8315 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8316 arg02 = fold_build1_loc (loc, code, type,
8317 fold_convert_loc (loc,
8318 TREE_TYPE (op0), arg02));
8319 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8320 arg01, arg02);
8321
8322 	  /* If this was a conversion, and all we did was to move it
8323 	     inside the COND_EXPR, bring it back out.  But leave it if
8324 it is a conversion from integer to integer and the
8325 result precision is no wider than a word since such a
8326 conversion is cheap and may be optimized away by combine,
8327 while it couldn't if it were outside the COND_EXPR. Then return
8328 so we don't get into an infinite recursion loop taking the
8329 conversion out and then back in. */
8330
8331 if ((CONVERT_EXPR_CODE_P (code)
8332 || code == NON_LVALUE_EXPR)
8333 && TREE_CODE (tem) == COND_EXPR
8334 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8335 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8336 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8337 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8338 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8339 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8340 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8341 && (INTEGRAL_TYPE_P
8342 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8343 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8344 || flag_syntax_only))
8345 tem = build1_loc (loc, code, type,
8346 build3 (COND_EXPR,
8347 TREE_TYPE (TREE_OPERAND
8348 (TREE_OPERAND (tem, 1), 0)),
8349 TREE_OPERAND (tem, 0),
8350 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8351 TREE_OPERAND (TREE_OPERAND (tem, 2),
8352 0)));
8353 return tem;
8354 }
8355 }
8356
8357 switch (code)
8358 {
8359 case NON_LVALUE_EXPR:
8360 if (!maybe_lvalue_p (op0))
8361 return fold_convert_loc (loc, type, op0);
8362 return NULL_TREE;
8363
8364 CASE_CONVERT:
8365 case FLOAT_EXPR:
8366 case FIX_TRUNC_EXPR:
8367 if (COMPARISON_CLASS_P (op0))
8368 {
8369 /* If we have (type) (a CMP b) and type is an integral type, return
8370 new expression involving the new type. Canonicalize
8371 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
8372 non-integral type.
8373 Do not fold the result as that would not simplify further, also
8374 folding again results in recursions. */
8375 if (TREE_CODE (type) == BOOLEAN_TYPE)
8376 return build2_loc (loc, TREE_CODE (op0), type,
8377 TREE_OPERAND (op0, 0),
8378 TREE_OPERAND (op0, 1));
8379 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
8380 && TREE_CODE (type) != VECTOR_TYPE)
8381 return build3_loc (loc, COND_EXPR, type, op0,
8382 constant_boolean_node (true, type),
8383 constant_boolean_node (false, type));
8384 }
8385
8386 /* Handle (T *)&A.B.C for A being of type T and B and C
8387 living at offset zero. This occurs frequently in
8388 C++ upcasting and then accessing the base. */
8389 if (TREE_CODE (op0) == ADDR_EXPR
8390 && POINTER_TYPE_P (type)
8391 && handled_component_p (TREE_OPERAND (op0, 0)))
8392 {
8393 poly_int64 bitsize, bitpos;
8394 tree offset;
8395 machine_mode mode;
8396 int unsignedp, reversep, volatilep;
8397 tree base
8398 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
8399 &offset, &mode, &unsignedp, &reversep,
8400 &volatilep);
8401 /* If the reference was to a (constant) zero offset, we can use
8402 the address of the base if it has the same base type
8403 as the result type and the pointer type is unqualified. */
8404 if (!offset
8405 && known_eq (bitpos, 0)
8406 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8407 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8408 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8409 return fold_convert_loc (loc, type,
8410 build_fold_addr_expr_loc (loc, base));
8411 }
8412
8413 if (TREE_CODE (op0) == MODIFY_EXPR
8414 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8415 /* Detect assigning a bitfield. */
8416 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8417 && DECL_BIT_FIELD
8418 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8419 {
8420 /* Don't leave an assignment inside a conversion
8421 unless assigning a bitfield. */
8422 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8423 /* First do the assignment, then return converted constant. */
8424 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8425 TREE_NO_WARNING (tem) = 1;
8426 TREE_USED (tem) = 1;
8427 return tem;
8428 }
8429
8430 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8431 constants (if x has signed type, the sign bit cannot be set
8432 in c). This folds extension into the BIT_AND_EXPR.
8433 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8434 very likely don't have maximal range for their precision and this
8435 transformation effectively doesn't preserve non-maximal ranges. */
8436 if (TREE_CODE (type) == INTEGER_TYPE
8437 && TREE_CODE (op0) == BIT_AND_EXPR
8438 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8439 {
8440 tree and_expr = op0;
8441 tree and0 = TREE_OPERAND (and_expr, 0);
8442 tree and1 = TREE_OPERAND (and_expr, 1);
8443 int change = 0;
8444
8445 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8446 || (TYPE_PRECISION (type)
8447 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8448 change = 1;
8449 else if (TYPE_PRECISION (TREE_TYPE (and1))
8450 <= HOST_BITS_PER_WIDE_INT
8451 && tree_fits_uhwi_p (and1))
8452 {
8453 unsigned HOST_WIDE_INT cst;
8454
8455 cst = tree_to_uhwi (and1);
8456 cst &= HOST_WIDE_INT_M1U
8457 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8458 change = (cst == 0);
8459 if (change
8460 && !flag_syntax_only
8461 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
8462 == ZERO_EXTEND))
8463 {
8464 tree uns = unsigned_type_for (TREE_TYPE (and0));
8465 and0 = fold_convert_loc (loc, uns, and0);
8466 and1 = fold_convert_loc (loc, uns, and1);
8467 }
8468 }
8469 if (change)
8470 {
8471 tem = force_fit_type (type, wi::to_widest (and1), 0,
8472 TREE_OVERFLOW (and1));
8473 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8474 fold_convert_loc (loc, type, and0), tem);
8475 }
8476 }
8477
8478 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8479 cast (T1)X will fold away. We assume that this happens when X itself
8480 is a cast. */
8481 if (POINTER_TYPE_P (type)
8482 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8483 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
8484 {
8485 tree arg00 = TREE_OPERAND (arg0, 0);
8486 tree arg01 = TREE_OPERAND (arg0, 1);
8487
8488 return fold_build_pointer_plus_loc
8489 (loc, fold_convert_loc (loc, type, arg00), arg01);
8490 }
8491
8492 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8493 of the same precision, and X is an integer type not narrower than
8494 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8495 if (INTEGRAL_TYPE_P (type)
8496 && TREE_CODE (op0) == BIT_NOT_EXPR
8497 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8498 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8499 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8500 {
8501 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8502 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8503 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8504 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8505 fold_convert_loc (loc, type, tem));
8506 }
8507
8508 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8509 type of X and Y (integer types only). */
8510 if (INTEGRAL_TYPE_P (type)
8511 && TREE_CODE (op0) == MULT_EXPR
8512 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8513 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8514 {
8515 /* Be careful not to introduce new overflows. */
8516 tree mult_type;
8517 if (TYPE_OVERFLOW_WRAPS (type))
8518 mult_type = type;
8519 else
8520 mult_type = unsigned_type_for (type);
8521
8522 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8523 {
8524 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8525 fold_convert_loc (loc, mult_type,
8526 TREE_OPERAND (op0, 0)),
8527 fold_convert_loc (loc, mult_type,
8528 TREE_OPERAND (op0, 1)));
8529 return fold_convert_loc (loc, type, tem);
8530 }
8531 }
8532
8533 return NULL_TREE;
8534
8535 case VIEW_CONVERT_EXPR:
8536 if (TREE_CODE (op0) == MEM_REF)
8537 {
8538 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8539 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8540 tem = fold_build2_loc (loc, MEM_REF, type,
8541 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8542 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8543 return tem;
8544 }
8545
8546 return NULL_TREE;
8547
8548 case NEGATE_EXPR:
8549 tem = fold_negate_expr (loc, arg0);
8550 if (tem)
8551 return fold_convert_loc (loc, type, tem);
8552 return NULL_TREE;
8553
8554 case ABS_EXPR:
8555 /* Convert fabs((double)float) into (double)fabsf(float). */
8556 if (TREE_CODE (arg0) == NOP_EXPR
8557 && TREE_CODE (type) == REAL_TYPE)
8558 {
8559 tree targ0 = strip_float_extensions (arg0);
8560 if (targ0 != arg0)
8561 return fold_convert_loc (loc, type,
8562 fold_build1_loc (loc, ABS_EXPR,
8563 TREE_TYPE (targ0),
8564 targ0));
8565 }
8566 return NULL_TREE;
8567
8568 case BIT_NOT_EXPR:
8569 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8570 if (TREE_CODE (arg0) == BIT_XOR_EXPR
8571 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8572 fold_convert_loc (loc, type,
8573 TREE_OPERAND (arg0, 0)))))
8574 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8575 fold_convert_loc (loc, type,
8576 TREE_OPERAND (arg0, 1)));
8577 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8578 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8579 fold_convert_loc (loc, type,
8580 TREE_OPERAND (arg0, 1)))))
8581 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8582 fold_convert_loc (loc, type,
8583 TREE_OPERAND (arg0, 0)), tem);
8584
8585 return NULL_TREE;
8586
8587 case TRUTH_NOT_EXPR:
8588 /* Note that the operand of this must be an int
8589 and its values must be 0 or 1.
8590 ("true" is a fixed value perhaps depending on the language,
8591 but we don't handle values other than 1 correctly yet.) */
8592 tem = fold_truth_not_expr (loc, arg0);
8593 if (!tem)
8594 return NULL_TREE;
8595 return fold_convert_loc (loc, type, tem);
8596
8597 case INDIRECT_REF:
8598 /* Fold *&X to X if X is an lvalue. */
8599 if (TREE_CODE (op0) == ADDR_EXPR)
8600 {
8601 tree op00 = TREE_OPERAND (op0, 0);
8602 if ((VAR_P (op00)
8603 || TREE_CODE (op00) == PARM_DECL
8604 || TREE_CODE (op00) == RESULT_DECL)
8605 && !TREE_READONLY (op00))
8606 return op00;
8607 }
8608 return NULL_TREE;
8609
8610 default:
8611 return NULL_TREE;
8612 } /* switch (code) */
8613 }
8614
8615
8616 /* If the operation was a conversion do _not_ mark a resulting constant
8617 with TREE_OVERFLOW if the original constant was not. These conversions
8618 have implementation defined behavior and retaining the TREE_OVERFLOW
8619 flag here would confuse later passes such as VRP. */
8620 tree
8621 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8622 tree type, tree op0)
8623 {
8624 tree res = fold_unary_loc (loc, code, type, op0);
8625 if (res
8626 && TREE_CODE (res) == INTEGER_CST
8627 && TREE_CODE (op0) == INTEGER_CST
8628 && CONVERT_EXPR_CODE_P (code))
8629 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8630
8631 return res;
8632 }
8633
8634 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8635 operands OP0 and OP1. LOC is the location of the resulting expression.
8636 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8637 Return the folded expression if folding is successful. Otherwise,
8638 return NULL_TREE. */
8639 static tree
8640 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8641 tree arg0, tree arg1, tree op0, tree op1)
8642 {
8643 tree tem;
8644
8645 /* We only do these simplifications if we are optimizing. */
8646 if (!optimize)
8647 return NULL_TREE;
8648
8649 /* Check for things like (A || B) && (A || C). We can convert this
8650 to A || (B && C). Note that either operator can be any of the four
8651 truth and/or operations and the transformation will still be
8652 valid. Also note that we only care about order for the
8653 ANDIF and ORIF operators. If B contains side effects, this
8654 might change the truth-value of A. */
8655 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8656 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8657 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8658 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8659 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8660 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8661 {
8662 tree a00 = TREE_OPERAND (arg0, 0);
8663 tree a01 = TREE_OPERAND (arg0, 1);
8664 tree a10 = TREE_OPERAND (arg1, 0);
8665 tree a11 = TREE_OPERAND (arg1, 1);
8666 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8667 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8668 && (code == TRUTH_AND_EXPR
8669 || code == TRUTH_OR_EXPR));
8670
8671 if (operand_equal_p (a00, a10, 0))
8672 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8673 fold_build2_loc (loc, code, type, a01, a11));
8674 else if (commutative && operand_equal_p (a00, a11, 0))
8675 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8676 fold_build2_loc (loc, code, type, a01, a10));
8677 else if (commutative && operand_equal_p (a01, a10, 0))
8678 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8679 fold_build2_loc (loc, code, type, a00, a11));
8680
8681 /* This case is tricky because we must either have commutative
8682 operators or else A10 must not have side-effects. */
8683
8684 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8685 && operand_equal_p (a01, a11, 0))
8686 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8687 fold_build2_loc (loc, code, type, a00, a10),
8688 a01);
8689 }
8690
8691 /* See if we can build a range comparison. */
8692 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
8693 return tem;
8694
8695 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8696 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8697 {
8698 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8699 if (tem)
8700 return fold_build2_loc (loc, code, type, tem, arg1);
8701 }
8702
8703 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8704 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8705 {
8706 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8707 if (tem)
8708 return fold_build2_loc (loc, code, type, arg0, tem);
8709 }
8710
8711 /* Check for the possibility of merging component references. If our
8712 lhs is another similar operation, try to merge its rhs with our
8713 rhs. Then try to merge our lhs and rhs. */
8714 if (TREE_CODE (arg0) == code
8715 && (tem = fold_truth_andor_1 (loc, code, type,
8716 TREE_OPERAND (arg0, 1), arg1)) != 0)
8717 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8718
8719 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8720 return tem;
8721
8722 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
8723 if (param_logical_op_non_short_circuit != -1)
8724 logical_op_non_short_circuit
8725 = param_logical_op_non_short_circuit;
8726 if (logical_op_non_short_circuit
8727 && !flag_sanitize_coverage
8728 && (code == TRUTH_AND_EXPR
8729 || code == TRUTH_ANDIF_EXPR
8730 || code == TRUTH_OR_EXPR
8731 || code == TRUTH_ORIF_EXPR))
8732 {
8733 enum tree_code ncode, icode;
8734
8735 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8736 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8737 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8738
8739 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8740 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8741 We don't want to pack more than two leaves into a non-IF AND/OR
8742 expression.
8743 If the tree code of the left-hand operand isn't an AND/OR-IF code and is
8744 not equal to IF-CODE, then we don't want to add the right-hand operand.
8745 If the inner right-hand side of the left-hand operand has
8746 side-effects, or isn't simple, then we can't add to it,
8747 as otherwise we might destroy the if-sequence. */
8748 if (TREE_CODE (arg0) == icode
8749 && simple_operand_p_2 (arg1)
8750 /* Needed for sequence points to handle trapping operations
8751 and side-effects. */
8752 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8753 {
8754 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8755 arg1);
8756 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8757 tem);
8758 }
8759 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8760 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8761 else if (TREE_CODE (arg1) == icode
8762 && simple_operand_p_2 (arg0)
8763 /* Needed for sequence points to handle trapping operations
8764 and side-effects. */
8765 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8766 {
8767 tem = fold_build2_loc (loc, ncode, type,
8768 arg0, TREE_OPERAND (arg1, 0));
8769 return fold_build2_loc (loc, icode, type, tem,
8770 TREE_OPERAND (arg1, 1));
8771 }
8772 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8773 into (A OR B).
8774 For sequence point consistency, we need to check for trapping,
8775 and side-effects. */
8776 else if (code == icode && simple_operand_p_2 (arg0)
8777 && simple_operand_p_2 (arg1))
8778 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8779 }
8780
8781 return NULL_TREE;
8782 }
8783
8784 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8785 by changing CODE to reduce the magnitude of constants involved in
8786 ARG0 of the comparison.
8787 Returns a canonicalized comparison tree if a simplification was
8788 possible, otherwise returns NULL_TREE.
8789 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8790 valid if signed overflow is undefined. */
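/* Illustrative sketch, added for exposition (not part of the original
   comment): with signed overflow undefined, X - 5 < Y can be
   canonicalized to X - 4 <= Y, reducing the constant's magnitude.  */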
8791
8792 static tree
8793 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8794 tree arg0, tree arg1,
8795 bool *strict_overflow_p)
8796 {
8797 enum tree_code code0 = TREE_CODE (arg0);
8798 tree t, cst0 = NULL_TREE;
8799 int sgn0;
8800
8801 /* Match A +- CST code arg1. We can change this only if overflow
8802 is undefined. */
8803 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8804 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8805 /* In principle pointers also have undefined overflow behavior,
8806 but that causes problems elsewhere. */
8807 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8808 && (code0 == MINUS_EXPR
8809 || code0 == PLUS_EXPR)
8810 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8811 return NULL_TREE;
8812
8813 /* Identify the constant in arg0 and its sign. */
8814 cst0 = TREE_OPERAND (arg0, 1);
8815 sgn0 = tree_int_cst_sgn (cst0);
8816
8817 /* Overflowed constants and zero will cause problems. */
8818 if (integer_zerop (cst0)
8819 || TREE_OVERFLOW (cst0))
8820 return NULL_TREE;
8821
8822 /* See if we can reduce the magnitude of the constant in
8823 arg0 by changing the comparison code. */
8824 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8825 if (code == LT_EXPR
8826 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8827 code = LE_EXPR;
8828 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8829 else if (code == GT_EXPR
8830 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8831 code = GE_EXPR;
8832 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8833 else if (code == LE_EXPR
8834 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8835 code = LT_EXPR;
8836 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8837 else if (code == GE_EXPR
8838 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8839 code = GT_EXPR;
8840 else
8841 return NULL_TREE;
8842 *strict_overflow_p = true;
8843
8844 /* Now build the constant reduced in magnitude. But not if that
8845 would produce one outside of its type's range. */
8846 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8847 && ((sgn0 == 1
8848 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8849 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8850 || (sgn0 == -1
8851 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8852 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8853 return NULL_TREE;
8854
8855 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8856 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8857 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8858 t = fold_convert (TREE_TYPE (arg1), t);
8859
8860 return fold_build2_loc (loc, code, type, t, arg1);
8861 }
8862
8863 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8864 overflow further. Try to decrease the magnitude of constants involved
8865 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8866 and put sole constants at the second argument position.
8867 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8868
8869 static tree
8870 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8871 tree arg0, tree arg1)
8872 {
8873 tree t;
8874 bool strict_overflow_p;
8875 const char * const warnmsg = G_("assuming signed overflow does not occur "
8876 "when reducing constant in comparison");
8877
8878 /* Try canonicalization by simplifying arg0. */
8879 strict_overflow_p = false;
8880 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8881 &strict_overflow_p);
8882 if (t)
8883 {
8884 if (strict_overflow_p)
8885 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8886 return t;
8887 }
8888
8889 /* Try canonicalization by simplifying arg1 using the swapped
8890 comparison. */
8891 code = swap_tree_comparison (code);
8892 strict_overflow_p = false;
8893 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8894 &strict_overflow_p);
8895 if (t && strict_overflow_p)
8896 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8897 return t;
8898 }
8899
8900 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8901 space. This is used to avoid issuing overflow warnings for
8902 expressions like &p->x which cannot wrap. */
8903
8904 static bool
8905 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
8906 {
8907 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8908 return true;
8909
8910 if (maybe_lt (bitpos, 0))
8911 return true;
8912
8913 poly_wide_int wi_offset;
8914 int precision = TYPE_PRECISION (TREE_TYPE (base));
8915 if (offset == NULL_TREE)
8916 wi_offset = wi::zero (precision);
8917 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
8918 return true;
8919 else
8920 wi_offset = wi::to_poly_wide (offset);
8921
8922 wi::overflow_type overflow;
8923 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
8924 precision);
8925 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8926 if (overflow)
8927 return true;
8928
8929 poly_uint64 total_hwi, size;
8930 if (!total.to_uhwi (&total_hwi)
8931 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
8932 &size)
8933 || known_eq (size, 0U))
8934 return true;
8935
8936 if (known_le (total_hwi, size))
8937 return false;
8938
8939 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8940 array. */
8941 if (TREE_CODE (base) == ADDR_EXPR
8942 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
8943 &size)
8944 && maybe_ne (size, 0U)
8945 && known_le (total_hwi, size))
8946 return false;
8947
8948 return true;
8949 }
8950
8951 /* Return a positive integer when the symbol DECL is known to have
8952 a nonzero address, zero when it's known not to (e.g., it's a weak
8953 symbol), and a negative integer when the symbol is not yet in the
8954 symbol table and so whether or not its address is zero is unknown.
8955 For function local objects, always return a positive integer. */
8956 static int
8957 maybe_nonzero_address (tree decl)
8958 {
8959 if (DECL_P (decl) && decl_in_symtab_p (decl))
8960 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8961 return symbol->nonzero_address ();
8962
8963 /* Function local objects are never NULL. */
8964 if (DECL_P (decl)
8965 && (DECL_CONTEXT (decl)
8966 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8967 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8968 return 1;
8969
8970 return -1;
8971 }
8972
8973 /* Subroutine of fold_binary. This routine performs all of the
8974 transformations that are common to the equality/inequality
8975 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8976 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8977 fold_binary should call fold_binary. Fold a comparison with
8978 tree code CODE and type TYPE with operands OP0 and OP1. Return
8979 the folded comparison or NULL_TREE. */
8980
8981 static tree
8982 fold_comparison (location_t loc, enum tree_code code, tree type,
8983 tree op0, tree op1)
8984 {
8985 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8986 tree arg0, arg1, tem;
8987
8988 arg0 = op0;
8989 arg1 = op1;
8990
8991 STRIP_SIGN_NOPS (arg0);
8992 STRIP_SIGN_NOPS (arg1);
8993
8994 /* For comparisons of pointers we can decompose it to a compile time
8995 comparison of the base objects and the offsets into the object.
8996 This requires at least one operand being an ADDR_EXPR or a
8997 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
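  /* Illustrative example, added for exposition (not in the original
     source): given "struct s { int a, b; } x;", the test &x.a == &x.b
     decomposes to the common base "x" with two different constant bit
     positions and therefore folds to false.  */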
8998 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8999 && (TREE_CODE (arg0) == ADDR_EXPR
9000 || TREE_CODE (arg1) == ADDR_EXPR
9001 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9002 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9003 {
9004 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9005 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9006 machine_mode mode;
9007 int volatilep, reversep, unsignedp;
9008 bool indirect_base0 = false, indirect_base1 = false;
9009
9010 /* Get base and offset for the access. Strip ADDR_EXPR for
9011 get_inner_reference, but put it back by stripping INDIRECT_REF
9012 off the base object if possible. indirect_baseN will be true
9013 if baseN is not an address but refers to the object itself. */
9014 base0 = arg0;
9015 if (TREE_CODE (arg0) == ADDR_EXPR)
9016 {
9017 base0
9018 = get_inner_reference (TREE_OPERAND (arg0, 0),
9019 &bitsize, &bitpos0, &offset0, &mode,
9020 &unsignedp, &reversep, &volatilep);
9021 if (TREE_CODE (base0) == INDIRECT_REF)
9022 base0 = TREE_OPERAND (base0, 0);
9023 else
9024 indirect_base0 = true;
9025 }
9026 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9027 {
9028 base0 = TREE_OPERAND (arg0, 0);
9029 STRIP_SIGN_NOPS (base0);
9030 if (TREE_CODE (base0) == ADDR_EXPR)
9031 {
9032 base0
9033 = get_inner_reference (TREE_OPERAND (base0, 0),
9034 &bitsize, &bitpos0, &offset0, &mode,
9035 &unsignedp, &reversep, &volatilep);
9036 if (TREE_CODE (base0) == INDIRECT_REF)
9037 base0 = TREE_OPERAND (base0, 0);
9038 else
9039 indirect_base0 = true;
9040 }
9041 if (offset0 == NULL_TREE || integer_zerop (offset0))
9042 offset0 = TREE_OPERAND (arg0, 1);
9043 else
9044 offset0 = size_binop (PLUS_EXPR, offset0,
9045 TREE_OPERAND (arg0, 1));
9046 if (poly_int_tree_p (offset0))
9047 {
9048 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9049 TYPE_PRECISION (sizetype));
9050 tem <<= LOG2_BITS_PER_UNIT;
9051 tem += bitpos0;
9052 if (tem.to_shwi (&bitpos0))
9053 offset0 = NULL_TREE;
9054 }
9055 }
9056
9057 base1 = arg1;
9058 if (TREE_CODE (arg1) == ADDR_EXPR)
9059 {
9060 base1
9061 = get_inner_reference (TREE_OPERAND (arg1, 0),
9062 &bitsize, &bitpos1, &offset1, &mode,
9063 &unsignedp, &reversep, &volatilep);
9064 if (TREE_CODE (base1) == INDIRECT_REF)
9065 base1 = TREE_OPERAND (base1, 0);
9066 else
9067 indirect_base1 = true;
9068 }
9069 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9070 {
9071 base1 = TREE_OPERAND (arg1, 0);
9072 STRIP_SIGN_NOPS (base1);
9073 if (TREE_CODE (base1) == ADDR_EXPR)
9074 {
9075 base1
9076 = get_inner_reference (TREE_OPERAND (base1, 0),
9077 &bitsize, &bitpos1, &offset1, &mode,
9078 &unsignedp, &reversep, &volatilep);
9079 if (TREE_CODE (base1) == INDIRECT_REF)
9080 base1 = TREE_OPERAND (base1, 0);
9081 else
9082 indirect_base1 = true;
9083 }
9084 if (offset1 == NULL_TREE || integer_zerop (offset1))
9085 offset1 = TREE_OPERAND (arg1, 1);
9086 else
9087 offset1 = size_binop (PLUS_EXPR, offset1,
9088 TREE_OPERAND (arg1, 1));
9089 if (poly_int_tree_p (offset1))
9090 {
9091 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
9092 TYPE_PRECISION (sizetype));
9093 tem <<= LOG2_BITS_PER_UNIT;
9094 tem += bitpos1;
9095 if (tem.to_shwi (&bitpos1))
9096 offset1 = NULL_TREE;
9097 }
9098 }
9099
9100 /* If we have equivalent bases we might be able to simplify. */
9101 if (indirect_base0 == indirect_base1
9102 && operand_equal_p (base0, base1,
9103 indirect_base0 ? OEP_ADDRESS_OF : 0))
9104 {
9105 /* We can fold this expression to a constant if the non-constant
9106 offset parts are equal. */
9107 if ((offset0 == offset1
9108 || (offset0 && offset1
9109 && operand_equal_p (offset0, offset1, 0)))
9110 && (equality_code
9111 || (indirect_base0
9112 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9113 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9114 {
9115 if (!equality_code
9116 && maybe_ne (bitpos0, bitpos1)
9117 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9118 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9119 fold_overflow_warning (("assuming pointer wraparound does not "
9120 "occur when comparing P +- C1 with "
9121 "P +- C2"),
9122 WARN_STRICT_OVERFLOW_CONDITIONAL);
9123
9124 switch (code)
9125 {
9126 case EQ_EXPR:
9127 if (known_eq (bitpos0, bitpos1))
9128 return constant_boolean_node (true, type);
9129 if (known_ne (bitpos0, bitpos1))
9130 return constant_boolean_node (false, type);
9131 break;
9132 case NE_EXPR:
9133 if (known_ne (bitpos0, bitpos1))
9134 return constant_boolean_node (true, type);
9135 if (known_eq (bitpos0, bitpos1))
9136 return constant_boolean_node (false, type);
9137 break;
9138 case LT_EXPR:
9139 if (known_lt (bitpos0, bitpos1))
9140 return constant_boolean_node (true, type);
9141 if (known_ge (bitpos0, bitpos1))
9142 return constant_boolean_node (false, type);
9143 break;
9144 case LE_EXPR:
9145 if (known_le (bitpos0, bitpos1))
9146 return constant_boolean_node (true, type);
9147 if (known_gt (bitpos0, bitpos1))
9148 return constant_boolean_node (false, type);
9149 break;
9150 case GE_EXPR:
9151 if (known_ge (bitpos0, bitpos1))
9152 return constant_boolean_node (true, type);
9153 if (known_lt (bitpos0, bitpos1))
9154 return constant_boolean_node (false, type);
9155 break;
9156 case GT_EXPR:
9157 if (known_gt (bitpos0, bitpos1))
9158 return constant_boolean_node (true, type);
9159 if (known_le (bitpos0, bitpos1))
9160 return constant_boolean_node (false, type);
9161 break;
9162 default:;
9163 }
9164 }
9165 /* We can simplify the comparison to a comparison of the variable
9166 offset parts if the constant offset parts are equal.
9167 Be careful to use signed sizetype here because otherwise we
9168 mess with array offsets in the wrong way. This is possible
9169 because pointer arithmetic is restricted to remain within an
9170 object and overflow on pointer differences is undefined as of
9171 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9172 else if (known_eq (bitpos0, bitpos1)
9173 && (equality_code
9174 || (indirect_base0
9175 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9176 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9177 {
9178 /* By converting to signed sizetype we cover middle-end pointer
9179 arithmetic which operates on unsigned pointer types of size
9180 type size and ARRAY_REF offsets which are properly sign or
9181 zero extended from their type in case it is narrower than
9182 sizetype. */
9183 if (offset0 == NULL_TREE)
9184 offset0 = build_int_cst (ssizetype, 0);
9185 else
9186 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9187 if (offset1 == NULL_TREE)
9188 offset1 = build_int_cst (ssizetype, 0);
9189 else
9190 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9191
9192 if (!equality_code
9193 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9194 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9195 fold_overflow_warning (("assuming pointer wraparound does not "
9196 "occur when comparing P +- C1 with "
9197 "P +- C2"),
9198 WARN_STRICT_OVERFLOW_COMPARISON);
9199
9200 return fold_build2_loc (loc, code, type, offset0, offset1);
9201 }
9202 }
9203 /* For equal offsets we can simplify to a comparison of the
9204 base addresses. */
9205 else if (known_eq (bitpos0, bitpos1)
9206 && (indirect_base0
9207 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9208 && (indirect_base1
9209 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9210 && ((offset0 == offset1)
9211 || (offset0 && offset1
9212 && operand_equal_p (offset0, offset1, 0))))
9213 {
9214 if (indirect_base0)
9215 base0 = build_fold_addr_expr_loc (loc, base0);
9216 if (indirect_base1)
9217 base1 = build_fold_addr_expr_loc (loc, base1);
9218 return fold_build2_loc (loc, code, type, base0, base1);
9219 }
9220 /* Comparison between an ordinary (non-weak) symbol and a null
9221 pointer can be eliminated since such symbols must have a non-null
9222 address. In C, relational expressions between pointers
9223 to objects and null pointers are undefined. The results
9224 below follow the C++ rules with the additional property that
9225 every object pointer compares greater than a null pointer.
9226 */
9227 else if (((DECL_P (base0)
9228 && maybe_nonzero_address (base0) > 0
9229 /* Avoid folding references to struct members at offset 0 to
9230 prevent tests like '&ptr->firstmember == 0' from getting
9231 eliminated. When ptr is null, although the -> expression
9232 is strictly speaking invalid, GCC retains it as a matter
9233 of QoI. See PR c/44555. */
9234 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
9235 || CONSTANT_CLASS_P (base0))
9236 && indirect_base0
9237 /* The caller guarantees that when one of the arguments is
9238 constant (i.e., null in this case) it is second. */
9239 && integer_zerop (arg1))
9240 {
9241 switch (code)
9242 {
9243 case EQ_EXPR:
9244 case LE_EXPR:
9245 case LT_EXPR:
9246 return constant_boolean_node (false, type);
9247 case GE_EXPR:
9248 case GT_EXPR:
9249 case NE_EXPR:
9250 return constant_boolean_node (true, type);
9251 default:
9252 gcc_unreachable ();
9253 }
9254 }
9255 }
9256
9257 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9258 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9259 the resulting offset is smaller in absolute value than the
9260 original one and has the same sign. */
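  /* Illustrative example, added for exposition: with signed overflow
     undefined, X + 3 < Y + 5 becomes X < Y + 2, since the combined
     constant 2 is smaller in magnitude than 5 and has the same sign.  */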
9261 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9262 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9263 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9264 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9265 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9266 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9267 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9268 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9269 {
9270 tree const1 = TREE_OPERAND (arg0, 1);
9271 tree const2 = TREE_OPERAND (arg1, 1);
9272 tree variable1 = TREE_OPERAND (arg0, 0);
9273 tree variable2 = TREE_OPERAND (arg1, 0);
9274 tree cst;
9275 const char * const warnmsg = G_("assuming signed overflow does not "
9276 "occur when combining constants around "
9277 "a comparison");
9278
9279 /* Put the constant on the side where it doesn't overflow and is
9280 of lower absolute value and of the same sign as before. */
9281 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9282 ? MINUS_EXPR : PLUS_EXPR,
9283 const2, const1);
9284 if (!TREE_OVERFLOW (cst)
9285 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9286 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9287 {
9288 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9289 return fold_build2_loc (loc, code, type,
9290 variable1,
9291 fold_build2_loc (loc, TREE_CODE (arg1),
9292 TREE_TYPE (arg1),
9293 variable2, cst));
9294 }
9295
9296 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9297 ? MINUS_EXPR : PLUS_EXPR,
9298 const1, const2);
9299 if (!TREE_OVERFLOW (cst)
9300 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9301 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9302 {
9303 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9304 return fold_build2_loc (loc, code, type,
9305 fold_build2_loc (loc, TREE_CODE (arg0),
9306 TREE_TYPE (arg0),
9307 variable1, cst),
9308 variable2);
9309 }
9310 }
9311
9312 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9313 if (tem)
9314 return tem;
9315
9316 /* If we are comparing an expression that just has comparisons
9317 of two integer values, arithmetic expressions of those comparisons,
9318 and constants, we can simplify it. There are only three cases
9319 to check: the two values can either be equal, the first can be
9320 greater, or the second can be greater. Fold the expression for
9321 those three values. Since each value must be 0 or 1, we have
9322 eight possibilities, each of which corresponds to the constant 0
9323 or 1 or one of the six possible comparisons.
9324
9325 This handles common cases like (a > b) == 0 but also handles
9326 expressions like ((x > y) - (y > x)) > 0, which supposedly
9327 occur in macroized code. */
9328
9329 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9330 {
9331 tree cval1 = 0, cval2 = 0;
9332
9333 if (twoval_comparison_p (arg0, &cval1, &cval2)
9334 /* Don't handle degenerate cases here; they should already
9335 have been handled anyway. */
9336 && cval1 != 0 && cval2 != 0
9337 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9338 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9339 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9340 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9341 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9342 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9343 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9344 {
9345 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9346 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9347
9348 /* We can't just pass T to eval_subst in case cval1 or cval2
9349 was the same as ARG1. */
9350
9351 tree high_result
9352 = fold_build2_loc (loc, code, type,
9353 eval_subst (loc, arg0, cval1, maxval,
9354 cval2, minval),
9355 arg1);
9356 tree equal_result
9357 = fold_build2_loc (loc, code, type,
9358 eval_subst (loc, arg0, cval1, maxval,
9359 cval2, maxval),
9360 arg1);
9361 tree low_result
9362 = fold_build2_loc (loc, code, type,
9363 eval_subst (loc, arg0, cval1, minval,
9364 cval2, maxval),
9365 arg1);
9366
9367 /* All three of these results should be 0 or 1. Confirm they are.
9368 Then use those values to select the proper code to use. */
9369
9370 if (TREE_CODE (high_result) == INTEGER_CST
9371 && TREE_CODE (equal_result) == INTEGER_CST
9372 && TREE_CODE (low_result) == INTEGER_CST)
9373 {
9374 /* Make a 3-bit mask with the high-order bit being the
9375 value for `>', the next for '=', and the low for '<'. */
9376 switch ((integer_onep (high_result) * 4)
9377 + (integer_onep (equal_result) * 2)
9378 + integer_onep (low_result))
9379 {
9380 case 0:
9381 /* Always false. */
9382 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9383 case 1:
9384 code = LT_EXPR;
9385 break;
9386 case 2:
9387 code = EQ_EXPR;
9388 break;
9389 case 3:
9390 code = LE_EXPR;
9391 break;
9392 case 4:
9393 code = GT_EXPR;
9394 break;
9395 case 5:
9396 code = NE_EXPR;
9397 break;
9398 case 6:
9399 code = GE_EXPR;
9400 break;
9401 case 7:
9402 /* Always true. */
9403 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9404 }
9405
9406 return fold_build2_loc (loc, code, type, cval1, cval2);
9407 }
9408 }
9409 }
9410
9411 return NULL_TREE;
9412 }
9413
9414
9415 /* Subroutine of fold_binary. Optimize complex multiplications of the
9416 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9417 argument EXPR represents the expression "z" of type TYPE. */
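/* Illustrative example, added for exposition (not in the original
   comment): for z = a + b*i, z * conj(z) folds to (a*a + b*b) + 0*i.  */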
9418
9419 static tree
9420 fold_mult_zconjz (location_t loc, tree type, tree expr)
9421 {
9422 tree itype = TREE_TYPE (type);
9423 tree rpart, ipart, tem;
9424
9425 if (TREE_CODE (expr) == COMPLEX_EXPR)
9426 {
9427 rpart = TREE_OPERAND (expr, 0);
9428 ipart = TREE_OPERAND (expr, 1);
9429 }
9430 else if (TREE_CODE (expr) == COMPLEX_CST)
9431 {
9432 rpart = TREE_REALPART (expr);
9433 ipart = TREE_IMAGPART (expr);
9434 }
9435 else
9436 {
9437 expr = save_expr (expr);
9438 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9439 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9440 }
9441
9442 rpart = save_expr (rpart);
9443 ipart = save_expr (ipart);
9444 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9445 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9446 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9447 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9448 build_zero_cst (itype));
9449 }
9450
9451
9452 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9453 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
9454 true if successful. */
9455
9456 static bool
9457 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
9458 {
9459 unsigned HOST_WIDE_INT i, nunits;
9460
9461 if (TREE_CODE (arg) == VECTOR_CST
9462 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
9463 {
9464 for (i = 0; i < nunits; ++i)
9465 elts[i] = VECTOR_CST_ELT (arg, i);
9466 }
9467 else if (TREE_CODE (arg) == CONSTRUCTOR)
9468 {
9469 constructor_elt *elt;
9470
9471 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9472 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9473 return false;
9474 else
9475 elts[i] = elt->value;
9476 }
9477 else
9478 return false;
9479 for (; i < nelts; i++)
9480 elts[i]
9481 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9482 return true;
9483 }
9484
9485 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9486 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9487 NULL_TREE otherwise. */
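/* Illustrative example, added for exposition: with ARG0 = {1, 2, 3, 4},
   ARG1 = {5, 6, 7, 8} and SEL = {0, 4, 1, 5}, the result would be
   {1, 5, 2, 6}; selector values >= nelts index into ARG1.  */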
9488
9489 tree
9490 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9491 {
9492 unsigned int i;
9493 unsigned HOST_WIDE_INT nelts;
9494 bool need_ctor = false;
9495
9496 if (!sel.length ().is_constant (&nelts))
9497 return NULL_TREE;
9498 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9499 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9500 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9501 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9502 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9503 return NULL_TREE;
9504
9505 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9506 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9507 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9508 return NULL_TREE;
9509
9510 tree_vector_builder out_elts (type, nelts, 1);
9511 for (i = 0; i < nelts; i++)
9512 {
9513 HOST_WIDE_INT index;
9514 if (!sel[i].is_constant (&index))
9515 return NULL_TREE;
9516 if (!CONSTANT_CLASS_P (in_elts[index]))
9517 need_ctor = true;
9518 out_elts.quick_push (unshare_expr (in_elts[index]));
9519 }
9520
9521 if (need_ctor)
9522 {
9523 vec<constructor_elt, va_gc> *v;
9524 vec_alloc (v, nelts);
9525 for (i = 0; i < nelts; i++)
9526 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9527 return build_constructor (type, v);
9528 }
9529 else
9530 return out_elts.build ();
9531 }
9532
9533 /* Try to fold a pointer difference of type TYPE between two address
9534 expressions of array references AREF0 and AREF1 using location LOC. Return a
9535 simplified expression for the difference or NULL_TREE. */
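/* Illustrative example, added for exposition: for &a[i] - &a[j] the bases
   are equal, so the difference folds to (i - j) * sizeof (a[0]).  */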
9536
9537 static tree
9538 fold_addr_of_array_ref_difference (location_t loc, tree type,
9539 tree aref0, tree aref1,
9540 bool use_pointer_diff)
9541 {
9542 tree base0 = TREE_OPERAND (aref0, 0);
9543 tree base1 = TREE_OPERAND (aref1, 0);
9544 tree base_offset = build_int_cst (type, 0);
9545
9546 /* If the bases are array references as well, recurse. If the bases
9547 are pointer indirections compute the difference of the pointers.
9548 If the bases are equal, we are set. */
9549 if ((TREE_CODE (base0) == ARRAY_REF
9550 && TREE_CODE (base1) == ARRAY_REF
9551 && (base_offset
9552 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9553 use_pointer_diff)))
9554 || (INDIRECT_REF_P (base0)
9555 && INDIRECT_REF_P (base1)
9556 && (base_offset
9557 = use_pointer_diff
9558 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9559 TREE_OPERAND (base0, 0),
9560 TREE_OPERAND (base1, 0))
9561 : fold_binary_loc (loc, MINUS_EXPR, type,
9562 fold_convert (type,
9563 TREE_OPERAND (base0, 0)),
9564 fold_convert (type,
9565 TREE_OPERAND (base1, 0)))))
9566 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9567 {
9568 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9569 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9570 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9571 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9572 return fold_build2_loc (loc, PLUS_EXPR, type,
9573 base_offset,
9574 fold_build2_loc (loc, MULT_EXPR, type,
9575 diff, esz));
9576 }
9577 return NULL_TREE;
9578 }
9579
9580 /* If the real or vector real constant CST of type TYPE has an exact
9581 inverse, return it, else return NULL. */
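/* For example (illustrative, not in the original comment): 4.0 has the
   exact inverse 0.25, whereas 3.0 does not, so only the former is
   returned.  */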
9582
9583 tree
9584 exact_inverse (tree type, tree cst)
9585 {
9586 REAL_VALUE_TYPE r;
9587 tree unit_type;
9588 machine_mode mode;
9589
9590 switch (TREE_CODE (cst))
9591 {
9592 case REAL_CST:
9593 r = TREE_REAL_CST (cst);
9594
9595 if (exact_real_inverse (TYPE_MODE (type), &r))
9596 return build_real (type, r);
9597
9598 return NULL_TREE;
9599
9600 case VECTOR_CST:
9601 {
9602 unit_type = TREE_TYPE (type);
9603 mode = TYPE_MODE (unit_type);
9604
9605 tree_vector_builder elts;
9606 if (!elts.new_unary_operation (type, cst, false))
9607 return NULL_TREE;
9608 unsigned int count = elts.encoded_nelts ();
9609 for (unsigned int i = 0; i < count; ++i)
9610 {
9611 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9612 if (!exact_real_inverse (mode, &r))
9613 return NULL_TREE;
9614 elts.quick_push (build_real (unit_type, r));
9615 }
9616
9617 return elts.build ();
9618 }
9619
9620 default:
9621 return NULL_TREE;
9622 }
9623 }
9624
9625 /* Mask out the tz least significant bits of X of type TYPE where
9626 tz is the number of trailing zeroes in Y. */
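/* Illustrative example, added for exposition: if Y is 0b1000 (three
   trailing zeroes), the three least significant bits of X are cleared.  */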
9627 static wide_int
9628 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9629 {
9630 int tz = wi::ctz (y);
9631 if (tz > 0)
9632 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9633 return x;
9634 }
9635
9636 /* Return true when T is an address and is known to be nonzero.
9637 For floating point we further ensure that T is not denormal.
9638 Similar logic is present in nonzero_address in rtlanal.h.
9639
9640 If the return value is based on the assumption that signed overflow
9641 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9642 change *STRICT_OVERFLOW_P. */
9643
9644 static bool
9645 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9646 {
9647 tree type = TREE_TYPE (t);
9648 enum tree_code code;
9649
9650 /* Doing something useful for floating point would need more work. */
9651 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9652 return false;
9653
9654 code = TREE_CODE (t);
9655 switch (TREE_CODE_CLASS (code))
9656 {
9657 case tcc_unary:
9658 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9659 strict_overflow_p);
9660 case tcc_binary:
9661 case tcc_comparison:
9662 return tree_binary_nonzero_warnv_p (code, type,
9663 TREE_OPERAND (t, 0),
9664 TREE_OPERAND (t, 1),
9665 strict_overflow_p);
9666 case tcc_constant:
9667 case tcc_declaration:
9668 case tcc_reference:
9669 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9670
9671 default:
9672 break;
9673 }
9674
9675 switch (code)
9676 {
9677 case TRUTH_NOT_EXPR:
9678 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9679 strict_overflow_p);
9680
9681 case TRUTH_AND_EXPR:
9682 case TRUTH_OR_EXPR:
9683 case TRUTH_XOR_EXPR:
9684 return tree_binary_nonzero_warnv_p (code, type,
9685 TREE_OPERAND (t, 0),
9686 TREE_OPERAND (t, 1),
9687 strict_overflow_p);
9688
9689 case COND_EXPR:
9690 case CONSTRUCTOR:
9691 case OBJ_TYPE_REF:
9692 case ASSERT_EXPR:
9693 case ADDR_EXPR:
9694 case WITH_SIZE_EXPR:
9695 case SSA_NAME:
9696 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9697
9698 case COMPOUND_EXPR:
9699 case MODIFY_EXPR:
9700 case BIND_EXPR:
9701 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9702 strict_overflow_p);
9703
9704 case SAVE_EXPR:
9705 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9706 strict_overflow_p);
9707
9708 case CALL_EXPR:
9709 {
9710 tree fndecl = get_callee_fndecl (t);
9711 if (!fndecl) return false;
9712 if (flag_delete_null_pointer_checks && !flag_check_new
9713 && DECL_IS_OPERATOR_NEW_P (fndecl)
9714 && !TREE_NOTHROW (fndecl))
9715 return true;
9716 if (flag_delete_null_pointer_checks
9717 && lookup_attribute ("returns_nonnull",
9718 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9719 return true;
9720 return alloca_call_p (t);
9721 }
9722
9723 default:
9724 break;
9725 }
9726 return false;
9727 }
9728
9729 /* Return true when T is an address and is known to be nonzero.
9730 Handle warnings about undefined signed overflow. */
9731
9732 bool
9733 tree_expr_nonzero_p (tree t)
9734 {
9735 bool ret, strict_overflow_p;
9736
9737 strict_overflow_p = false;
9738 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9739 if (strict_overflow_p)
9740 fold_overflow_warning (("assuming signed overflow does not occur when "
9741 "determining that expression is always "
9742 "non-zero"),
9743 WARN_STRICT_OVERFLOW_MISC);
9744 return ret;
9745 }
9746
9747 /* Return true if T is known not to be equal to an integer W. */
9748
9749 bool
9750 expr_not_equal_to (tree t, const wide_int &w)
9751 {
9752 wide_int min, max, nz;
9753 value_range_kind rtype;
9754 switch (TREE_CODE (t))
9755 {
9756 case INTEGER_CST:
9757 return wi::to_wide (t) != w;
9758
9759 case SSA_NAME:
9760 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9761 return false;
9762 rtype = get_range_info (t, &min, &max);
9763 if (rtype == VR_RANGE)
9764 {
9765 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9766 return true;
9767 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9768 return true;
9769 }
9770 else if (rtype == VR_ANTI_RANGE
9771 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9772 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9773 return true;
9774 /* If T has some known zero bits and W has any of those bits set,
9775 then T is known not to be equal to W. */
9776 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9777 TYPE_PRECISION (TREE_TYPE (t))), 0))
9778 return true;
9779 return false;
9780
9781 default:
9782 return false;
9783 }
9784 }
9785
9786 /* Fold a binary expression of code CODE and type TYPE with operands
9787 OP0 and OP1. LOC is the location of the resulting expression.
9788 Return the folded expression if folding is successful. Otherwise,
9789 return NULL_TREE. */
9790
9791 tree
9792 fold_binary_loc (location_t loc, enum tree_code code, tree type,
9793 tree op0, tree op1)
9794 {
9795 enum tree_code_class kind = TREE_CODE_CLASS (code);
9796 tree arg0, arg1, tem;
9797 tree t1 = NULL_TREE;
9798 bool strict_overflow_p;
9799 unsigned int prec;
9800
9801 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9802 && TREE_CODE_LENGTH (code) == 2
9803 && op0 != NULL_TREE
9804 && op1 != NULL_TREE);
9805
9806 arg0 = op0;
9807 arg1 = op1;
9808
9809 /* Strip any conversions that don't change the mode. This is
9810 safe for every expression, except for a comparison expression
9811 because its signedness is derived from its operands. So, in
9812 the latter case, only strip conversions that don't change the
9813 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9814 preserved.
9815
9816 Note that this is done as an internal manipulation within the
9817 constant folder, in order to find the simplest representation
9818 of the arguments so that their form can be studied. In any
9819 cases, the appropriate type conversions should be put back in
9820 the tree that will get out of the constant folder. */
9821
9822 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9823 {
9824 STRIP_SIGN_NOPS (arg0);
9825 STRIP_SIGN_NOPS (arg1);
9826 }
9827 else
9828 {
9829 STRIP_NOPS (arg0);
9830 STRIP_NOPS (arg1);
9831 }
9832
9833 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9834 constant but we can't do arithmetic on them. */
9835 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9836 {
9837 tem = const_binop (code, type, arg0, arg1);
9838 if (tem != NULL_TREE)
9839 {
9840 if (TREE_TYPE (tem) != type)
9841 tem = fold_convert_loc (loc, type, tem);
9842 return tem;
9843 }
9844 }
9845
9846 /* If this is a commutative operation, and ARG0 is a constant, move it
9847 to ARG1 to reduce the number of tests below. */
9848 if (commutative_tree_code (code)
9849 && tree_swap_operands_p (arg0, arg1))
9850 return fold_build2_loc (loc, code, type, op1, op0);
9851
9852 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9853 to ARG1 to reduce the number of tests below. */
9854 if (kind == tcc_comparison
9855 && tree_swap_operands_p (arg0, arg1))
9856 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9857
9858 tem = generic_simplify (loc, code, type, op0, op1);
9859 if (tem)
9860 return tem;
9861
9862 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9863
9864 First check for cases where an arithmetic operation is applied to a
9865 compound, conditional, or comparison operation. Push the arithmetic
9866 operation inside the compound or conditional to see if any folding
9867 can then be done. Convert comparison to conditional for this purpose.
9868 This also optimizes non-constant cases that used to be done in
9869 expand_expr.
9870
9871 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9872 one of the operands is a comparison and the other is a comparison, a
9873 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9874 code below would make the expression more complex. Change it to a
9875 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9876 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
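  /* Illustrative example, added for exposition: (a < b) & (c < d) is
     rewritten as a TRUTH_AND_EXPR of the two comparisons, and
     (a < b) == (c < d) becomes the inversion of a TRUTH_XOR_EXPR.  */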
9877
9878 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9879 || code == EQ_EXPR || code == NE_EXPR)
9880 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
9881 && ((truth_value_p (TREE_CODE (arg0))
9882 && (truth_value_p (TREE_CODE (arg1))
9883 || (TREE_CODE (arg1) == BIT_AND_EXPR
9884 && integer_onep (TREE_OPERAND (arg1, 1)))))
9885 || (truth_value_p (TREE_CODE (arg1))
9886 && (truth_value_p (TREE_CODE (arg0))
9887 || (TREE_CODE (arg0) == BIT_AND_EXPR
9888 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9889 {
9890 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9891 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9892 : TRUTH_XOR_EXPR,
9893 boolean_type_node,
9894 fold_convert_loc (loc, boolean_type_node, arg0),
9895 fold_convert_loc (loc, boolean_type_node, arg1));
9896
9897 if (code == EQ_EXPR)
9898 tem = invert_truthvalue_loc (loc, tem);
9899
9900 return fold_convert_loc (loc, type, tem);
9901 }
9902
9903 if (TREE_CODE_CLASS (code) == tcc_binary
9904 || TREE_CODE_CLASS (code) == tcc_comparison)
9905 {
9906 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9907 {
9908 tem = fold_build2_loc (loc, code, type,
9909 fold_convert_loc (loc, TREE_TYPE (op0),
9910 TREE_OPERAND (arg0, 1)), op1);
9911 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9912 tem);
9913 }
9914 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9915 {
9916 tem = fold_build2_loc (loc, code, type, op0,
9917 fold_convert_loc (loc, TREE_TYPE (op1),
9918 TREE_OPERAND (arg1, 1)));
9919 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9920 tem);
9921 }
9922
9923 if (TREE_CODE (arg0) == COND_EXPR
9924 || TREE_CODE (arg0) == VEC_COND_EXPR
9925 || COMPARISON_CLASS_P (arg0))
9926 {
9927 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9928 arg0, arg1,
9929 /*cond_first_p=*/1);
9930 if (tem != NULL_TREE)
9931 return tem;
9932 }
9933
9934 if (TREE_CODE (arg1) == COND_EXPR
9935 || TREE_CODE (arg1) == VEC_COND_EXPR
9936 || COMPARISON_CLASS_P (arg1))
9937 {
9938 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9939 arg1, arg0,
9940 /*cond_first_p=*/0);
9941 if (tem != NULL_TREE)
9942 return tem;
9943 }
9944 }
9945
9946 switch (code)
9947 {
9948 case MEM_REF:
9949 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9950 if (TREE_CODE (arg0) == ADDR_EXPR
9951 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9952 {
9953 tree iref = TREE_OPERAND (arg0, 0);
9954 return fold_build2 (MEM_REF, type,
9955 TREE_OPERAND (iref, 0),
9956 int_const_binop (PLUS_EXPR, arg1,
9957 TREE_OPERAND (iref, 1)));
9958 }
9959
9960 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9961 if (TREE_CODE (arg0) == ADDR_EXPR
9962 && handled_component_p (TREE_OPERAND (arg0, 0)))
9963 {
9964 tree base;
9965 poly_int64 coffset;
9966 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9967 &coffset);
9968 if (!base)
9969 return NULL_TREE;
9970 return fold_build2 (MEM_REF, type,
9971 build_fold_addr_expr (base),
9972 int_const_binop (PLUS_EXPR, arg1,
9973 size_int (coffset)));
9974 }
9975
9976 return NULL_TREE;
9977
9978 case POINTER_PLUS_EXPR:
9979 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9980 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9981 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9982 return fold_convert_loc (loc, type,
9983 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9984 fold_convert_loc (loc, sizetype,
9985 arg1),
9986 fold_convert_loc (loc, sizetype,
9987 arg0)));
9988
9989 return NULL_TREE;
9990
9991 case PLUS_EXPR:
9992 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9993 {
9994 /* X + (X / CST) * -CST is X % CST. */
9995 if (TREE_CODE (arg1) == MULT_EXPR
9996 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9997 && operand_equal_p (arg0,
9998 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9999 {
10000 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10001 tree cst1 = TREE_OPERAND (arg1, 1);
10002 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10003 cst1, cst0);
10004 if (sum && integer_zerop (sum))
10005 return fold_convert_loc (loc, type,
10006 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10007 TREE_TYPE (arg0), arg0,
10008 cst0));
10009 }
10010 }
10011
10012 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10013 one. Make sure the type is not saturating and has the signedness of
10014 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10015 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10016 if ((TREE_CODE (arg0) == MULT_EXPR
10017 || TREE_CODE (arg1) == MULT_EXPR)
10018 && !TYPE_SATURATING (type)
10019 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10020 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10021 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10022 {
10023 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10024 if (tem)
10025 return tem;
10026 }
10027
10028 if (! FLOAT_TYPE_P (type))
10029 {
10030 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10031 (plus (plus (mult) (mult)) (foo)) so that we can
10032 take advantage of the factoring cases below. */
10033 if (ANY_INTEGRAL_TYPE_P (type)
10034 && TYPE_OVERFLOW_WRAPS (type)
10035 && (((TREE_CODE (arg0) == PLUS_EXPR
10036 || TREE_CODE (arg0) == MINUS_EXPR)
10037 && TREE_CODE (arg1) == MULT_EXPR)
10038 || ((TREE_CODE (arg1) == PLUS_EXPR
10039 || TREE_CODE (arg1) == MINUS_EXPR)
10040 && TREE_CODE (arg0) == MULT_EXPR)))
10041 {
10042 tree parg0, parg1, parg, marg;
10043 enum tree_code pcode;
10044
10045 if (TREE_CODE (arg1) == MULT_EXPR)
10046 parg = arg0, marg = arg1;
10047 else
10048 parg = arg1, marg = arg0;
10049 pcode = TREE_CODE (parg);
10050 parg0 = TREE_OPERAND (parg, 0);
10051 parg1 = TREE_OPERAND (parg, 1);
10052 STRIP_NOPS (parg0);
10053 STRIP_NOPS (parg1);
10054
10055 if (TREE_CODE (parg0) == MULT_EXPR
10056 && TREE_CODE (parg1) != MULT_EXPR)
10057 return fold_build2_loc (loc, pcode, type,
10058 fold_build2_loc (loc, PLUS_EXPR, type,
10059 fold_convert_loc (loc, type,
10060 parg0),
10061 fold_convert_loc (loc, type,
10062 marg)),
10063 fold_convert_loc (loc, type, parg1));
10064 if (TREE_CODE (parg0) != MULT_EXPR
10065 && TREE_CODE (parg1) == MULT_EXPR)
10066 return
10067 fold_build2_loc (loc, PLUS_EXPR, type,
10068 fold_convert_loc (loc, type, parg0),
10069 fold_build2_loc (loc, pcode, type,
10070 fold_convert_loc (loc, type, marg),
10071 fold_convert_loc (loc, type,
10072 parg1)));
10073 }
10074 }
10075 else
10076 {
10077 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10078 to __complex__ ( x, y ). This is not the same for SNaNs or
10079 if signed zeros are involved. */
10080 if (!HONOR_SNANS (element_mode (arg0))
10081 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10082 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10083 {
10084 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10085 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10086 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10087 bool arg0rz = false, arg0iz = false;
10088 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10089 || (arg0i && (arg0iz = real_zerop (arg0i))))
10090 {
10091 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10092 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10093 if (arg0rz && arg1i && real_zerop (arg1i))
10094 {
10095 tree rp = arg1r ? arg1r
10096 : build1 (REALPART_EXPR, rtype, arg1);
10097 tree ip = arg0i ? arg0i
10098 : build1 (IMAGPART_EXPR, rtype, arg0);
10099 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10100 }
10101 else if (arg0iz && arg1r && real_zerop (arg1r))
10102 {
10103 tree rp = arg0r ? arg0r
10104 : build1 (REALPART_EXPR, rtype, arg0);
10105 tree ip = arg1i ? arg1i
10106 : build1 (IMAGPART_EXPR, rtype, arg1);
10107 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10108 }
10109 }
10110 }
10111
10112 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10113 We associate floats only if the user has specified
10114 -fassociative-math. */
10115 if (flag_associative_math
10116 && TREE_CODE (arg1) == PLUS_EXPR
10117 && TREE_CODE (arg0) != MULT_EXPR)
10118 {
10119 tree tree10 = TREE_OPERAND (arg1, 0);
10120 tree tree11 = TREE_OPERAND (arg1, 1);
10121 if (TREE_CODE (tree11) == MULT_EXPR
10122 && TREE_CODE (tree10) == MULT_EXPR)
10123 {
10124 tree tree0;
10125 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10126 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10127 }
10128 }
10129 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10130 We associate floats only if the user has specified
10131 -fassociative-math. */
10132 if (flag_associative_math
10133 && TREE_CODE (arg0) == PLUS_EXPR
10134 && TREE_CODE (arg1) != MULT_EXPR)
10135 {
10136 tree tree00 = TREE_OPERAND (arg0, 0);
10137 tree tree01 = TREE_OPERAND (arg0, 1);
10138 if (TREE_CODE (tree01) == MULT_EXPR
10139 && TREE_CODE (tree00) == MULT_EXPR)
10140 {
10141 tree tree0;
10142 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10143 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10144 }
10145 }
10146 }
10147
10148 bit_rotate:
10149 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10150 is a rotate of A by C1 bits. */
10151 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10152 is a rotate of A by B bits.
10153 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
10154 though in this case CODE must be | and not + or ^, otherwise
10155 it doesn't return A when B is 0. */
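    /* Illustrative example, added for exposition: for a 32-bit unsigned A,
       both (A << 8) | (A >> 24) and (A << B) | (A >> (32 - B)) are
       recognized here as left rotates of A.  */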
10156 {
10157 enum tree_code code0, code1;
10158 tree rtype;
10159 code0 = TREE_CODE (arg0);
10160 code1 = TREE_CODE (arg1);
10161 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10162 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10163 && operand_equal_p (TREE_OPERAND (arg0, 0),
10164 TREE_OPERAND (arg1, 0), 0)
10165 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10166 TYPE_UNSIGNED (rtype))
10167 /* Only create rotates in complete modes. Other cases are not
10168 expanded properly. */
10169 && (element_precision (rtype)
10170 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
10171 {
10172 tree tree01, tree11;
10173 tree orig_tree01, orig_tree11;
10174 enum tree_code code01, code11;
10175
10176 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
10177 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
10178 STRIP_NOPS (tree01);
10179 STRIP_NOPS (tree11);
10180 code01 = TREE_CODE (tree01);
10181 code11 = TREE_CODE (tree11);
10182 if (code11 != MINUS_EXPR
10183 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
10184 {
10185 std::swap (code0, code1);
10186 std::swap (code01, code11);
10187 std::swap (tree01, tree11);
10188 std::swap (orig_tree01, orig_tree11);
10189 }
10190 if (code01 == INTEGER_CST
10191 && code11 == INTEGER_CST
10192 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10193 == element_precision (rtype)))
10194 {
10195 tem = build2_loc (loc, LROTATE_EXPR,
10196 rtype, TREE_OPERAND (arg0, 0),
10197 code0 == LSHIFT_EXPR
10198 ? orig_tree01 : orig_tree11);
10199 return fold_convert_loc (loc, type, tem);
10200 }
10201 else if (code11 == MINUS_EXPR)
10202 {
10203 tree tree110, tree111;
10204 tree110 = TREE_OPERAND (tree11, 0);
10205 tree111 = TREE_OPERAND (tree11, 1);
10206 STRIP_NOPS (tree110);
10207 STRIP_NOPS (tree111);
10208 if (TREE_CODE (tree110) == INTEGER_CST
10209 && compare_tree_int (tree110,
10210 element_precision (rtype)) == 0
10211 && operand_equal_p (tree01, tree111, 0))
10212 {
10213 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10214 ? LROTATE_EXPR : RROTATE_EXPR),
10215 rtype, TREE_OPERAND (arg0, 0),
10216 orig_tree01);
10217 return fold_convert_loc (loc, type, tem);
10218 }
10219 }
10220 else if (code == BIT_IOR_EXPR
10221 && code11 == BIT_AND_EXPR
10222 && pow2p_hwi (element_precision (rtype)))
10223 {
10224 tree tree110, tree111;
10225 tree110 = TREE_OPERAND (tree11, 0);
10226 tree111 = TREE_OPERAND (tree11, 1);
10227 STRIP_NOPS (tree110);
10228 STRIP_NOPS (tree111);
10229 if (TREE_CODE (tree110) == NEGATE_EXPR
10230 && TREE_CODE (tree111) == INTEGER_CST
10231 && compare_tree_int (tree111,
10232 element_precision (rtype) - 1) == 0
10233 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
10234 {
10235 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10236 ? LROTATE_EXPR : RROTATE_EXPR),
10237 rtype, TREE_OPERAND (arg0, 0),
10238 orig_tree01);
10239 return fold_convert_loc (loc, type, tem);
10240 }
10241 }
10242 }
10243 }
10244
10245 associate:
10246 /* In most languages, we can't associate operations on floats through
10247 parentheses. Rather than remember where the parentheses were, we
10248 don't associate floats at all, unless the user has specified
10249 -fassociative-math.
10250 And, we need to make sure type is not saturating. */
10251
10252 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10253 && !TYPE_SATURATING (type))
10254 {
10255 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
10256 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
10257 tree atype = type;
10258 bool ok = true;
10259
10260 /* Split both trees into variables, constants, and literals. Then
10261 associate each group together, the constants with literals,
10262 then the result with variables. This increases the chances of
10263 literals being recombined later and of generating relocatable
10264 expressions for the sum of a constant and literal. */
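      /* Illustrative example, added for exposition: (x + 1) + (y + 2)
	 splits into the variables x, y and the literals 1, 2, which are
	 then recombined as (x + y) + 3.  */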
10265 var0 = split_tree (arg0, type, code,
10266 &minus_var0, &con0, &minus_con0,
10267 &lit0, &minus_lit0, 0);
10268 var1 = split_tree (arg1, type, code,
10269 &minus_var1, &con1, &minus_con1,
10270 &lit1, &minus_lit1, code == MINUS_EXPR);
10271
10272 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10273 if (code == MINUS_EXPR)
10274 code = PLUS_EXPR;
10275
10276 /* With undefined overflow prefer doing association in a type
10277 which wraps on overflow, if that is one of the operand types. */
10278 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
10279 && !TYPE_OVERFLOW_WRAPS (type))
10280 {
10281 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10282 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10283 atype = TREE_TYPE (arg0);
10284 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10285 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10286 atype = TREE_TYPE (arg1);
10287 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10288 }
10289
10290 /* With undefined overflow we can only associate constants with one
10291 variable, and constants whose association doesn't overflow. */
10292 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
10293 && !TYPE_OVERFLOW_WRAPS (atype))
10294 {
10295 if ((var0 && var1) || (minus_var0 && minus_var1))
10296 {
10297 /* ??? If split_tree would handle NEGATE_EXPR we could
10298 simply reject these cases and the allowed cases would
10299 be the var0/minus_var1 ones. */
10300 tree tmp0 = var0 ? var0 : minus_var0;
10301 tree tmp1 = var1 ? var1 : minus_var1;
10302 bool one_neg = false;
10303
10304 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10305 {
10306 tmp0 = TREE_OPERAND (tmp0, 0);
10307 one_neg = !one_neg;
10308 }
10309 if (CONVERT_EXPR_P (tmp0)
10310 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10311 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10312 <= TYPE_PRECISION (atype)))
10313 tmp0 = TREE_OPERAND (tmp0, 0);
10314 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10315 {
10316 tmp1 = TREE_OPERAND (tmp1, 0);
10317 one_neg = !one_neg;
10318 }
10319 if (CONVERT_EXPR_P (tmp1)
10320 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10321 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10322 <= TYPE_PRECISION (atype)))
10323 tmp1 = TREE_OPERAND (tmp1, 0);
10324 /* The only case we can still associate with two variables
10325 is if they cancel out. */
10326 if (!one_neg
10327 || !operand_equal_p (tmp0, tmp1, 0))
10328 ok = false;
10329 }
10330 else if ((var0 && minus_var1
10331 && ! operand_equal_p (var0, minus_var1, 0))
10332 || (minus_var0 && var1
10333 && ! operand_equal_p (minus_var0, var1, 0)))
10334 ok = false;
10335 }
10336
10337 /* Only do something if we found more than two objects. Otherwise,
10338 nothing has changed and we risk infinite recursion. */
10339 if (ok
10340 && ((var0 != 0) + (var1 != 0)
10341 + (minus_var0 != 0) + (minus_var1 != 0)
10342 + (con0 != 0) + (con1 != 0)
10343 + (minus_con0 != 0) + (minus_con1 != 0)
10344 + (lit0 != 0) + (lit1 != 0)
10345 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
10346 {
10347 var0 = associate_trees (loc, var0, var1, code, atype);
10348 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
10349 code, atype);
10350 con0 = associate_trees (loc, con0, con1, code, atype);
10351 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
10352 code, atype);
10353 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10354 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10355 code, atype);
10356
10357 if (minus_var0 && var0)
10358 {
10359 var0 = associate_trees (loc, var0, minus_var0,
10360 MINUS_EXPR, atype);
10361 minus_var0 = 0;
10362 }
10363 if (minus_con0 && con0)
10364 {
10365 con0 = associate_trees (loc, con0, minus_con0,
10366 MINUS_EXPR, atype);
10367 minus_con0 = 0;
10368 }
10369
10370 /* Preserve the MINUS_EXPR if the negative part of the literal is
10371 greater than the positive part. Otherwise, the multiplicative
10372 folding code (i.e. extract_muldiv) may be fooled when
10373 unsigned constants are subtracted, as in the following
10374 example: ((X*2 + 4) - 8U)/2. */
10375 if (minus_lit0 && lit0)
10376 {
10377 if (TREE_CODE (lit0) == INTEGER_CST
10378 && TREE_CODE (minus_lit0) == INTEGER_CST
10379 && tree_int_cst_lt (lit0, minus_lit0)
10380 /* But avoid ending up with only negated parts. */
10381 && (var0 || con0))
10382 {
10383 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10384 MINUS_EXPR, atype);
10385 lit0 = 0;
10386 }
10387 else
10388 {
10389 lit0 = associate_trees (loc, lit0, minus_lit0,
10390 MINUS_EXPR, atype);
10391 minus_lit0 = 0;
10392 }
10393 }
10394
10395 /* Don't introduce overflows through reassociation. */
10396 if ((lit0 && TREE_OVERFLOW_P (lit0))
10397 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
10398 return NULL_TREE;
10399
10400 /* Fold lit0 and minus_lit0 into con0 and minus_con0. */
10401 con0 = associate_trees (loc, con0, lit0, code, atype);
10402 lit0 = 0;
10403 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
10404 code, atype);
10405 minus_lit0 = 0;
10406
10407 /* Eliminate minus_con0. */
10408 if (minus_con0)
10409 {
10410 if (con0)
10411 con0 = associate_trees (loc, con0, minus_con0,
10412 MINUS_EXPR, atype);
10413 else if (var0)
10414 var0 = associate_trees (loc, var0, minus_con0,
10415 MINUS_EXPR, atype);
10416 else
10417 gcc_unreachable ();
10418 minus_con0 = 0;
10419 }
10420
10421 /* Eliminate minus_var0. */
10422 if (minus_var0)
10423 {
10424 if (con0)
10425 con0 = associate_trees (loc, con0, minus_var0,
10426 MINUS_EXPR, atype);
10427 else
10428 gcc_unreachable ();
10429 minus_var0 = 0;
10430 }
10431
10432 return
10433 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10434 code, atype));
10435 }
10436 }
10437
10438 return NULL_TREE;
10439
10440 case POINTER_DIFF_EXPR:
10441 case MINUS_EXPR:
10442 /* Fold &a[i] - &a[j] to i-j. */
10443 if (TREE_CODE (arg0) == ADDR_EXPR
10444 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10445 && TREE_CODE (arg1) == ADDR_EXPR
10446 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10447 {
10448 tree tem = fold_addr_of_array_ref_difference (loc, type,
10449 TREE_OPERAND (arg0, 0),
10450 TREE_OPERAND (arg1, 0),
10451 code
10452 == POINTER_DIFF_EXPR);
10453 if (tem)
10454 return tem;
10455 }
10456
10457 /* The transformations below do not apply to pointers. */
10458 if (code == POINTER_DIFF_EXPR)
10459 return NULL_TREE;
10460
10461 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10462 if (TREE_CODE (arg0) == NEGATE_EXPR
10463 && negate_expr_p (op1)
10464 /* If arg0 is e.g. unsigned int and type is int, then this could
10465 introduce UB, because if A is INT_MIN at runtime, the original
10466 expression can be well defined while the latter is not.
10467 See PR83269. */
10468 && !(ANY_INTEGRAL_TYPE_P (type)
10469 && TYPE_OVERFLOW_UNDEFINED (type)
10470 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10471 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10472 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
10473 fold_convert_loc (loc, type,
10474 TREE_OPERAND (arg0, 0)));
10475
10476 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10477 __complex__ ( x, -y ). This is not the same for SNaNs or if
10478 signed zeros are involved. */
10479 if (!HONOR_SNANS (element_mode (arg0))
10480 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10481 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10482 {
10483 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10484 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10485 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10486 bool arg0rz = false, arg0iz = false;
10487 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10488 || (arg0i && (arg0iz = real_zerop (arg0i))))
10489 {
10490 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10491 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10492 if (arg0rz && arg1i && real_zerop (arg1i))
10493 {
10494 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10495 arg1r ? arg1r
10496 : build1 (REALPART_EXPR, rtype, arg1));
10497 tree ip = arg0i ? arg0i
10498 : build1 (IMAGPART_EXPR, rtype, arg0);
10499 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10500 }
10501 else if (arg0iz && arg1r && real_zerop (arg1r))
10502 {
10503 tree rp = arg0r ? arg0r
10504 : build1 (REALPART_EXPR, rtype, arg0);
10505 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10506 arg1i ? arg1i
10507 : build1 (IMAGPART_EXPR, rtype, arg1));
10508 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10509 }
10510 }
10511 }
10512
10513 /* A - B -> A + (-B) if B is easily negatable. */
10514 if (negate_expr_p (op1)
10515 && ! TYPE_OVERFLOW_SANITIZED (type)
10516 && ((FLOAT_TYPE_P (type)
10517 /* Avoid this transformation if B is a positive REAL_CST. */
10518 && (TREE_CODE (op1) != REAL_CST
10519 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10520 || INTEGRAL_TYPE_P (type)))
10521 return fold_build2_loc (loc, PLUS_EXPR, type,
10522 fold_convert_loc (loc, type, arg0),
10523 negate_expr (op1));
10524
10525 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same,
10526 or one of them being 1. Make sure the type is not saturating and has the signedness of
10527 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10528 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
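/* For instance, A*4 - A*2 can be simplified to A*2, and X*C - Y*C to
   (X - Y)*C, when the overflow rules of the type allow it.  */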
10529 if ((TREE_CODE (arg0) == MULT_EXPR
10530 || TREE_CODE (arg1) == MULT_EXPR)
10531 && !TYPE_SATURATING (type)
10532 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10533 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10534 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10535 {
10536 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10537 if (tem)
10538 return tem;
10539 }
10540
10541 goto associate;
10542
10543 case MULT_EXPR:
10544 if (! FLOAT_TYPE_P (type))
10545 {
10546 /* Transform x * -C into -x * C if x is easily negatable. */
10547 if (TREE_CODE (op1) == INTEGER_CST
10548 && tree_int_cst_sgn (op1) == -1
10549 && negate_expr_p (op0)
10550 && negate_expr_p (op1)
10551 && (tem = negate_expr (op1)) != op1
10552 && ! TREE_OVERFLOW (tem))
10553 return fold_build2_loc (loc, MULT_EXPR, type,
10554 fold_convert_loc (loc, type,
10555 negate_expr (op0)), tem);
10556
10557 strict_overflow_p = false;
10558 if (TREE_CODE (arg1) == INTEGER_CST
10559 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10560 &strict_overflow_p)) != 0)
10561 {
10562 if (strict_overflow_p)
10563 fold_overflow_warning (("assuming signed overflow does not "
10564 "occur when simplifying "
10565 "multiplication"),
10566 WARN_STRICT_OVERFLOW_MISC);
10567 return fold_convert_loc (loc, type, tem);
10568 }
10569
10570 /* Optimize z * conj(z) for integer complex numbers. */
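/* For z = a + b*i this yields a*a + b*b, with a zero imaginary part.  */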
10571 if (TREE_CODE (arg0) == CONJ_EXPR
10572 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10573 return fold_mult_zconjz (loc, type, arg1);
10574 if (TREE_CODE (arg1) == CONJ_EXPR
10575 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10576 return fold_mult_zconjz (loc, type, arg0);
10577 }
10578 else
10579 {
10580 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10581 This is not the same for NaNs or if signed zeros are
10582 involved. */
10583 if (!HONOR_NANS (arg0)
10584 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10585 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10586 && TREE_CODE (arg1) == COMPLEX_CST
10587 && real_zerop (TREE_REALPART (arg1)))
10588 {
10589 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10590 if (real_onep (TREE_IMAGPART (arg1)))
10591 return
10592 fold_build2_loc (loc, COMPLEX_EXPR, type,
10593 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10594 rtype, arg0)),
10595 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10596 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10597 return
10598 fold_build2_loc (loc, COMPLEX_EXPR, type,
10599 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10600 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10601 rtype, arg0)));
10602 }
10603
10604 /* Optimize z * conj(z) for floating point complex numbers.
10605 Guarded by flag_unsafe_math_optimizations as non-finite
10606 imaginary components don't produce scalar results. */
10607 if (flag_unsafe_math_optimizations
10608 && TREE_CODE (arg0) == CONJ_EXPR
10609 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10610 return fold_mult_zconjz (loc, type, arg1);
10611 if (flag_unsafe_math_optimizations
10612 && TREE_CODE (arg1) == CONJ_EXPR
10613 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10614 return fold_mult_zconjz (loc, type, arg0);
10615 }
10616 goto associate;
10617
10618 case BIT_IOR_EXPR:
10619 /* Canonicalize (X & C1) | C2. */
10620 if (TREE_CODE (arg0) == BIT_AND_EXPR
10621 && TREE_CODE (arg1) == INTEGER_CST
10622 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10623 {
10624 int width = TYPE_PRECISION (type), w;
10625 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
10626 wide_int c2 = wi::to_wide (arg1);
10627
10628 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10629 if ((c1 & c2) == c1)
10630 return omit_one_operand_loc (loc, type, arg1,
10631 TREE_OPERAND (arg0, 0));
10632
10633 wide_int msk = wi::mask (width, false,
10634 TYPE_PRECISION (TREE_TYPE (arg1)));
10635
10636 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10637 if (wi::bit_and_not (msk, c1 | c2) == 0)
10638 {
10639 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10640 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10641 }
10642
10643 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10644 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10645 mode which allows further optimizations. */
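/* E.g. (X & 0x0f0f) | 0x0f00 becomes (X & 0xf) | 0x0f00, but
   (X & 0xff) | 0xff00 is left alone, because 0xff is already a mask
   of a narrower mode (a byte on typical targets) that later
   optimizations can exploit.  */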
10646 c1 &= msk;
10647 c2 &= msk;
10648 wide_int c3 = wi::bit_and_not (c1, c2);
10649 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10650 {
10651 wide_int mask = wi::mask (w, false,
10652 TYPE_PRECISION (type));
10653 if (((c1 | c2) & mask) == mask
10654 && wi::bit_and_not (c1, mask) == 0)
10655 {
10656 c3 = mask;
10657 break;
10658 }
10659 }
10660
10661 if (c3 != c1)
10662 {
10663 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10664 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
10665 wide_int_to_tree (type, c3));
10666 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10667 }
10668 }
10669
10670 /* See if this can be simplified into a rotate first. If that
10671 is unsuccessful continue in the association code. */
10672 goto bit_rotate;
10673
10674 case BIT_XOR_EXPR:
10675 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10676 if (TREE_CODE (arg0) == BIT_AND_EXPR
10677 && INTEGRAL_TYPE_P (type)
10678 && integer_onep (TREE_OPERAND (arg0, 1))
10679 && integer_onep (arg1))
10680 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10681 build_zero_cst (TREE_TYPE (arg0)));
10682
10683 /* See if this can be simplified into a rotate first. If that
10684 is unsuccessful continue in the association code. */
10685 goto bit_rotate;
10686
10687 case BIT_AND_EXPR:
10688 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10689 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10690 && INTEGRAL_TYPE_P (type)
10691 && integer_onep (TREE_OPERAND (arg0, 1))
10692 && integer_onep (arg1))
10693 {
10694 tree tem2;
10695 tem = TREE_OPERAND (arg0, 0);
10696 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10697 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10698 tem, tem2);
10699 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10700 build_zero_cst (TREE_TYPE (tem)));
10701 }
10702 /* Fold ~X & 1 as (X & 1) == 0. */
10703 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10704 && INTEGRAL_TYPE_P (type)
10705 && integer_onep (arg1))
10706 {
10707 tree tem2;
10708 tem = TREE_OPERAND (arg0, 0);
10709 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10710 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10711 tem, tem2);
10712 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10713 build_zero_cst (TREE_TYPE (tem)));
10714 }
10715 /* Fold !X & 1 as X == 0. */
10716 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10717 && integer_onep (arg1))
10718 {
10719 tem = TREE_OPERAND (arg0, 0);
10720 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10721 build_zero_cst (TREE_TYPE (tem)));
10722 }
10723
10724 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10725 multiple of 1 << CST. */
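/* For example, (X * 8) & -8 is just X * 8, because the low three bits
   of X * 8 are already zero.  */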
10726 if (TREE_CODE (arg1) == INTEGER_CST)
10727 {
10728 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10729 wide_int ncst1 = -cst1;
10730 if ((cst1 & ncst1) == ncst1
10731 && multiple_of_p (type, arg0,
10732 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10733 return fold_convert_loc (loc, type, arg0);
10734 }
10735
10736 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10737 bits from CST2. */
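/* For example, (X * 8) & 7 folds to 0, and (X * 4) & 9 becomes
   (X * 4) & 8, since the low two bits of X * 4 are known to be zero.  */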
10738 if (TREE_CODE (arg1) == INTEGER_CST
10739 && TREE_CODE (arg0) == MULT_EXPR
10740 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10741 {
10742 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
10743 wide_int masked
10744 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
10745
10746 if (masked == 0)
10747 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10748 arg0, arg1);
10749 else if (masked != warg1)
10750 {
10751 /* Avoid the transform if arg1 is a mask of some
10752 mode which allows further optimizations. */
10753 int pop = wi::popcount (warg1);
10754 if (!(pop >= BITS_PER_UNIT
10755 && pow2p_hwi (pop)
10756 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10757 return fold_build2_loc (loc, code, type, op0,
10758 wide_int_to_tree (type, masked));
10759 }
10760 }
10761
10762 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10763 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10764 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10765 {
10766 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10767
10768 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10769 if (mask == -1)
10770 return
10771 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10772 }
10773
10774 goto associate;
10775
10776 case RDIV_EXPR:
10777 /* Don't touch a floating-point divide by zero unless the mode
10778 of the constant can represent infinity. */
10779 if (TREE_CODE (arg1) == REAL_CST
10780 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10781 && real_zerop (arg1))
10782 return NULL_TREE;
10783
10784 /* (-A) / (-B) -> A / B */
10785 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10786 return fold_build2_loc (loc, RDIV_EXPR, type,
10787 TREE_OPERAND (arg0, 0),
10788 negate_expr (arg1));
10789 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10790 return fold_build2_loc (loc, RDIV_EXPR, type,
10791 negate_expr (arg0),
10792 TREE_OPERAND (arg1, 0));
10793 return NULL_TREE;
10794
10795 case TRUNC_DIV_EXPR:
10796 /* Fall through */
10797
10798 case FLOOR_DIV_EXPR:
10799 /* Simplify A / (B << N) where A and B are positive and B is
10800 a power of 2, to A >> (N + log2(B)). */
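/* For instance, with unsigned A, A / (4 << N) becomes A >> (N + 2).  */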
10801 strict_overflow_p = false;
10802 if (TREE_CODE (arg1) == LSHIFT_EXPR
10803 && (TYPE_UNSIGNED (type)
10804 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10805 {
10806 tree sval = TREE_OPERAND (arg1, 0);
10807 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10808 {
10809 tree sh_cnt = TREE_OPERAND (arg1, 1);
10810 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10811 wi::exact_log2 (wi::to_wide (sval)));
10812
10813 if (strict_overflow_p)
10814 fold_overflow_warning (("assuming signed overflow does not "
10815 "occur when simplifying A / (B << N)"),
10816 WARN_STRICT_OVERFLOW_MISC);
10817
10818 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10819 sh_cnt, pow2);
10820 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10821 fold_convert_loc (loc, type, arg0), sh_cnt);
10822 }
10823 }
10824
10825 /* Fall through */
10826
10827 case ROUND_DIV_EXPR:
10828 case CEIL_DIV_EXPR:
10829 case EXACT_DIV_EXPR:
10830 if (integer_zerop (arg1))
10831 return NULL_TREE;
10832
10833 /* Convert -A / -B to A / B when the type is signed and overflow is
10834 undefined. */
10835 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10836 && TREE_CODE (op0) == NEGATE_EXPR
10837 && negate_expr_p (op1))
10838 {
10839 if (INTEGRAL_TYPE_P (type))
10840 fold_overflow_warning (("assuming signed overflow does not occur "
10841 "when distributing negation across "
10842 "division"),
10843 WARN_STRICT_OVERFLOW_MISC);
10844 return fold_build2_loc (loc, code, type,
10845 fold_convert_loc (loc, type,
10846 TREE_OPERAND (arg0, 0)),
10847 negate_expr (op1));
10848 }
10849 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10850 && TREE_CODE (arg1) == NEGATE_EXPR
10851 && negate_expr_p (op0))
10852 {
10853 if (INTEGRAL_TYPE_P (type))
10854 fold_overflow_warning (("assuming signed overflow does not occur "
10855 "when distributing negation across "
10856 "division"),
10857 WARN_STRICT_OVERFLOW_MISC);
10858 return fold_build2_loc (loc, code, type,
10859 negate_expr (op0),
10860 fold_convert_loc (loc, type,
10861 TREE_OPERAND (arg1, 0)));
10862 }
10863
10864 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10865 operation, EXACT_DIV_EXPR.
10866
10867 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10868 At one time others generated faster code; it's not clear whether they
10869 still do after the last round of changes to the DIV code in expmed.c. */
10870 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10871 && multiple_of_p (type, arg0, arg1))
10872 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10873 fold_convert (type, arg0),
10874 fold_convert (type, arg1));
10875
10876 strict_overflow_p = false;
10877 if (TREE_CODE (arg1) == INTEGER_CST
10878 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10879 &strict_overflow_p)) != 0)
10880 {
10881 if (strict_overflow_p)
10882 fold_overflow_warning (("assuming signed overflow does not occur "
10883 "when simplifying division"),
10884 WARN_STRICT_OVERFLOW_MISC);
10885 return fold_convert_loc (loc, type, tem);
10886 }
10887
10888 return NULL_TREE;
10889
10890 case CEIL_MOD_EXPR:
10891 case FLOOR_MOD_EXPR:
10892 case ROUND_MOD_EXPR:
10893 case TRUNC_MOD_EXPR:
10894 strict_overflow_p = false;
10895 if (TREE_CODE (arg1) == INTEGER_CST
10896 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10897 &strict_overflow_p)) != 0)
10898 {
10899 if (strict_overflow_p)
10900 fold_overflow_warning (("assuming signed overflow does not occur "
10901 "when simplifying modulus"),
10902 WARN_STRICT_OVERFLOW_MISC);
10903 return fold_convert_loc (loc, type, tem);
10904 }
10905
10906 return NULL_TREE;
10907
10908 case LROTATE_EXPR:
10909 case RROTATE_EXPR:
10910 case RSHIFT_EXPR:
10911 case LSHIFT_EXPR:
10912 /* Since a negative shift count is not well-defined,
10913 don't try to compute it in the compiler. */
10914 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10915 return NULL_TREE;
10916
10917 prec = element_precision (type);
10918
10919 /* If we have a rotate of a bit operation with the rotate count and
10920 the second operand of the bit operation both constant,
10921 permute the two operations. */
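/* E.g. (X & C) ror N becomes (X ror N) & (C ror N), where the rotate
   of the constant C by N folds at compile time.  */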
10922 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10923 && (TREE_CODE (arg0) == BIT_AND_EXPR
10924 || TREE_CODE (arg0) == BIT_IOR_EXPR
10925 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10926 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10927 {
10928 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10929 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10930 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10931 fold_build2_loc (loc, code, type,
10932 arg00, arg1),
10933 fold_build2_loc (loc, code, type,
10934 arg01, arg1));
10935 }
10936
10937 /* Two consecutive rotates adding up to some integer
10938 multiple of the precision of the type can be ignored. */
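/* E.g. for 32-bit X, (X ror 5) ror 27 is X again.  */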
10939 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10940 && TREE_CODE (arg0) == RROTATE_EXPR
10941 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10942 && wi::umod_trunc (wi::to_wide (arg1)
10943 + wi::to_wide (TREE_OPERAND (arg0, 1)),
10944 prec) == 0)
10945 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10946
10947 return NULL_TREE;
10948
10949 case MIN_EXPR:
10950 case MAX_EXPR:
10951 goto associate;
10952
10953 case TRUTH_ANDIF_EXPR:
10954 /* Note that the operands of this must be ints
10955 and their values must be 0 or 1.
10956 ("true" is a fixed value perhaps depending on the language.) */
10957 /* If first arg is constant zero, return it. */
10958 if (integer_zerop (arg0))
10959 return fold_convert_loc (loc, type, arg0);
10960 /* FALLTHRU */
10961 case TRUTH_AND_EXPR:
10962 /* If either arg is constant true, drop it. */
10963 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10964 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10965 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10966 /* Preserve sequence points. */
10967 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10968 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10969 /* If second arg is constant zero, result is zero, but first arg
10970 must be evaluated. */
10971 if (integer_zerop (arg1))
10972 return omit_one_operand_loc (loc, type, arg1, arg0);
10973 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10974 case will be handled here. */
10975 if (integer_zerop (arg0))
10976 return omit_one_operand_loc (loc, type, arg0, arg1);
10977
10978 /* !X && X is always false. */
10979 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10980 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10981 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10982 /* X && !X is always false. */
10983 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10984 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10985 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10986
10987 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10988 means A >= Y && A != MAX, but in this case we know that
10989 A < X <= MAX. */
10990
10991 if (!TREE_SIDE_EFFECTS (arg0)
10992 && !TREE_SIDE_EFFECTS (arg1))
10993 {
10994 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10995 if (tem && !operand_equal_p (tem, arg0, 0))
10996 return fold_build2_loc (loc, code, type, tem, arg1);
10997
10998 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10999 if (tem && !operand_equal_p (tem, arg1, 0))
11000 return fold_build2_loc (loc, code, type, arg0, tem);
11001 }
11002
11003 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11004 != NULL_TREE)
11005 return tem;
11006
11007 return NULL_TREE;
11008
11009 case TRUTH_ORIF_EXPR:
11010 /* Note that the operands of this must be ints
11011 and their values must be 0 or true.
11012 ("true" is a fixed value perhaps depending on the language.) */
11013 /* If first arg is constant true, return it. */
11014 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11015 return fold_convert_loc (loc, type, arg0);
11016 /* FALLTHRU */
11017 case TRUTH_OR_EXPR:
11018 /* If either arg is constant zero, drop it. */
11019 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11020 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11021 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11022 /* Preserve sequence points. */
11023 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11024 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11025 /* If second arg is constant true, result is true, but we must
11026 evaluate first arg. */
11027 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11028 return omit_one_operand_loc (loc, type, arg1, arg0);
11029 /* Likewise for first arg, but note this only occurs here for
11030 TRUTH_OR_EXPR. */
11031 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11032 return omit_one_operand_loc (loc, type, arg0, arg1);
11033
11034 /* !X || X is always true. */
11035 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11036 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11037 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11038 /* X || !X is always true. */
11039 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11040 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11041 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11042
11043 /* (X && !Y) || (!X && Y) is X ^ Y */
11044 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11045 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11046 {
11047 tree a0, a1, l0, l1, n0, n1;
11048
11049 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11050 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11051
11052 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11053 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11054
11055 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11056 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11057
11058 if ((operand_equal_p (n0, a0, 0)
11059 && operand_equal_p (n1, a1, 0))
11060 || (operand_equal_p (n0, a1, 0)
11061 && operand_equal_p (n1, a0, 0)))
11062 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11063 }
11064
11065 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11066 != NULL_TREE)
11067 return tem;
11068
11069 return NULL_TREE;
11070
11071 case TRUTH_XOR_EXPR:
11072 /* If the second arg is constant zero, drop it. */
11073 if (integer_zerop (arg1))
11074 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11075 /* If the second arg is constant true, this is a logical inversion. */
11076 if (integer_onep (arg1))
11077 {
11078 tem = invert_truthvalue_loc (loc, arg0);
11079 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11080 }
11081 /* Identical arguments cancel to zero. */
11082 if (operand_equal_p (arg0, arg1, 0))
11083 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11084
11085 /* !X ^ X is always true. */
11086 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11087 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11088 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11089
11090 /* X ^ !X is always true. */
11091 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11092 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11093 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11094
11095 return NULL_TREE;
11096
11097 case EQ_EXPR:
11098 case NE_EXPR:
11099 STRIP_NOPS (arg0);
11100 STRIP_NOPS (arg1);
11101
11102 tem = fold_comparison (loc, code, type, op0, op1);
11103 if (tem != NULL_TREE)
11104 return tem;
11105
11106 /* bool_var != 1 becomes !bool_var. */
11107 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11108 && code == NE_EXPR)
11109 return fold_convert_loc (loc, type,
11110 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11111 TREE_TYPE (arg0), arg0));
11112
11113 /* bool_var == 0 becomes !bool_var. */
11114 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11115 && code == EQ_EXPR)
11116 return fold_convert_loc (loc, type,
11117 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11118 TREE_TYPE (arg0), arg0));
11119
11120 /* !exp != 0 becomes !exp */
11121 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11122 && code == NE_EXPR)
11123 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11124
11125 /* If this is an EQ or NE comparison with zero and ARG0 is
11126 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11127 two operations, but the latter can be done in one less insn
11128 on machines that have only two-operand insns or on which a
11129 constant cannot be the first operand. */
11130 if (TREE_CODE (arg0) == BIT_AND_EXPR
11131 && integer_zerop (arg1))
11132 {
11133 tree arg00 = TREE_OPERAND (arg0, 0);
11134 tree arg01 = TREE_OPERAND (arg0, 1);
11135 if (TREE_CODE (arg00) == LSHIFT_EXPR
11136 && integer_onep (TREE_OPERAND (arg00, 0)))
11137 {
11138 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11139 arg01, TREE_OPERAND (arg00, 1));
11140 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11141 build_int_cst (TREE_TYPE (arg0), 1));
11142 return fold_build2_loc (loc, code, type,
11143 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11144 arg1);
11145 }
11146 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11147 && integer_onep (TREE_OPERAND (arg01, 0)))
11148 {
11149 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11150 arg00, TREE_OPERAND (arg01, 1));
11151 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11152 build_int_cst (TREE_TYPE (arg0), 1));
11153 return fold_build2_loc (loc, code, type,
11154 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11155 arg1);
11156 }
11157 }
11158
11159 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11160 C1 is a valid shift constant, and C2 is a power of two, i.e.
11161 a single bit. */
11162 if (TREE_CODE (arg0) == BIT_AND_EXPR
11163 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11164 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11165 == INTEGER_CST
11166 && integer_pow2p (TREE_OPERAND (arg0, 1))
11167 && integer_zerop (arg1))
11168 {
11169 tree itype = TREE_TYPE (arg0);
11170 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11171 prec = TYPE_PRECISION (itype);
11172
11173 /* Check for a valid shift count. */
11174 if (wi::ltu_p (wi::to_wide (arg001), prec))
11175 {
11176 tree arg01 = TREE_OPERAND (arg0, 1);
11177 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11178 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11179 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11180 can be rewritten as (X & (C2 << C1)) != 0. */
11181 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11182 {
11183 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11184 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11185 return fold_build2_loc (loc, code, type, tem,
11186 fold_convert_loc (loc, itype, arg1));
11187 }
11188 /* Otherwise, for signed (arithmetic) shifts,
11189 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11190 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11191 else if (!TYPE_UNSIGNED (itype))
11192 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11193 arg000, build_int_cst (itype, 0));
11194 /* Otherwise, for unsigned (logical) shifts,
11195 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11196 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11197 else
11198 return omit_one_operand_loc (loc, type,
11199 code == EQ_EXPR ? integer_one_node
11200 : integer_zero_node,
11201 arg000);
11202 }
11203 }
11204
11205 /* If this is a comparison of a field, we may be able to simplify it. */
11206 if ((TREE_CODE (arg0) == COMPONENT_REF
11207 || TREE_CODE (arg0) == BIT_FIELD_REF)
11208 /* Handle the constant case even without -O
11209 to make sure the warnings are given. */
11210 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11211 {
11212 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11213 if (t1)
11214 return t1;
11215 }
11216
11217 /* Optimize comparisons of strlen vs zero to a compare of the
11218 first character of the string vs zero. To wit,
11219 strlen(ptr) == 0 => *ptr == 0
11220 strlen(ptr) != 0 => *ptr != 0
11221 Other cases should reduce to one of these two (or a constant)
11222 due to the return value of strlen being unsigned. */
11223 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
11224 {
11225 tree fndecl = get_callee_fndecl (arg0);
11226
11227 if (fndecl
11228 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
11229 && call_expr_nargs (arg0) == 1
11230 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
11231 == POINTER_TYPE))
11232 {
11233 tree ptrtype
11234 = build_pointer_type (build_qualified_type (char_type_node,
11235 TYPE_QUAL_CONST));
11236 tree ptr = fold_convert_loc (loc, ptrtype,
11237 CALL_EXPR_ARG (arg0, 0));
11238 tree iref = build_fold_indirect_ref_loc (loc, ptr);
11239 return fold_build2_loc (loc, code, type, iref,
11240 build_int_cst (TREE_TYPE (iref), 0));
11241 }
11242 }
11243
11244 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11245 of X. Similarly fold (X >> C) == 0 into X >= 0. */
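/* E.g. for 32-bit int X, (X >> 31) != 0 is equivalent to X < 0.  */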
11246 if (TREE_CODE (arg0) == RSHIFT_EXPR
11247 && integer_zerop (arg1)
11248 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11249 {
11250 tree arg00 = TREE_OPERAND (arg0, 0);
11251 tree arg01 = TREE_OPERAND (arg0, 1);
11252 tree itype = TREE_TYPE (arg00);
11253 if (wi::to_wide (arg01) == element_precision (itype) - 1)
11254 {
11255 if (TYPE_UNSIGNED (itype))
11256 {
11257 itype = signed_type_for (itype);
11258 arg00 = fold_convert_loc (loc, itype, arg00);
11259 }
11260 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11261 type, arg00, build_zero_cst (itype));
11262 }
11263 }
11264
11265 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11266 (X & C) == 0 when C is a single bit. */
11267 if (TREE_CODE (arg0) == BIT_AND_EXPR
11268 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11269 && integer_zerop (arg1)
11270 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11271 {
11272 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11273 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11274 TREE_OPERAND (arg0, 1));
11275 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11276 type, tem,
11277 fold_convert_loc (loc, TREE_TYPE (arg0),
11278 arg1));
11279 }
11280
11281 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11282 constant C is a power of two, i.e. a single bit. */
11283 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11284 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11285 && integer_zerop (arg1)
11286 && integer_pow2p (TREE_OPERAND (arg0, 1))
11287 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11288 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11289 {
11290 tree arg00 = TREE_OPERAND (arg0, 0);
11291 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11292 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11293 }
11294
11295 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11296 when C is a power of two, i.e. a single bit. */
11297 if (TREE_CODE (arg0) == BIT_AND_EXPR
11298 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11299 && integer_zerop (arg1)
11300 && integer_pow2p (TREE_OPERAND (arg0, 1))
11301 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11302 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11303 {
11304 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11305 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11306 arg000, TREE_OPERAND (arg0, 1));
11307 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11308 tem, build_int_cst (TREE_TYPE (tem), 0));
11309 }
11310
11311 if (integer_zerop (arg1)
11312 && tree_expr_nonzero_p (arg0))
11313 {
11314 tree res = constant_boolean_node (code==NE_EXPR, type);
11315 return omit_one_operand_loc (loc, type, res, arg0);
11316 }
11317
11318 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11319 if (TREE_CODE (arg0) == BIT_AND_EXPR
11320 && TREE_CODE (arg1) == BIT_AND_EXPR)
11321 {
11322 tree arg00 = TREE_OPERAND (arg0, 0);
11323 tree arg01 = TREE_OPERAND (arg0, 1);
11324 tree arg10 = TREE_OPERAND (arg1, 0);
11325 tree arg11 = TREE_OPERAND (arg1, 1);
11326 tree itype = TREE_TYPE (arg0);
11327
11328 if (operand_equal_p (arg01, arg11, 0))
11329 {
11330 tem = fold_convert_loc (loc, itype, arg10);
11331 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11332 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11333 return fold_build2_loc (loc, code, type, tem,
11334 build_zero_cst (itype));
11335 }
11336 if (operand_equal_p (arg01, arg10, 0))
11337 {
11338 tem = fold_convert_loc (loc, itype, arg11);
11339 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11340 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11341 return fold_build2_loc (loc, code, type, tem,
11342 build_zero_cst (itype));
11343 }
11344 if (operand_equal_p (arg00, arg11, 0))
11345 {
11346 tem = fold_convert_loc (loc, itype, arg10);
11347 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11348 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11349 return fold_build2_loc (loc, code, type, tem,
11350 build_zero_cst (itype));
11351 }
11352 if (operand_equal_p (arg00, arg10, 0))
11353 {
11354 tem = fold_convert_loc (loc, itype, arg11);
11355 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11356 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11357 return fold_build2_loc (loc, code, type, tem,
11358 build_zero_cst (itype));
11359 }
11360 }
11361
11362 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11363 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11364 {
11365 tree arg00 = TREE_OPERAND (arg0, 0);
11366 tree arg01 = TREE_OPERAND (arg0, 1);
11367 tree arg10 = TREE_OPERAND (arg1, 0);
11368 tree arg11 = TREE_OPERAND (arg1, 1);
11369 tree itype = TREE_TYPE (arg0);
11370
11371 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11372 operand_equal_p guarantees no side-effects so we don't need
11373 to use omit_one_operand on Z. */
11374 if (operand_equal_p (arg01, arg11, 0))
11375 return fold_build2_loc (loc, code, type, arg00,
11376 fold_convert_loc (loc, TREE_TYPE (arg00),
11377 arg10));
11378 if (operand_equal_p (arg01, arg10, 0))
11379 return fold_build2_loc (loc, code, type, arg00,
11380 fold_convert_loc (loc, TREE_TYPE (arg00),
11381 arg11));
11382 if (operand_equal_p (arg00, arg11, 0))
11383 return fold_build2_loc (loc, code, type, arg01,
11384 fold_convert_loc (loc, TREE_TYPE (arg01),
11385 arg10));
11386 if (operand_equal_p (arg00, arg10, 0))
11387 return fold_build2_loc (loc, code, type, arg01,
11388 fold_convert_loc (loc, TREE_TYPE (arg01),
11389 arg11));
11390
11391 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11392 if (TREE_CODE (arg01) == INTEGER_CST
11393 && TREE_CODE (arg11) == INTEGER_CST)
11394 {
11395 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11396 fold_convert_loc (loc, itype, arg11));
11397 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11398 return fold_build2_loc (loc, code, type, tem,
11399 fold_convert_loc (loc, itype, arg10));
11400 }
11401 }
11402
11403 /* Attempt to simplify equality/inequality comparisons of complex
11404 values. Only lower the comparison if the result is known or
11405 can be simplified to a single scalar comparison. */
11406 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11407 || TREE_CODE (arg0) == COMPLEX_CST)
11408 && (TREE_CODE (arg1) == COMPLEX_EXPR
11409 || TREE_CODE (arg1) == COMPLEX_CST))
11410 {
11411 tree real0, imag0, real1, imag1;
11412 tree rcond, icond;
11413
11414 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11415 {
11416 real0 = TREE_OPERAND (arg0, 0);
11417 imag0 = TREE_OPERAND (arg0, 1);
11418 }
11419 else
11420 {
11421 real0 = TREE_REALPART (arg0);
11422 imag0 = TREE_IMAGPART (arg0);
11423 }
11424
11425 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11426 {
11427 real1 = TREE_OPERAND (arg1, 0);
11428 imag1 = TREE_OPERAND (arg1, 1);
11429 }
11430 else
11431 {
11432 real1 = TREE_REALPART (arg1);
11433 imag1 = TREE_IMAGPART (arg1);
11434 }
11435
11436 rcond = fold_binary_loc (loc, code, type, real0, real1);
11437 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11438 {
11439 if (integer_zerop (rcond))
11440 {
11441 if (code == EQ_EXPR)
11442 return omit_two_operands_loc (loc, type, boolean_false_node,
11443 imag0, imag1);
11444 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11445 }
11446 else
11447 {
11448 if (code == NE_EXPR)
11449 return omit_two_operands_loc (loc, type, boolean_true_node,
11450 imag0, imag1);
11451 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11452 }
11453 }
11454
11455 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11456 if (icond && TREE_CODE (icond) == INTEGER_CST)
11457 {
11458 if (integer_zerop (icond))
11459 {
11460 if (code == EQ_EXPR)
11461 return omit_two_operands_loc (loc, type, boolean_false_node,
11462 real0, real1);
11463 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11464 }
11465 else
11466 {
11467 if (code == NE_EXPR)
11468 return omit_two_operands_loc (loc, type, boolean_true_node,
11469 real0, real1);
11470 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11471 }
11472 }
11473 }
11474
11475 return NULL_TREE;
11476
11477 case LT_EXPR:
11478 case GT_EXPR:
11479 case LE_EXPR:
11480 case GE_EXPR:
11481 tem = fold_comparison (loc, code, type, op0, op1);
11482 if (tem != NULL_TREE)
11483 return tem;
11484
11485 /* Transform comparisons of the form X +- C CMP X. */
11486 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11487 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11488 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11489 && !HONOR_SNANS (arg0))
11490 {
11491 tree arg01 = TREE_OPERAND (arg0, 1);
11492 enum tree_code code0 = TREE_CODE (arg0);
11493 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11494
11495 /* (X - c) > X becomes false. */
11496 if (code == GT_EXPR
11497 && ((code0 == MINUS_EXPR && is_positive >= 0)
11498 || (code0 == PLUS_EXPR && is_positive <= 0)))
11499 return constant_boolean_node (0, type);
11500
11501 /* Likewise (X + c) < X becomes false. */
11502 if (code == LT_EXPR
11503 && ((code0 == PLUS_EXPR && is_positive >= 0)
11504 || (code0 == MINUS_EXPR && is_positive <= 0)))
11505 return constant_boolean_node (0, type);
11506
11507 /* Convert (X - c) <= X to true. */
11508 if (!HONOR_NANS (arg1)
11509 && code == LE_EXPR
11510 && ((code0 == MINUS_EXPR && is_positive >= 0)
11511 || (code0 == PLUS_EXPR && is_positive <= 0)))
11512 return constant_boolean_node (1, type);
11513
11514 /* Convert (X + c) >= X to true. */
11515 if (!HONOR_NANS (arg1)
11516 && code == GE_EXPR
11517 && ((code0 == PLUS_EXPR && is_positive >= 0)
11518 || (code0 == MINUS_EXPR && is_positive <= 0)))
11519 return constant_boolean_node (1, type);
11520 }
11521
11522 /* If we are comparing an ABS_EXPR with a constant, we can
11523 convert all the cases into explicit comparisons, but they may
11524 well not be faster than doing the ABS and one comparison.
11525 But ABS (X) <= C is a range comparison, which becomes a subtraction
11526 and a comparison, and is probably faster. */
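/* For instance, ABS (X) <= 5 becomes X >= -5 && X <= 5.  */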
11527 if (code == LE_EXPR
11528 && TREE_CODE (arg1) == INTEGER_CST
11529 && TREE_CODE (arg0) == ABS_EXPR
11530 && ! TREE_SIDE_EFFECTS (arg0)
11531 && (tem = negate_expr (arg1)) != 0
11532 && TREE_CODE (tem) == INTEGER_CST
11533 && !TREE_OVERFLOW (tem))
11534 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11535 build2 (GE_EXPR, type,
11536 TREE_OPERAND (arg0, 0), tem),
11537 build2 (LE_EXPR, type,
11538 TREE_OPERAND (arg0, 0), arg1));
11539
11540 /* Convert ABS_EXPR<x> >= 0 to true. */
11541 strict_overflow_p = false;
11542 if (code == GE_EXPR
11543 && (integer_zerop (arg1)
11544 || (! HONOR_NANS (arg0)
11545 && real_zerop (arg1)))
11546 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11547 {
11548 if (strict_overflow_p)
11549 fold_overflow_warning (("assuming signed overflow does not occur "
11550 "when simplifying comparison of "
11551 "absolute value and zero"),
11552 WARN_STRICT_OVERFLOW_CONDITIONAL);
11553 return omit_one_operand_loc (loc, type,
11554 constant_boolean_node (true, type),
11555 arg0);
11556 }
11557
11558 /* Convert ABS_EXPR<x> < 0 to false. */
11559 strict_overflow_p = false;
11560 if (code == LT_EXPR
11561 && (integer_zerop (arg1) || real_zerop (arg1))
11562 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11563 {
11564 if (strict_overflow_p)
11565 fold_overflow_warning (("assuming signed overflow does not occur "
11566 "when simplifying comparison of "
11567 "absolute value and zero"),
11568 WARN_STRICT_OVERFLOW_CONDITIONAL);
11569 return omit_one_operand_loc (loc, type,
11570 constant_boolean_node (false, type),
11571 arg0);
11572 }
11573
11574 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11575 and similarly for >= into !=. */
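/* For unsigned X, X < (1 << Y) holds exactly when all bits of X at or
   above position Y are zero, i.e. when X >> Y == 0.  */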
11576 if ((code == LT_EXPR || code == GE_EXPR)
11577 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11578 && TREE_CODE (arg1) == LSHIFT_EXPR
11579 && integer_onep (TREE_OPERAND (arg1, 0)))
11580 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11581 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11582 TREE_OPERAND (arg1, 1)),
11583 build_zero_cst (TREE_TYPE (arg0)));
11584
11585 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11586 otherwise Y might be >= # of bits in X's type and thus e.g.
11587 (unsigned char) (1 << Y) for Y 15 might be 0.
11588 If the cast is widening, then 1 << Y should have unsigned type,
11589 otherwise if Y is number of bits in the signed shift type minus 1,
11590 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11591 31 might be 0xffffffff80000000. */
11592 if ((code == LT_EXPR || code == GE_EXPR)
11593 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11594 && CONVERT_EXPR_P (arg1)
11595 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11596 && (element_precision (TREE_TYPE (arg1))
11597 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11598 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11599 || (element_precision (TREE_TYPE (arg1))
11600 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11601 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11602 {
11603 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11604 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11605 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11606 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11607 build_zero_cst (TREE_TYPE (arg0)));
11608 }
11609
11610 return NULL_TREE;
11611
11612 case UNORDERED_EXPR:
11613 case ORDERED_EXPR:
11614 case UNLT_EXPR:
11615 case UNLE_EXPR:
11616 case UNGT_EXPR:
11617 case UNGE_EXPR:
11618 case UNEQ_EXPR:
11619 case LTGT_EXPR:
11620 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11621 {
11622 tree targ0 = strip_float_extensions (arg0);
11623 tree targ1 = strip_float_extensions (arg1);
11624 tree newtype = TREE_TYPE (targ0);
11625
11626 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11627 newtype = TREE_TYPE (targ1);
11628
11629 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11630 return fold_build2_loc (loc, code, type,
11631 fold_convert_loc (loc, newtype, targ0),
11632 fold_convert_loc (loc, newtype, targ1));
11633 }
11634
11635 return NULL_TREE;
11636
11637 case COMPOUND_EXPR:
11638 /* When pedantic, a compound expression can be neither an lvalue
11639 nor an integer constant expression. */
11640 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11641 return NULL_TREE;
11642 /* Don't let (0, 0) be a null pointer constant. */
11643 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11644 : fold_convert_loc (loc, type, arg1);
11645 return pedantic_non_lvalue_loc (loc, tem);
11646
11647 case ASSERT_EXPR:
11648 /* An ASSERT_EXPR should never be passed to fold_binary. */
11649 gcc_unreachable ();
11650
11651 default:
11652 return NULL_TREE;
11653 } /* switch (code) */
11654 }
11655
11656 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11657 ((A & N) + B) & M -> (A + B) & M
11658 Similarly if (N & M) == 0,
11659 ((A | N) + B) & M -> (A + B) & M
11660 and for - instead of + (or unary - instead of +)
11661 and/or ^ instead of |.
11662 If B is constant and (B & M) == 0, fold into A & M.
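   For example, with M == 0xff, ((A & 0x1ff) + B) & 0xff folds to
   (A + B) & 0xff, and so does ((A | 0x100) + B) & 0xff, because bits
   above the mask cannot influence the masked result of the addition.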
11663
11664 This function is a helper for match.pd patterns. Return non-NULL
11665 type in which the simplified operation should be performed only
11666 if any optimization is possible.
11667
11668 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
11669 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
11670 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
11671 +/-. */
11672 tree
11673 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
11674 tree arg00, enum tree_code code00, tree arg000, tree arg001,
11675 tree arg01, enum tree_code code01, tree arg010, tree arg011,
11676 tree *pmop)
11677 {
11678 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
11679 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
11680 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11681 if (~cst1 == 0
11682 || (cst1 & (cst1 + 1)) != 0
11683 || !INTEGRAL_TYPE_P (type)
11684 || (!TYPE_OVERFLOW_WRAPS (type)
11685 && TREE_CODE (type) != INTEGER_TYPE)
11686 || (wi::max_value (type) & cst1) != cst1)
11687 return NULL_TREE;
11688
11689 enum tree_code codes[2] = { code00, code01 };
11690 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
11691 int which = 0;
11692 wide_int cst0;
11693
11694 /* Now we know that arg0 is (C + D) or (C - D) or -C and
11695 arg1 (M) is == (1LL << cst) - 1.
11696 Store C into PMOP[0] and D into PMOP[1]. */
11697 pmop[0] = arg00;
11698 pmop[1] = arg01;
11699 which = code != NEGATE_EXPR;
11700
11701 for (; which >= 0; which--)
11702 switch (codes[which])
11703 {
11704 case BIT_AND_EXPR:
11705 case BIT_IOR_EXPR:
11706 case BIT_XOR_EXPR:
11707 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
11708 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
11709 if (codes[which] == BIT_AND_EXPR)
11710 {
11711 if (cst0 != cst1)
11712 break;
11713 }
11714 else if (cst0 != 0)
11715 break;
11716 /* If C or D is of the form (A & N) where
11717 (N & M) == M, or of the form (A | N) or
11718 (A ^ N) where (N & M) == 0, replace it with A. */
11719 pmop[which] = arg0xx[2 * which];
11720 break;
11721 case ERROR_MARK:
11722 if (TREE_CODE (pmop[which]) != INTEGER_CST)
11723 break;
11724 /* If C or D is a N where (N & M) == 0, it can be
11725 omitted (replaced with 0). */
11726 if ((code == PLUS_EXPR
11727 || (code == MINUS_EXPR && which == 0))
11728 && (cst1 & wi::to_wide (pmop[which])) == 0)
11729 pmop[which] = build_int_cst (type, 0);
11730 /* Similarly, with C - N where (-N & M) == 0. */
11731 if (code == MINUS_EXPR
11732 && which == 1
11733 && (cst1 & -wi::to_wide (pmop[which])) == 0)
11734 pmop[which] = build_int_cst (type, 0);
11735 break;
11736 default:
11737 gcc_unreachable ();
11738 }
11739
11740 /* Only build anything new if we optimized one or both arguments above. */
11741 if (pmop[0] == arg00 && pmop[1] == arg01)
11742 return NULL_TREE;
11743
11744 if (TYPE_OVERFLOW_WRAPS (type))
11745 return type;
11746 else
11747 return unsigned_type_for (type);
11748 }
11749
11750 /* Data used by contains_label_p and contains_label_1. */
11751
11752 struct contains_label_data
11753 {
11754 hash_set<tree> *pset;
11755 bool inside_switch_p;
11756 };
11757
11758 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11759 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
11760 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
11761
11762 static tree
11763 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
11764 {
11765 contains_label_data *d = (contains_label_data *) data;
11766 switch (TREE_CODE (*tp))
11767 {
11768 case LABEL_EXPR:
11769 return *tp;
11770
11771 case CASE_LABEL_EXPR:
11772 if (!d->inside_switch_p)
11773 return *tp;
11774 return NULL_TREE;
11775
11776 case SWITCH_EXPR:
11777 if (!d->inside_switch_p)
11778 {
11779 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
11780 return *tp;
11781 d->inside_switch_p = true;
11782 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
11783 return *tp;
11784 d->inside_switch_p = false;
11785 *walk_subtrees = 0;
11786 }
11787 return NULL_TREE;
11788
11789 case GOTO_EXPR:
11790 *walk_subtrees = 0;
11791 return NULL_TREE;
11792
11793 default:
11794 return NULL_TREE;
11795 }
11796 }
11797
11798 /* Return whether the sub-tree ST contains a label which is accessible from
11799 outside the sub-tree. */
11800
11801 static bool
11802 contains_label_p (tree st)
11803 {
11804 hash_set<tree> pset;
11805 contains_label_data data = { &pset, false };
11806 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
11807 }
11808
11809 /* Fold a ternary expression of code CODE and type TYPE with operands
11810 OP0, OP1, and OP2. Return the folded expression if folding is
11811 successful. Otherwise, return NULL_TREE. */
11812
11813 tree
11814 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11815 tree op0, tree op1, tree op2)
11816 {
11817 tree tem;
11818 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11819 enum tree_code_class kind = TREE_CODE_CLASS (code);
11820
11821 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11822 && TREE_CODE_LENGTH (code) == 3);
11823
11824 /* If this is a commutative operation, and OP0 is a constant, move it
11825 to OP1 to reduce the number of tests below. */
11826 if (commutative_ternary_tree_code (code)
11827 && tree_swap_operands_p (op0, op1))
11828 return fold_build3_loc (loc, code, type, op1, op0, op2);
11829
11830 tem = generic_simplify (loc, code, type, op0, op1, op2);
11831 if (tem)
11832 return tem;
11833
11834 /* Strip any conversions that don't change the mode. This is safe
11835 for every expression, except for a comparison expression because
11836 its signedness is derived from its operands. So, in the latter
11837 case, only strip conversions that don't change the signedness.
11838
11839 Note that this is done as an internal manipulation within the
11840 constant folder, in order to find the simplest representation of
11841 the arguments so that their form can be studied. In any case,
11842 the appropriate type conversions should be put back in the tree
11843 that will get out of the constant folder. */
11844 if (op0)
11845 {
11846 arg0 = op0;
11847 STRIP_NOPS (arg0);
11848 }
11849
11850 if (op1)
11851 {
11852 arg1 = op1;
11853 STRIP_NOPS (arg1);
11854 }
11855
11856 if (op2)
11857 {
11858 arg2 = op2;
11859 STRIP_NOPS (arg2);
11860 }
11861
11862 switch (code)
11863 {
11864 case COMPONENT_REF:
11865 if (TREE_CODE (arg0) == CONSTRUCTOR
11866 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11867 {
11868 unsigned HOST_WIDE_INT idx;
11869 tree field, value;
11870 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11871 if (field == arg1)
11872 return value;
11873 }
11874 return NULL_TREE;
11875
11876 case COND_EXPR:
11877 case VEC_COND_EXPR:
11878 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11879 so all simple results must be passed through pedantic_non_lvalue. */
11880 if (TREE_CODE (arg0) == INTEGER_CST)
11881 {
11882 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11883 tem = integer_zerop (arg0) ? op2 : op1;
11884 /* Only optimize constant conditions when the selected branch
11885 has the same type as the COND_EXPR. This avoids optimizing
11886 away "c ? x : throw", where the throw has a void type.
11887 Also avoid throwing away an unused operand that contains a label. */
11888 if ((!TREE_SIDE_EFFECTS (unused_op)
11889 || !contains_label_p (unused_op))
11890 && (! VOID_TYPE_P (TREE_TYPE (tem))
11891 || VOID_TYPE_P (type)))
11892 return pedantic_non_lvalue_loc (loc, tem);
11893 return NULL_TREE;
11894 }
11895 else if (TREE_CODE (arg0) == VECTOR_CST)
11896 {
11897 unsigned HOST_WIDE_INT nelts;
11898 if ((TREE_CODE (arg1) == VECTOR_CST
11899 || TREE_CODE (arg1) == CONSTRUCTOR)
11900 && (TREE_CODE (arg2) == VECTOR_CST
11901 || TREE_CODE (arg2) == CONSTRUCTOR)
11902 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
11903 {
11904 vec_perm_builder sel (nelts, nelts, 1);
11905 for (unsigned int i = 0; i < nelts; i++)
11906 {
11907 tree val = VECTOR_CST_ELT (arg0, i);
11908 if (integer_all_onesp (val))
11909 sel.quick_push (i);
11910 else if (integer_zerop (val))
11911 sel.quick_push (nelts + i);
11912 else /* Currently unreachable. */
11913 return NULL_TREE;
11914 }
11915 vec_perm_indices indices (sel, 2, nelts);
11916 tree t = fold_vec_perm (type, arg1, arg2, indices);
11917 if (t != NULL_TREE)
11918 return t;
11919 }
11920 }
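	  /* E.g. with four elements, VEC_COND_EXPR <{-1,0,-1,0}, a, b> selects
	     a[0], b[1], a[2], b[3], so the constant mask above becomes the
	     permutation {0, 5, 2, 7} over the concatenation of a and b.  */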
11921
11922 /* If we have A op B ? A : C, we may be able to convert this to a
11923 simpler expression, depending on the operation and the values
11924 of B and C. Signed zeros prevent all of these transformations,
11925 for reasons given above each one.
11926
11927 Also try swapping the arguments and inverting the conditional. */
11928 if (COMPARISON_CLASS_P (arg0)
11929 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11930 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11931 {
11932 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11933 if (tem)
11934 return tem;
11935 }
11936
11937 if (COMPARISON_CLASS_P (arg0)
11938 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11939 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11940 {
11941 location_t loc0 = expr_location_or (arg0, loc);
11942 tem = fold_invert_truthvalue (loc0, arg0);
11943 if (tem && COMPARISON_CLASS_P (tem))
11944 {
11945 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11946 if (tem)
11947 return tem;
11948 }
11949 }
11950
11951 /* If the second operand is simpler than the third, swap them
11952 since that produces better jump optimization results. */
11953 if (truth_value_p (TREE_CODE (arg0))
11954 && tree_swap_operands_p (op1, op2))
11955 {
11956 location_t loc0 = expr_location_or (arg0, loc);
11957 /* See if this can be inverted. If it can't, possibly because
11958 it was a floating-point inequality comparison, don't do
11959 anything. */
11960 tem = fold_invert_truthvalue (loc0, arg0);
11961 if (tem)
11962 return fold_build3_loc (loc, code, type, tem, op2, op1);
11963 }
11964
11965 /* Convert A ? 1 : 0 to simply A. */
11966 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11967 : (integer_onep (op1)
11968 && !VECTOR_TYPE_P (type)))
11969 && integer_zerop (op2)
11970 /* If we try to convert OP0 to our type, the
11971 call to fold will try to move the conversion inside
11972 a COND, which will recurse. In that case, the COND_EXPR
11973 is probably the best choice, so leave it alone. */
11974 && type == TREE_TYPE (arg0))
11975 return pedantic_non_lvalue_loc (loc, arg0);
11976
11977 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11978 over COND_EXPR in cases such as floating point comparisons. */
11979 if (integer_zerop (op1)
11980 && code == COND_EXPR
11981 && integer_onep (op2)
11982 && !VECTOR_TYPE_P (type)
11983 && truth_value_p (TREE_CODE (arg0)))
11984 return pedantic_non_lvalue_loc (loc,
11985 fold_convert_loc (loc, type,
11986 invert_truthvalue_loc (loc,
11987 arg0)));
11988
11989 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11990 if (TREE_CODE (arg0) == LT_EXPR
11991 && integer_zerop (TREE_OPERAND (arg0, 1))
11992 && integer_zerop (op2)
11993 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11994 {
11995 /* sign_bit_p looks through both zero and sign extensions,
11996 but for this optimization only sign extensions are
11997 usable. */
11998 tree tem2 = TREE_OPERAND (arg0, 0);
11999 while (tem != tem2)
12000 {
12001 if (TREE_CODE (tem2) != NOP_EXPR
12002 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12003 {
12004 tem = NULL_TREE;
12005 break;
12006 }
12007 tem2 = TREE_OPERAND (tem2, 0);
12008 }
12009 /* sign_bit_p only checks ARG1 bits within A's precision.
12010 If <sign bit of A> has wider type than A, bits outside
12011 of A's precision in <sign bit of A> need to be checked.
12012 If they are all 0, this optimization needs to be done
12013 in unsigned A's type; if they are all 1, in signed A's type;
12014 otherwise this can't be done. */
12015 if (tem
12016 && TYPE_PRECISION (TREE_TYPE (tem))
12017 < TYPE_PRECISION (TREE_TYPE (arg1))
12018 && TYPE_PRECISION (TREE_TYPE (tem))
12019 < TYPE_PRECISION (type))
12020 {
12021 int inner_width, outer_width;
12022 tree tem_type;
12023
12024 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12025 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12026 if (outer_width > TYPE_PRECISION (type))
12027 outer_width = TYPE_PRECISION (type);
12028
12029 wide_int mask = wi::shifted_mask
12030 (inner_width, outer_width - inner_width, false,
12031 TYPE_PRECISION (TREE_TYPE (arg1)));
12032
12033 wide_int common = mask & wi::to_wide (arg1);
12034 if (common == mask)
12035 {
12036 tem_type = signed_type_for (TREE_TYPE (tem));
12037 tem = fold_convert_loc (loc, tem_type, tem);
12038 }
12039 else if (common == 0)
12040 {
12041 tem_type = unsigned_type_for (TREE_TYPE (tem));
12042 tem = fold_convert_loc (loc, tem_type, tem);
12043 }
12044 else
12045 tem = NULL;
12046 }
12047
12048 if (tem)
12049 return
12050 fold_convert_loc (loc, type,
12051 fold_build2_loc (loc, BIT_AND_EXPR,
12052 TREE_TYPE (tem), tem,
12053 fold_convert_loc (loc,
12054 TREE_TYPE (tem),
12055 arg1)));
12056 }
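      /* Concretely, for 32-bit int A the transformation above rewrites
	 A < 0 ? INT_MIN : 0 as A & INT_MIN: the AND keeps only the sign
	 bit, yielding INT_MIN exactly when A is negative and 0 otherwise.  */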
12057
12058 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12059 already handled above. */
12060 if (TREE_CODE (arg0) == BIT_AND_EXPR
12061 && integer_onep (TREE_OPERAND (arg0, 1))
12062 && integer_zerop (op2)
12063 && integer_pow2p (arg1))
12064 {
12065 tree tem = TREE_OPERAND (arg0, 0);
12066 STRIP_NOPS (tem);
12067 if (TREE_CODE (tem) == RSHIFT_EXPR
12068 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12069 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
12070 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
12071 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12072 fold_convert_loc (loc, type,
12073 TREE_OPERAND (tem, 0)),
12074 op1);
12075 }
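      /* For instance, with N == 3 this folds ((A >> 3) & 1) ? 8 : 0 into
	 A & 8, since bit 3 of A is the only bit that can survive the mask.  */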
12076
12077 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12078 is probably obsolete because the first operand should be a
12079 truth value (that's why we have the two cases above), but let's
12080 leave it in until we can confirm this for all front-ends. */
12081 if (integer_zerop (op2)
12082 && TREE_CODE (arg0) == NE_EXPR
12083 && integer_zerop (TREE_OPERAND (arg0, 1))
12084 && integer_pow2p (arg1)
12085 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12086 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12087 arg1, OEP_ONLY_CONST)
12088 /* operand_equal_p compares just value, not precision, so e.g.
12089 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
12090 second operand 32-bit -128, which is not a power of two (or vice
12091 versa). */
12092 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
12093 return pedantic_non_lvalue_loc (loc,
12094 fold_convert_loc (loc, type,
12095 TREE_OPERAND (arg0,
12096 0)));
12097
12098 /* Disable the transformations below for vectors, since
12099 fold_binary_op_with_conditional_arg may undo them immediately,
12100 yielding an infinite loop. */
12101 if (code == VEC_COND_EXPR)
12102 return NULL_TREE;
12103
12104 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12105 if (integer_zerop (op2)
12106 && truth_value_p (TREE_CODE (arg0))
12107 && truth_value_p (TREE_CODE (arg1))
12108 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12109 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12110 : TRUTH_ANDIF_EXPR,
12111 type, fold_convert_loc (loc, type, arg0), op1);
12112
12113 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12114 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
12115 && truth_value_p (TREE_CODE (arg0))
12116 && truth_value_p (TREE_CODE (arg1))
12117 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12118 {
12119 location_t loc0 = expr_location_or (arg0, loc);
12120 /* Only perform the transformation if ARG0 is easily inverted. */
12121 tem = fold_invert_truthvalue (loc0, arg0);
12122 if (tem)
12123 return fold_build2_loc (loc, code == VEC_COND_EXPR
12124 ? BIT_IOR_EXPR
12125 : TRUTH_ORIF_EXPR,
12126 type, fold_convert_loc (loc, type, tem),
12127 op1);
12128 }
12129
12130 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12131 if (integer_zerop (arg1)
12132 && truth_value_p (TREE_CODE (arg0))
12133 && truth_value_p (TREE_CODE (op2))
12134 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12135 {
12136 location_t loc0 = expr_location_or (arg0, loc);
12137 /* Only perform the transformation if ARG0 is easily inverted. */
12138 tem = fold_invert_truthvalue (loc0, arg0);
12139 if (tem)
12140 return fold_build2_loc (loc, code == VEC_COND_EXPR
12141 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12142 type, fold_convert_loc (loc, type, tem),
12143 op2);
12144 }
12145
12146 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12147 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
12148 && truth_value_p (TREE_CODE (arg0))
12149 && truth_value_p (TREE_CODE (op2))
12150 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12151 return fold_build2_loc (loc, code == VEC_COND_EXPR
12152 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12153 type, fold_convert_loc (loc, type, arg0), op2);
12154
12155 return NULL_TREE;
12156
12157 case CALL_EXPR:
12158 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12159 of fold_ternary on them. */
12160 gcc_unreachable ();
12161
12162 case BIT_FIELD_REF:
12163 if (TREE_CODE (arg0) == VECTOR_CST
12164 && (type == TREE_TYPE (TREE_TYPE (arg0))
12165 || (VECTOR_TYPE_P (type)
12166 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
12167 && tree_fits_uhwi_p (op1)
12168 && tree_fits_uhwi_p (op2))
12169 {
12170 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12171 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12172 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12173 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12174
12175 if (n != 0
12176 && (idx % width) == 0
12177 && (n % width) == 0
12178 && known_le ((idx + n) / width,
12179 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
12180 {
12181 idx = idx / width;
12182 n = n / width;
12183
12184 if (TREE_CODE (arg0) == VECTOR_CST)
12185 {
12186 if (n == 1)
12187 {
12188 tem = VECTOR_CST_ELT (arg0, idx);
12189 if (VECTOR_TYPE_P (type))
12190 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
12191 return tem;
12192 }
12193
12194 tree_vector_builder vals (type, n, 1);
12195 for (unsigned i = 0; i < n; ++i)
12196 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
12197 return vals.build ();
12198 }
12199 }
12200 }
12201
12202 /* On constants we can use native encode/interpret to constant
12203 fold (nearly) all BIT_FIELD_REFs. */
12204 if (CONSTANT_CLASS_P (arg0)
12205 && can_native_interpret_type_p (type)
12206 && BITS_PER_UNIT == 8
12207 && tree_fits_uhwi_p (op1)
12208 && tree_fits_uhwi_p (op2))
12209 {
12210 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12211 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12212 /* Limit us to a reasonable amount of work. To relax the
12213 other limitations we need bit-shifting of the buffer
12214 and rounding up the size. */
12215 if (bitpos % BITS_PER_UNIT == 0
12216 && bitsize % BITS_PER_UNIT == 0
12217 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
12218 {
12219 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
12220 unsigned HOST_WIDE_INT len
12221 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
12222 bitpos / BITS_PER_UNIT);
12223 if (len > 0
12224 && len * BITS_PER_UNIT >= bitsize)
12225 {
12226 tree v = native_interpret_expr (type, b,
12227 bitsize / BITS_PER_UNIT);
12228 if (v)
12229 return v;
12230 }
12231 }
12232 }
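      /* For example, a BIT_FIELD_REF that extracts bits [32, 64) of a
	 64-bit integer constant is handled here by encoding the constant
	 into a byte buffer and re-interpreting the selected 4 bytes in the
	 requested TYPE.  */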
12233
12234 return NULL_TREE;
12235
12236 case VEC_PERM_EXPR:
12237 /* Perform constant folding of VEC_PERM_EXPR. */
12238 if (TREE_CODE (arg2) == VECTOR_CST
12239 && TREE_CODE (op0) == VECTOR_CST
12240 && TREE_CODE (op1) == VECTOR_CST)
12241 {
12242 /* Build a vector of integers from the tree mask. */
12243 vec_perm_builder builder;
12244 if (!tree_to_vec_perm_builder (&builder, arg2))
12245 return NULL_TREE;
12246
12247 /* Create a vec_perm_indices for the integer vector. */
12248 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
12249 bool single_arg = (op0 == op1);
12250 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
12251 return fold_vec_perm (type, op0, op1, sel);
12252 }
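      /* For example, VEC_PERM_EXPR <{1,2,3,4}, {5,6,7,8}, {0,4,1,5}> folds
	 to the constant vector {1, 5, 2, 6}: indices below the element
	 count select from the first operand, the remaining indices from the
	 second.  */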
12253 return NULL_TREE;
12254
12255 case BIT_INSERT_EXPR:
12256 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
12257 if (TREE_CODE (arg0) == INTEGER_CST
12258 && TREE_CODE (arg1) == INTEGER_CST)
12259 {
12260 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12261 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
12262 wide_int tem = (wi::to_wide (arg0)
12263 & wi::shifted_mask (bitpos, bitsize, true,
12264 TYPE_PRECISION (type)));
12265 wide_int tem2
12266 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
12267 bitsize), bitpos);
12268 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
12269 }
12270 else if (TREE_CODE (arg0) == VECTOR_CST
12271 && CONSTANT_CLASS_P (arg1)
12272 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
12273 TREE_TYPE (arg1)))
12274 {
12275 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12276 unsigned HOST_WIDE_INT elsize
12277 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
12278 if (bitpos % elsize == 0)
12279 {
12280 unsigned k = bitpos / elsize;
12281 unsigned HOST_WIDE_INT nelts;
12282 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
12283 return arg0;
12284 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
12285 {
12286 tree_vector_builder elts (type, nelts, 1);
12287 elts.quick_grow (nelts);
12288 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
12289 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
12290 return elts.build ();
12291 }
12292 }
12293 }
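      /* A worked instance of the INTEGER_CST case above: inserting the
	 8-bit value 0xab at bit position 8 into the 32-bit constant
	 0x12345678 first masks out bits [8, 16) to give 0x12340078, then
	 ORs in 0xab << 8, producing 0x1234ab78.  */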
12294 return NULL_TREE;
12295
12296 default:
12297 return NULL_TREE;
12298 } /* switch (code) */
12299 }
12300
12301 /* Get the element at index ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12302 of an array (or vector). If CTOR_IDX is non-NULL, *CTOR_IDX is updated with the
12303 constructor element index of the value returned. If the element is
12304 not found, NULL_TREE is returned and *CTOR_IDX is updated to
12305 the index of the element after the ACCESS_INDEX position (which
12306 may be outside of the CTOR array). */
12307
12308 tree
12309 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
12310 unsigned *ctor_idx)
12311 {
12312 tree index_type = NULL_TREE;
12313 signop index_sgn = UNSIGNED;
12314 offset_int low_bound = 0;
12315
12316 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12317 {
12318 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12319 if (domain_type && TYPE_MIN_VALUE (domain_type))
12320 {
12321 /* Static constructors for variably sized objects make no sense. */
12322 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12323 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12324 /* ??? When it is obvious that the range is signed, treat it so. */
12325 if (TYPE_UNSIGNED (index_type)
12326 && TYPE_MAX_VALUE (domain_type)
12327 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
12328 TYPE_MIN_VALUE (domain_type)))
12329 {
12330 index_sgn = SIGNED;
12331 low_bound
12332 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
12333 SIGNED);
12334 }
12335 else
12336 {
12337 index_sgn = TYPE_SIGN (index_type);
12338 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12339 }
12340 }
12341 }
12342
12343 if (index_type)
12344 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12345 index_sgn);
12346
12347 offset_int index = low_bound;
12348 if (index_type)
12349 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12350
12351 offset_int max_index = index;
12352 unsigned cnt;
12353 tree cfield, cval;
12354 bool first_p = true;
12355
12356 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12357 {
12358 /* An array constructor might explicitly set the index, specify a range,
12359 or leave the index NULL, meaning that it is the next index after the
12360 previous one. */
12361 if (cfield)
12362 {
12363 if (TREE_CODE (cfield) == INTEGER_CST)
12364 max_index = index
12365 = offset_int::from (wi::to_wide (cfield), index_sgn);
12366 else
12367 {
12368 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12369 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
12370 index_sgn);
12371 max_index
12372 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
12373 index_sgn);
12374 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
12375 }
12376 }
12377 else if (!first_p)
12378 {
12379 index = max_index + 1;
12380 if (index_type)
12381 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12382 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
12383 max_index = index;
12384 }
12385 else
12386 first_p = false;
12387
12388 /* Do we have a match? */
12389 if (wi::cmp (access_index, index, index_sgn) >= 0)
12390 {
12391 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
12392 {
12393 if (ctor_idx)
12394 *ctor_idx = cnt;
12395 return cval;
12396 }
12397 }
12398 else if (in_gimple_form)
12399 /* We're past the element we're searching for. Note that during parsing
12400 the elements might not be sorted.
12401 ??? We should use a binary search and a flag on the
12402 CONSTRUCTOR as to whether elements are sorted in declaration
12403 order. */
12404 break;
12405 }
12406 if (ctor_idx)
12407 *ctor_idx = cnt;
12408 return NULL_TREE;
12409 }
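/* For instance, given the array constructor { [0 ... 3] = 1, 7, [10] = 2 },
   an ACCESS_INDEX of 2 returns the RANGE_EXPR value 1, an ACCESS_INDEX of
   4 returns 7 (an element with a NULL index follows the previous maximum
   index), and an ACCESS_INDEX of 5 falls into a gap, so NULL_TREE is
   returned.  */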
12410
12411 /* Perform constant folding and related simplification of EXPR.
12412 The related simplifications include x*1 => x, x*0 => 0, etc.,
12413 and application of the associative law.
12414 NOP_EXPR conversions may be removed freely (as long as we
12415 are careful not to change the type of the overall expression).
12416 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12417 but we can constant-fold them if they have constant operands. */
12418
12419 #ifdef ENABLE_FOLD_CHECKING
12420 # define fold(x) fold_1 (x)
12421 static tree fold_1 (tree);
12422 static
12423 #endif
12424 tree
12425 fold (tree expr)
12426 {
12427 const tree t = expr;
12428 enum tree_code code = TREE_CODE (t);
12429 enum tree_code_class kind = TREE_CODE_CLASS (code);
12430 tree tem;
12431 location_t loc = EXPR_LOCATION (expr);
12432
12433 /* Return right away if a constant. */
12434 if (kind == tcc_constant)
12435 return t;
12436
12437 /* CALL_EXPR-like objects with variable numbers of operands are
12438 treated specially. */
12439 if (kind == tcc_vl_exp)
12440 {
12441 if (code == CALL_EXPR)
12442 {
12443 tem = fold_call_expr (loc, expr, false);
12444 return tem ? tem : expr;
12445 }
12446 return expr;
12447 }
12448
12449 if (IS_EXPR_CODE_CLASS (kind))
12450 {
12451 tree type = TREE_TYPE (t);
12452 tree op0, op1, op2;
12453
12454 switch (TREE_CODE_LENGTH (code))
12455 {
12456 case 1:
12457 op0 = TREE_OPERAND (t, 0);
12458 tem = fold_unary_loc (loc, code, type, op0);
12459 return tem ? tem : expr;
12460 case 2:
12461 op0 = TREE_OPERAND (t, 0);
12462 op1 = TREE_OPERAND (t, 1);
12463 tem = fold_binary_loc (loc, code, type, op0, op1);
12464 return tem ? tem : expr;
12465 case 3:
12466 op0 = TREE_OPERAND (t, 0);
12467 op1 = TREE_OPERAND (t, 1);
12468 op2 = TREE_OPERAND (t, 2);
12469 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12470 return tem ? tem : expr;
12471 default:
12472 break;
12473 }
12474 }
12475
12476 switch (code)
12477 {
12478 case ARRAY_REF:
12479 {
12480 tree op0 = TREE_OPERAND (t, 0);
12481 tree op1 = TREE_OPERAND (t, 1);
12482
12483 if (TREE_CODE (op1) == INTEGER_CST
12484 && TREE_CODE (op0) == CONSTRUCTOR
12485 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12486 {
12487 tree val = get_array_ctor_element_at_index (op0,
12488 wi::to_offset (op1));
12489 if (val)
12490 return val;
12491 }
12492
12493 return t;
12494 }
12495
12496 /* Return a VECTOR_CST if possible. */
12497 case CONSTRUCTOR:
12498 {
12499 tree type = TREE_TYPE (t);
12500 if (TREE_CODE (type) != VECTOR_TYPE)
12501 return t;
12502
12503 unsigned i;
12504 tree val;
12505 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12506 if (! CONSTANT_CLASS_P (val))
12507 return t;
12508
12509 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12510 }
12511
12512 case CONST_DECL:
12513 return fold (DECL_INITIAL (t));
12514
12515 default:
12516 return t;
12517 } /* switch (code) */
12518 }
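/* A minimal sketch of how the entry point above behaves (illustrative
   only, kept out of the build with #if 0; it assumes the usual
   tree-building helpers declared in tree.h):  */
#if 0
{
  /* 2 + 3 is dispatched to fold_binary_loc and collapses to the
     INTEGER_CST 5.  */
  tree sum = build2 (PLUS_EXPR, integer_type_node,
		     build_int_cst (integer_type_node, 2),
		     build_int_cst (integer_type_node, 3));
  tree folded = fold (sum);
  gcc_assert (TREE_CODE (folded) == INTEGER_CST
	      && tree_to_shwi (folded) == 5);
}
#endif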
12519
12520 #ifdef ENABLE_FOLD_CHECKING
12521 #undef fold
12522
12523 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12524 hash_table<nofree_ptr_hash<const tree_node> > *);
12525 static void fold_check_failed (const_tree, const_tree);
12526 void print_fold_checksum (const_tree);
12527
12528 /* When --enable-checking=fold, compute a digest of EXPR before
12529 and after the actual fold call to verify that fold did not
12530 accidentally change the original EXPR. */
12531
12532 tree
12533 fold (tree expr)
12534 {
12535 tree ret;
12536 struct md5_ctx ctx;
12537 unsigned char checksum_before[16], checksum_after[16];
12538 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12539
12540 md5_init_ctx (&ctx);
12541 fold_checksum_tree (expr, &ctx, &ht);
12542 md5_finish_ctx (&ctx, checksum_before);
12543 ht.empty ();
12544
12545 ret = fold_1 (expr);
12546
12547 md5_init_ctx (&ctx);
12548 fold_checksum_tree (expr, &ctx, &ht);
12549 md5_finish_ctx (&ctx, checksum_after);
12550
12551 if (memcmp (checksum_before, checksum_after, 16))
12552 fold_check_failed (expr, ret);
12553
12554 return ret;
12555 }
12556
12557 void
12558 print_fold_checksum (const_tree expr)
12559 {
12560 struct md5_ctx ctx;
12561 unsigned char checksum[16], cnt;
12562 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12563
12564 md5_init_ctx (&ctx);
12565 fold_checksum_tree (expr, &ctx, &ht);
12566 md5_finish_ctx (&ctx, checksum);
12567 for (cnt = 0; cnt < 16; ++cnt)
12568 fprintf (stderr, "%02x", checksum[cnt]);
12569 putc ('\n', stderr);
12570 }
12571
12572 static void
12573 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12574 {
12575 internal_error ("fold check: original tree changed by fold");
12576 }
12577
12578 static void
12579 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12580 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12581 {
12582 const tree_node **slot;
12583 enum tree_code code;
12584 union tree_node *buf;
12585 int i, len;
12586
12587 recursive_label:
12588 if (expr == NULL)
12589 return;
12590 slot = ht->find_slot (expr, INSERT);
12591 if (*slot != NULL)
12592 return;
12593 *slot = expr;
12594 code = TREE_CODE (expr);
12595 if (TREE_CODE_CLASS (code) == tcc_declaration
12596 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12597 {
12598 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12599 size_t sz = tree_size (expr);
12600 buf = XALLOCAVAR (union tree_node, sz);
12601 memcpy ((char *) buf, expr, sz);
12602 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
12603 buf->decl_with_vis.symtab_node = NULL;
12604 buf->base.nowarning_flag = 0;
12605 expr = (tree) buf;
12606 }
12607 else if (TREE_CODE_CLASS (code) == tcc_type
12608 && (TYPE_POINTER_TO (expr)
12609 || TYPE_REFERENCE_TO (expr)
12610 || TYPE_CACHED_VALUES_P (expr)
12611 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12612 || TYPE_NEXT_VARIANT (expr)
12613 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12614 {
12615 /* Allow these fields to be modified. */
12616 tree tmp;
12617 size_t sz = tree_size (expr);
12618 buf = XALLOCAVAR (union tree_node, sz);
12619 memcpy ((char *) buf, expr, sz);
12620 expr = tmp = (tree) buf;
12621 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12622 TYPE_POINTER_TO (tmp) = NULL;
12623 TYPE_REFERENCE_TO (tmp) = NULL;
12624 TYPE_NEXT_VARIANT (tmp) = NULL;
12625 TYPE_ALIAS_SET (tmp) = -1;
12626 if (TYPE_CACHED_VALUES_P (tmp))
12627 {
12628 TYPE_CACHED_VALUES_P (tmp) = 0;
12629 TYPE_CACHED_VALUES (tmp) = NULL;
12630 }
12631 }
12632 else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
12633 {
12634 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
12635 and change builtins.c etc. instead - see PR89543. */
12636 size_t sz = tree_size (expr);
12637 buf = XALLOCAVAR (union tree_node, sz);
12638 memcpy ((char *) buf, expr, sz);
12639 buf->base.nowarning_flag = 0;
12640 expr = (tree) buf;
12641 }
12642 md5_process_bytes (expr, tree_size (expr), ctx);
12643 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12644 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12645 if (TREE_CODE_CLASS (code) != tcc_type
12646 && TREE_CODE_CLASS (code) != tcc_declaration
12647 && code != TREE_LIST
12648 && code != SSA_NAME
12649 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12650 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12651 switch (TREE_CODE_CLASS (code))
12652 {
12653 case tcc_constant:
12654 switch (code)
12655 {
12656 case STRING_CST:
12657 md5_process_bytes (TREE_STRING_POINTER (expr),
12658 TREE_STRING_LENGTH (expr), ctx);
12659 break;
12660 case COMPLEX_CST:
12661 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12662 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12663 break;
12664 case VECTOR_CST:
12665 len = vector_cst_encoded_nelts (expr);
12666 for (i = 0; i < len; ++i)
12667 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12668 break;
12669 default:
12670 break;
12671 }
12672 break;
12673 case tcc_exceptional:
12674 switch (code)
12675 {
12676 case TREE_LIST:
12677 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12678 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12679 expr = TREE_CHAIN (expr);
12680 goto recursive_label;
12681 break;
12682 case TREE_VEC:
12683 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12684 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12685 break;
12686 default:
12687 break;
12688 }
12689 break;
12690 case tcc_expression:
12691 case tcc_reference:
12692 case tcc_comparison:
12693 case tcc_unary:
12694 case tcc_binary:
12695 case tcc_statement:
12696 case tcc_vl_exp:
12697 len = TREE_OPERAND_LENGTH (expr);
12698 for (i = 0; i < len; ++i)
12699 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12700 break;
12701 case tcc_declaration:
12702 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12703 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12704 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12705 {
12706 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12707 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12708 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12709 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12710 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12711 }
12712
12713 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12714 {
12715 if (TREE_CODE (expr) == FUNCTION_DECL)
12716 {
12717 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12718 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12719 }
12720 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12721 }
12722 break;
12723 case tcc_type:
12724 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12725 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12726 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12727 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12728 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12729 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12730 if (INTEGRAL_TYPE_P (expr)
12731 || SCALAR_FLOAT_TYPE_P (expr))
12732 {
12733 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12734 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12735 }
12736 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12737 if (TREE_CODE (expr) == RECORD_TYPE
12738 || TREE_CODE (expr) == UNION_TYPE
12739 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12740 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12741 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12742 break;
12743 default:
12744 break;
12745 }
12746 }
12747
12748 /* Helper function for outputting the checksum of a tree T. When
12749 debugging with gdb, you can "define mynext" to be "next" followed
12750 by "call debug_fold_checksum (op0)", then just trace down till the
12751 outputs differ. */
12752
12753 DEBUG_FUNCTION void
12754 debug_fold_checksum (const_tree t)
12755 {
12756 int i;
12757 unsigned char checksum[16];
12758 struct md5_ctx ctx;
12759 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12760
12761 md5_init_ctx (&ctx);
12762 fold_checksum_tree (t, &ctx, &ht);
12763 md5_finish_ctx (&ctx, checksum);
12764 ht.empty ();
12765
12766 for (i = 0; i < 16; i++)
12767 fprintf (stderr, "%d ", checksum[i]);
12768
12769 fprintf (stderr, "\n");
12770 }
12771
12772 #endif
12773
12774 /* Fold a unary tree expression with code CODE of type TYPE with an
12775 operand OP0. LOC is the location of the resulting expression.
12776 Return a folded expression if successful. Otherwise, return a tree
12777 expression with code CODE of type TYPE with an operand OP0. */
12778
12779 tree
12780 fold_build1_loc (location_t loc,
12781 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12782 {
12783 tree tem;
12784 #ifdef ENABLE_FOLD_CHECKING
12785 unsigned char checksum_before[16], checksum_after[16];
12786 struct md5_ctx ctx;
12787 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12788
12789 md5_init_ctx (&ctx);
12790 fold_checksum_tree (op0, &ctx, &ht);
12791 md5_finish_ctx (&ctx, checksum_before);
12792 ht.empty ();
12793 #endif
12794
12795 tem = fold_unary_loc (loc, code, type, op0);
12796 if (!tem)
12797 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12798
12799 #ifdef ENABLE_FOLD_CHECKING
12800 md5_init_ctx (&ctx);
12801 fold_checksum_tree (op0, &ctx, &ht);
12802 md5_finish_ctx (&ctx, checksum_after);
12803
12804 if (memcmp (checksum_before, checksum_after, 16))
12805 fold_check_failed (op0, tem);
12806 #endif
12807 return tem;
12808 }
12809
12810 /* Fold a binary tree expression with code CODE of type TYPE with
12811 operands OP0 and OP1. LOC is the location of the resulting
12812 expression. Return a folded expression if successful. Otherwise,
12813 return a tree expression with code CODE of type TYPE with operands
12814 OP0 and OP1. */
12815
12816 tree
12817 fold_build2_loc (location_t loc,
12818 enum tree_code code, tree type, tree op0, tree op1
12819 MEM_STAT_DECL)
12820 {
12821 tree tem;
12822 #ifdef ENABLE_FOLD_CHECKING
12823 unsigned char checksum_before_op0[16],
12824 checksum_before_op1[16],
12825 checksum_after_op0[16],
12826 checksum_after_op1[16];
12827 struct md5_ctx ctx;
12828 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12829
12830 md5_init_ctx (&ctx);
12831 fold_checksum_tree (op0, &ctx, &ht);
12832 md5_finish_ctx (&ctx, checksum_before_op0);
12833 ht.empty ();
12834
12835 md5_init_ctx (&ctx);
12836 fold_checksum_tree (op1, &ctx, &ht);
12837 md5_finish_ctx (&ctx, checksum_before_op1);
12838 ht.empty ();
12839 #endif
12840
12841 tem = fold_binary_loc (loc, code, type, op0, op1);
12842 if (!tem)
12843 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12844
12845 #ifdef ENABLE_FOLD_CHECKING
12846 md5_init_ctx (&ctx);
12847 fold_checksum_tree (op0, &ctx, &ht);
12848 md5_finish_ctx (&ctx, checksum_after_op0);
12849 ht.empty ();
12850
12851 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12852 fold_check_failed (op0, tem);
12853
12854 md5_init_ctx (&ctx);
12855 fold_checksum_tree (op1, &ctx, &ht);
12856 md5_finish_ctx (&ctx, checksum_after_op1);
12857
12858 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12859 fold_check_failed (op1, tem);
12860 #endif
12861 return tem;
12862 }
12863
12864 /* Fold a ternary tree expression with code CODE of type TYPE with
12865 operands OP0, OP1, and OP2. Return a folded expression if
12866 successful. Otherwise, return a tree expression with code CODE of
12867 type TYPE with operands OP0, OP1, and OP2. */
12868
12869 tree
12870 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12871 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12872 {
12873 tree tem;
12874 #ifdef ENABLE_FOLD_CHECKING
12875 unsigned char checksum_before_op0[16],
12876 checksum_before_op1[16],
12877 checksum_before_op2[16],
12878 checksum_after_op0[16],
12879 checksum_after_op1[16],
12880 checksum_after_op2[16];
12881 struct md5_ctx ctx;
12882 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12883
12884 md5_init_ctx (&ctx);
12885 fold_checksum_tree (op0, &ctx, &ht);
12886 md5_finish_ctx (&ctx, checksum_before_op0);
12887 ht.empty ();
12888
12889 md5_init_ctx (&ctx);
12890 fold_checksum_tree (op1, &ctx, &ht);
12891 md5_finish_ctx (&ctx, checksum_before_op1);
12892 ht.empty ();
12893
12894 md5_init_ctx (&ctx);
12895 fold_checksum_tree (op2, &ctx, &ht);
12896 md5_finish_ctx (&ctx, checksum_before_op2);
12897 ht.empty ();
12898 #endif
12899
12900 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12901 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12902 if (!tem)
12903 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12904
12905 #ifdef ENABLE_FOLD_CHECKING
12906 md5_init_ctx (&ctx);
12907 fold_checksum_tree (op0, &ctx, &ht);
12908 md5_finish_ctx (&ctx, checksum_after_op0);
12909 ht.empty ();
12910
12911 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12912 fold_check_failed (op0, tem);
12913
12914 md5_init_ctx (&ctx);
12915 fold_checksum_tree (op1, &ctx, &ht);
12916 md5_finish_ctx (&ctx, checksum_after_op1);
12917 ht.empty ();
12918
12919 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12920 fold_check_failed (op1, tem);
12921
12922 md5_init_ctx (&ctx);
12923 fold_checksum_tree (op2, &ctx, &ht);
12924 md5_finish_ctx (&ctx, checksum_after_op2);
12925
12926 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12927 fold_check_failed (op2, tem);
12928 #endif
12929 return tem;
12930 }
12931
12932 /* Fold a CALL_EXPR expression of type TYPE with callee FN, the NARGS
12933 arguments in ARGARRAY, and a null static chain.
12934 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12935 of type TYPE from the given operands as constructed by build_call_array. */
12936
12937 tree
12938 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12939 int nargs, tree *argarray)
12940 {
12941 tree tem;
12942 #ifdef ENABLE_FOLD_CHECKING
12943 unsigned char checksum_before_fn[16],
12944 checksum_before_arglist[16],
12945 checksum_after_fn[16],
12946 checksum_after_arglist[16];
12947 struct md5_ctx ctx;
12948 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12949 int i;
12950
12951 md5_init_ctx (&ctx);
12952 fold_checksum_tree (fn, &ctx, &ht);
12953 md5_finish_ctx (&ctx, checksum_before_fn);
12954 ht.empty ();
12955
12956 md5_init_ctx (&ctx);
12957 for (i = 0; i < nargs; i++)
12958 fold_checksum_tree (argarray[i], &ctx, &ht);
12959 md5_finish_ctx (&ctx, checksum_before_arglist);
12960 ht.empty ();
12961 #endif
12962
12963 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12964 if (!tem)
12965 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12966
12967 #ifdef ENABLE_FOLD_CHECKING
12968 md5_init_ctx (&ctx);
12969 fold_checksum_tree (fn, &ctx, &ht);
12970 md5_finish_ctx (&ctx, checksum_after_fn);
12971 ht.empty ();
12972
12973 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12974 fold_check_failed (fn, tem);
12975
12976 md5_init_ctx (&ctx);
12977 for (i = 0; i < nargs; i++)
12978 fold_checksum_tree (argarray[i], &ctx, &ht);
12979 md5_finish_ctx (&ctx, checksum_after_arglist);
12980
12981 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12982 fold_check_failed (NULL_TREE, tem);
12983 #endif
12984 return tem;
12985 }
12986
12987 /* Perform constant folding and related simplification of initializer
12988 expression EXPR. These behave identically to "fold_buildN" but ignore
12989 potential run-time traps and exceptions that fold must preserve. */
12990
12991 #define START_FOLD_INIT \
12992 int saved_signaling_nans = flag_signaling_nans;\
12993 int saved_trapping_math = flag_trapping_math;\
12994 int saved_rounding_math = flag_rounding_math;\
12995 int saved_trapv = flag_trapv;\
12996 int saved_folding_initializer = folding_initializer;\
12997 flag_signaling_nans = 0;\
12998 flag_trapping_math = 0;\
12999 flag_rounding_math = 0;\
13000 flag_trapv = 0;\
13001 folding_initializer = 1;
13002
13003 #define END_FOLD_INIT \
13004 flag_signaling_nans = saved_signaling_nans;\
13005 flag_trapping_math = saved_trapping_math;\
13006 flag_rounding_math = saved_rounding_math;\
13007 flag_trapv = saved_trapv;\
13008 folding_initializer = saved_folding_initializer;
13009
13010 tree
13011 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13012 tree type, tree op)
13013 {
13014 tree result;
13015 START_FOLD_INIT;
13016
13017 result = fold_build1_loc (loc, code, type, op);
13018
13019 END_FOLD_INIT;
13020 return result;
13021 }
13022
13023 tree
13024 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13025 tree type, tree op0, tree op1)
13026 {
13027 tree result;
13028 START_FOLD_INIT;
13029
13030 result = fold_build2_loc (loc, code, type, op0, op1);
13031
13032 END_FOLD_INIT;
13033 return result;
13034 }
13035
13036 tree
13037 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13038 int nargs, tree *argarray)
13039 {
13040 tree result;
13041 START_FOLD_INIT;
13042
13043 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13044
13045 END_FOLD_INIT;
13046 return result;
13047 }
13048
13049 #undef START_FOLD_INIT
13050 #undef END_FOLD_INIT
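/* A minimal usage sketch (illustrative only, kept out of the build with
   #if 0): a front end folding the static initializer
   "static double d = 1.0 / 3.0;" can go through the _initializer
   variant so that the inexact division is still evaluated at compile
   time even under -frounding-math or -ftrapping-math.  */
#if 0
  /* ONE and THREE stand for the REAL_CST operands 1.0 and 3.0 built
     elsewhere, e.g. with build_real; they are placeholders, not names
     defined in this file.  */
  tree d = fold_build2_initializer_loc (input_location, RDIV_EXPR,
					double_type_node, one, three);
#endif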
13051
13052 /* Determine if the first argument is a multiple of the second argument. Return 0
13053 if it is not, or if we cannot easily determine that it is.
13054
13055 An example of the sort of thing we care about (at this point; this routine
13056 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13057 fold cases do now) is discovering that
13058
13059 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13060
13061 is a multiple of
13062
13063 SAVE_EXPR (J * 8)
13064
13065 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13066
13067 This code also handles discovering that
13068
13069 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13070
13071 is a multiple of 8 so we don't have to worry about dealing with a
13072 possible remainder.
13073
13074 Note that we *look* inside a SAVE_EXPR only to determine how it was
13075 calculated; it is not safe for fold to do much of anything else with the
13076 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13077 at run time. For example, the latter example above *cannot* be implemented
13078 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13079 evaluation time of the original SAVE_EXPR is not necessarily the same at
13080 the time the new expression is evaluated. The only optimization of this
13081 sort that would be valid is changing
13082
13083 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13084
13085 divided by 8 to
13086
13087 SAVE_EXPR (I) * SAVE_EXPR (J)
13088
13089 (where the same SAVE_EXPR (J) is used in the original and the
13090 transformed version). */
13091
13092 int
13093 multiple_of_p (tree type, const_tree top, const_tree bottom)
13094 {
13095 gimple *stmt;
13096 tree t1, op1, op2;
13097
13098 if (operand_equal_p (top, bottom, 0))
13099 return 1;
13100
13101 if (TREE_CODE (type) != INTEGER_TYPE)
13102 return 0;
13103
13104 switch (TREE_CODE (top))
13105 {
13106 case BIT_AND_EXPR:
13107 /* Bitwise and provides a power of two multiple. If the mask is
13108 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13109 if (!integer_pow2p (bottom))
13110 return 0;
13111 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13112 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13113
13114 case MULT_EXPR:
13115 if (TREE_CODE (bottom) == INTEGER_CST)
13116 {
13117 op1 = TREE_OPERAND (top, 0);
13118 op2 = TREE_OPERAND (top, 1);
13119 if (TREE_CODE (op1) == INTEGER_CST)
13120 std::swap (op1, op2);
13121 if (TREE_CODE (op2) == INTEGER_CST)
13122 {
13123 if (multiple_of_p (type, op2, bottom))
13124 return 1;
13125 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
13126 if (multiple_of_p (type, bottom, op2))
13127 {
13128 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
13129 wi::to_widest (op2));
13130 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
13131 {
13132 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
13133 return multiple_of_p (type, op1, op2);
13134 }
13135 }
13136 return multiple_of_p (type, op1, bottom);
13137 }
13138 }
13139 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13140 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13141
13142 case MINUS_EXPR:
13143 /* It is impossible to prove precisely whether op0 - op1 is a multiple
13144 of bottom, so be conservative here and check whether both op0 and op1
13145 are multiples of bottom. Note we check the second operand first
13146 since it's usually simpler. */
13147 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13148 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13149
13150 case PLUS_EXPR:
13151 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13152 as op0 - 3 if the expression has unsigned type. For example,
13153 (X / 3) * 3 + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
13154 op1 = TREE_OPERAND (top, 1);
13155 if (TYPE_UNSIGNED (type)
13156 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
13157 op1 = fold_build1 (NEGATE_EXPR, type, op1);
13158 return (multiple_of_p (type, op1, bottom)
13159 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13160
13161 case LSHIFT_EXPR:
13162 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13163 {
13164 op1 = TREE_OPERAND (top, 1);
13165 /* const_binop may not detect overflow correctly,
13166 so check for it explicitly here. */
13167 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
13168 wi::to_wide (op1))
13169 && (t1 = fold_convert (type,
13170 const_binop (LSHIFT_EXPR, size_one_node,
13171 op1))) != 0
13172 && !TREE_OVERFLOW (t1))
13173 return multiple_of_p (type, t1, bottom);
13174 }
13175 return 0;
13176
13177 case NOP_EXPR:
13178 /* Can't handle conversions from non-integral or wider integral type. */
13179 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13180 || (TYPE_PRECISION (type)
13181 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13182 return 0;
13183
13184 /* fall through */
13185
13186 case SAVE_EXPR:
13187 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13188
13189 case COND_EXPR:
13190 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13191 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13192
13193 case INTEGER_CST:
13194 if (TREE_CODE (bottom) != INTEGER_CST
13195 || integer_zerop (bottom)
13196 || (TYPE_UNSIGNED (type)
13197 && (tree_int_cst_sgn (top) < 0
13198 || tree_int_cst_sgn (bottom) < 0)))
13199 return 0;
13200 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13201 SIGNED);
13202
13203 case SSA_NAME:
13204 if (TREE_CODE (bottom) == INTEGER_CST
13205 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
13206 && gimple_code (stmt) == GIMPLE_ASSIGN)
13207 {
13208 enum tree_code code = gimple_assign_rhs_code (stmt);
13209
13210 /* Check for special cases to see if top is defined as multiple
13211 of bottom:
13212
13213 top = (X & ~(bottom - 1)) ; bottom is power of 2
13214
13215 or
13216
13217 Y = X % bottom
13218 top = X - Y. */
13219 if (code == BIT_AND_EXPR
13220 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13221 && TREE_CODE (op2) == INTEGER_CST
13222 && integer_pow2p (bottom)
13223 && wi::multiple_of_p (wi::to_widest (op2),
13224 wi::to_widest (bottom), UNSIGNED))
13225 return 1;
13226
13227 op1 = gimple_assign_rhs1 (stmt);
13228 if (code == MINUS_EXPR
13229 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13230 && TREE_CODE (op2) == SSA_NAME
13231 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
13232 && gimple_code (stmt) == GIMPLE_ASSIGN
13233 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
13234 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
13235 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
13236 return 1;
13237 }
13238
13239 /* fall through */
13240
13241 default:
13242 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
13243 return multiple_p (wi::to_poly_widest (top),
13244 wi::to_poly_widest (bottom));
13245
13246 return 0;
13247 }
13248 }
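/* For example, multiple_of_p (type, X * 6 + 12, 3) returns 1: the
   PLUS_EXPR case checks that 12 is a multiple of 3 and then recurses
   into X * 6, whose constant factor 6 is itself a multiple of 3.  */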
13249
13250 #define tree_expr_nonnegative_warnv_p(X, Y) \
13251 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13252
13253 #define RECURSE(X) \
13254 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
13255
13256 /* Return true if CODE or TYPE is known to be non-negative. */
13257
13258 static bool
13259 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13260 {
13261 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13262 && truth_value_p (code))
13263 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13264 have a signed:1 type (where the values are -1 and 0). */
13265 return true;
13266 return false;
13267 }
13268
13269 /* Return true if (CODE OP0) is known to be non-negative. If the return
13270 value is based on the assumption that signed overflow is undefined,
13271 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13272 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13273
13274 bool
13275 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13276 bool *strict_overflow_p, int depth)
13277 {
13278 if (TYPE_UNSIGNED (type))
13279 return true;
13280
13281 switch (code)
13282 {
13283 case ABS_EXPR:
13284 /* We can't return 1 if flag_wrapv is set because
13285 ABS_EXPR<INT_MIN> = INT_MIN. */
13286 if (!ANY_INTEGRAL_TYPE_P (type))
13287 return true;
13288 if (TYPE_OVERFLOW_UNDEFINED (type))
13289 {
13290 *strict_overflow_p = true;
13291 return true;
13292 }
13293 break;
13294
13295 case NON_LVALUE_EXPR:
13296 case FLOAT_EXPR:
13297 case FIX_TRUNC_EXPR:
13298 return RECURSE (op0);
13299
13300 CASE_CONVERT:
13301 {
13302 tree inner_type = TREE_TYPE (op0);
13303 tree outer_type = type;
13304
13305 if (TREE_CODE (outer_type) == REAL_TYPE)
13306 {
13307 if (TREE_CODE (inner_type) == REAL_TYPE)
13308 return RECURSE (op0);
13309 if (INTEGRAL_TYPE_P (inner_type))
13310 {
13311 if (TYPE_UNSIGNED (inner_type))
13312 return true;
13313 return RECURSE (op0);
13314 }
13315 }
13316 else if (INTEGRAL_TYPE_P (outer_type))
13317 {
13318 if (TREE_CODE (inner_type) == REAL_TYPE)
13319 return RECURSE (op0);
13320 if (INTEGRAL_TYPE_P (inner_type))
13321 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13322 && TYPE_UNSIGNED (inner_type);
13323 }
13324 }
13325 break;
13326
13327 default:
13328 return tree_simple_nonnegative_warnv_p (code, type);
13329 }
13330
13331 /* We don't know the sign of `t', so be conservative and return false. */
13332 return false;
13333 }
13334
13335 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13336 value is based on the assumption that signed overflow is undefined,
13337 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13338 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13339
13340 bool
13341 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13342 tree op1, bool *strict_overflow_p,
13343 int depth)
13344 {
13345 if (TYPE_UNSIGNED (type))
13346 return true;
13347
13348 switch (code)
13349 {
13350 case POINTER_PLUS_EXPR:
13351 case PLUS_EXPR:
13352 if (FLOAT_TYPE_P (type))
13353 return RECURSE (op0) && RECURSE (op1);
13354
13355 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13356 both unsigned and at least 2 bits shorter than the result. */
13357 if (TREE_CODE (type) == INTEGER_TYPE
13358 && TREE_CODE (op0) == NOP_EXPR
13359 && TREE_CODE (op1) == NOP_EXPR)
13360 {
13361 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13362 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13363 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13364 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13365 {
13366 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13367 TYPE_PRECISION (inner2)) + 1;
13368 return prec < TYPE_PRECISION (type);
13369 }
13370 }
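      /* E.g. two unsigned 8-bit values zero-extended and added in a 32-bit
	 signed type: MAX (8, 8) + 1 == 9 < 32, and indeed the sum is at most
	 255 + 255 == 510, which cannot reach the sign bit.  */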
13371 break;
13372
13373 case MULT_EXPR:
13374 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13375 {
13376 /* x * x is always non-negative for floating point x
13377 or when signed overflow is undefined. */
13378 if (operand_equal_p (op0, op1, 0)
13379 || (RECURSE (op0) && RECURSE (op1)))
13380 {
13381 if (ANY_INTEGRAL_TYPE_P (type)
13382 && TYPE_OVERFLOW_UNDEFINED (type))
13383 *strict_overflow_p = true;
13384 return true;
13385 }
13386 }
13387
13388 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13389 both unsigned and their combined precision is less than that of the result. */
13390 if (TREE_CODE (type) == INTEGER_TYPE
13391 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13392 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13393 {
13394 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13395 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13396 : TREE_TYPE (op0);
13397 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13398 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13399 : TREE_TYPE (op1);
13400
13401 bool unsigned0 = TYPE_UNSIGNED (inner0);
13402 bool unsigned1 = TYPE_UNSIGNED (inner1);
13403
13404 if (TREE_CODE (op0) == INTEGER_CST)
13405 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13406
13407 if (TREE_CODE (op1) == INTEGER_CST)
13408 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13409
13410 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13411 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13412 {
13413 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13414 ? tree_int_cst_min_precision (op0, UNSIGNED)
13415 : TYPE_PRECISION (inner0);
13416
13417 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13418 ? tree_int_cst_min_precision (op1, UNSIGNED)
13419 : TYPE_PRECISION (inner1);
13420
13421 return precision0 + precision1 < TYPE_PRECISION (type);
13422 }
13423 }
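      /* E.g. two unsigned 8-bit values zero-extended and multiplied in a
	 32-bit signed type: 8 + 8 == 16 < 32, and the product is at most
	 255 * 255, well below the sign bit.  */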
13424 return false;
13425
13426 case BIT_AND_EXPR:
13427 case MAX_EXPR:
13428 return RECURSE (op0) || RECURSE (op1);
13429
13430 case BIT_IOR_EXPR:
13431 case BIT_XOR_EXPR:
13432 case MIN_EXPR:
13433 case RDIV_EXPR:
13434 case TRUNC_DIV_EXPR:
13435 case CEIL_DIV_EXPR:
13436 case FLOOR_DIV_EXPR:
13437 case ROUND_DIV_EXPR:
13438 return RECURSE (op0) && RECURSE (op1);
13439
13440 case TRUNC_MOD_EXPR:
13441 return RECURSE (op0);
13442
13443 case FLOOR_MOD_EXPR:
13444 return RECURSE (op1);
13445
13446 case CEIL_MOD_EXPR:
13447 case ROUND_MOD_EXPR:
13448 default:
13449 return tree_simple_nonnegative_warnv_p (code, type);
13450 }
13451
13452 /* We don't know the sign of `t', so be conservative and return false. */
13453 return false;
13454 }
13455
13456 /* Return true if T is known to be non-negative. If the return
13457 value is based on the assumption that signed overflow is undefined,
13458 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13459 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13460
13461 bool
13462 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13463 {
13464 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13465 return true;
13466
13467 switch (TREE_CODE (t))
13468 {
13469 case INTEGER_CST:
13470 return tree_int_cst_sgn (t) >= 0;
13471
13472 case REAL_CST:
13473 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13474
13475 case FIXED_CST:
13476 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13477
13478 case COND_EXPR:
13479 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13480
13481 case SSA_NAME:
13482 /* Limit the depth of recursion to avoid quadratic behavior.
13483 This is expected to catch almost all occurrences in practice.
13484 If this code misses important cases that unbounded recursion
13485 would not, passes that need this information could be revised
13486 to provide it through dataflow propagation. */
13487 return (!name_registered_for_update_p (t)
13488 && depth < param_max_ssa_name_query_depth
13489 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13490 strict_overflow_p, depth));
13491
13492 default:
13493 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13494 }
13495 }
13496
13497 /* Return true if T is known to be non-negative. If the return
13498 value is based on the assumption that signed overflow is undefined,
13499 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13500 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13501
13502 bool
13503 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13504 bool *strict_overflow_p, int depth)
13505 {
13506 switch (fn)
13507 {
13508 CASE_CFN_ACOS:
13509 CASE_CFN_ACOSH:
13510 CASE_CFN_CABS:
13511 CASE_CFN_COSH:
13512 CASE_CFN_ERFC:
13513 CASE_CFN_EXP:
13514 CASE_CFN_EXP10:
13515 CASE_CFN_EXP2:
13516 CASE_CFN_FABS:
13517 CASE_CFN_FDIM:
13518 CASE_CFN_HYPOT:
13519 CASE_CFN_POW10:
13520 CASE_CFN_FFS:
13521 CASE_CFN_PARITY:
13522 CASE_CFN_POPCOUNT:
13523 CASE_CFN_CLZ:
13524 CASE_CFN_CLRSB:
13525 case CFN_BUILT_IN_BSWAP32:
13526 case CFN_BUILT_IN_BSWAP64:
13527 /* Always true. */
13528 return true;
13529
13530 CASE_CFN_SQRT:
13531 CASE_CFN_SQRT_FN:
13532 /* sqrt(-0.0) is -0.0. */
13533 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13534 return true;
13535 return RECURSE (arg0);
13536
13537 CASE_CFN_ASINH:
13538 CASE_CFN_ATAN:
13539 CASE_CFN_ATANH:
13540 CASE_CFN_CBRT:
13541 CASE_CFN_CEIL:
13542 CASE_CFN_CEIL_FN:
13543 CASE_CFN_ERF:
13544 CASE_CFN_EXPM1:
13545 CASE_CFN_FLOOR:
13546 CASE_CFN_FLOOR_FN:
13547 CASE_CFN_FMOD:
13548 CASE_CFN_FREXP:
13549 CASE_CFN_ICEIL:
13550 CASE_CFN_IFLOOR:
13551 CASE_CFN_IRINT:
13552 CASE_CFN_IROUND:
13553 CASE_CFN_LCEIL:
13554 CASE_CFN_LDEXP:
13555 CASE_CFN_LFLOOR:
13556 CASE_CFN_LLCEIL:
13557 CASE_CFN_LLFLOOR:
13558 CASE_CFN_LLRINT:
13559 CASE_CFN_LLROUND:
13560 CASE_CFN_LRINT:
13561 CASE_CFN_LROUND:
13562 CASE_CFN_MODF:
13563 CASE_CFN_NEARBYINT:
13564 CASE_CFN_NEARBYINT_FN:
13565 CASE_CFN_RINT:
13566 CASE_CFN_RINT_FN:
13567 CASE_CFN_ROUND:
13568 CASE_CFN_ROUND_FN:
13569 CASE_CFN_ROUNDEVEN:
13570 CASE_CFN_ROUNDEVEN_FN:
13571 CASE_CFN_SCALB:
13572 CASE_CFN_SCALBLN:
13573 CASE_CFN_SCALBN:
13574 CASE_CFN_SIGNBIT:
13575 CASE_CFN_SIGNIFICAND:
13576 CASE_CFN_SINH:
13577 CASE_CFN_TANH:
13578 CASE_CFN_TRUNC:
13579 CASE_CFN_TRUNC_FN:
13580 /* True if the 1st argument is nonnegative. */
13581 return RECURSE (arg0);
13582
13583 CASE_CFN_FMAX:
13584 CASE_CFN_FMAX_FN:
13585 /* True if the 1st OR the 2nd argument is nonnegative. */
13586 return RECURSE (arg0) || RECURSE (arg1);
13587
13588 CASE_CFN_FMIN:
13589 CASE_CFN_FMIN_FN:
13590 /* True if the 1st AND 2nd arguments are nonnegative. */
13591 return RECURSE (arg0) && RECURSE (arg1);
13592
13593 CASE_CFN_COPYSIGN:
13594 CASE_CFN_COPYSIGN_FN:
13595 /* True if the 2nd argument is nonnegative. */
13596 return RECURSE (arg1);
13597
13598 CASE_CFN_POWI:
13599 /* True if the 1st argument is nonnegative or the second
13600 argument is an even integer. */
13601 if (TREE_CODE (arg1) == INTEGER_CST
13602 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13603 return true;
13604 return RECURSE (arg0);
13605
13606 CASE_CFN_POW:
13607 /* True if the 1st argument is nonnegative or the second
13608 argument is an even integer valued real. */
13609 if (TREE_CODE (arg1) == REAL_CST)
13610 {
13611 REAL_VALUE_TYPE c;
13612 HOST_WIDE_INT n;
13613
13614 c = TREE_REAL_CST (arg1);
13615 n = real_to_integer (&c);
13616 if ((n & 1) == 0)
13617 {
13618 REAL_VALUE_TYPE cint;
13619 real_from_integer (&cint, VOIDmode, n, SIGNED);
13620 if (real_identical (&c, &cint))
13621 return true;
13622 }
13623 }
13624 return RECURSE (arg0);
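      /* So pow (x, 2.0) is known to be nonnegative for any x, while for
	 pow (x, 2.5) we can only rely on the sign of x itself.  */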
13625
13626 default:
13627 break;
13628 }
13629 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13630 }
13631
13632 /* Return true if T is known to be non-negative. If the return
13633 value is based on the assumption that signed overflow is undefined,
13634 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13635 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13636
13637 static bool
13638 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13639 {
13640 enum tree_code code = TREE_CODE (t);
13641 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13642 return true;
13643
13644 switch (code)
13645 {
13646 case TARGET_EXPR:
13647 {
13648 tree temp = TARGET_EXPR_SLOT (t);
13649 t = TARGET_EXPR_INITIAL (t);
13650
13651 /* If the initializer is non-void, then it's a normal expression
13652 that will be assigned to the slot. */
13653 if (!VOID_TYPE_P (t))
13654 return RECURSE (t);
13655
13656 /* Otherwise, the initializer sets the slot in some way. One common
13657 way is an assignment statement at the end of the initializer. */
13658 while (1)
13659 {
13660 if (TREE_CODE (t) == BIND_EXPR)
13661 t = expr_last (BIND_EXPR_BODY (t));
13662 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13663 || TREE_CODE (t) == TRY_CATCH_EXPR)
13664 t = expr_last (TREE_OPERAND (t, 0));
13665 else if (TREE_CODE (t) == STATEMENT_LIST)
13666 t = expr_last (t);
13667 else
13668 break;
13669 }
13670 if (TREE_CODE (t) == MODIFY_EXPR
13671 && TREE_OPERAND (t, 0) == temp)
13672 return RECURSE (TREE_OPERAND (t, 1));
13673
13674 return false;
13675 }
13676
13677 case CALL_EXPR:
13678 {
13679 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13680 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13681
13682 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13683 get_call_combined_fn (t),
13684 arg0,
13685 arg1,
13686 strict_overflow_p, depth);
13687 }
13688 case COMPOUND_EXPR:
13689 case MODIFY_EXPR:
13690 return RECURSE (TREE_OPERAND (t, 1));
13691
13692 case BIND_EXPR:
13693 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13694
13695 case SAVE_EXPR:
13696 return RECURSE (TREE_OPERAND (t, 0));
13697
13698 default:
13699 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13700 }
13701 }
13702
13703 #undef RECURSE
13704 #undef tree_expr_nonnegative_warnv_p
13705
13706 /* Return true if T is known to be non-negative. If the return
13707 value is based on the assumption that signed overflow is undefined,
13708 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13709 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13710
13711 bool
13712 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13713 {
13714 enum tree_code code;
13715 if (t == error_mark_node)
13716 return false;
13717
13718 code = TREE_CODE (t);
13719 switch (TREE_CODE_CLASS (code))
13720 {
13721 case tcc_binary:
13722 case tcc_comparison:
13723 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13724 TREE_TYPE (t),
13725 TREE_OPERAND (t, 0),
13726 TREE_OPERAND (t, 1),
13727 strict_overflow_p, depth);
13728
13729 case tcc_unary:
13730 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13731 TREE_TYPE (t),
13732 TREE_OPERAND (t, 0),
13733 strict_overflow_p, depth);
13734
13735 case tcc_constant:
13736 case tcc_declaration:
13737 case tcc_reference:
13738 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13739
13740 default:
13741 break;
13742 }
13743
13744 switch (code)
13745 {
13746 case TRUTH_AND_EXPR:
13747 case TRUTH_OR_EXPR:
13748 case TRUTH_XOR_EXPR:
13749 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13750 TREE_TYPE (t),
13751 TREE_OPERAND (t, 0),
13752 TREE_OPERAND (t, 1),
13753 strict_overflow_p, depth);
13754 case TRUTH_NOT_EXPR:
13755 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13756 TREE_TYPE (t),
13757 TREE_OPERAND (t, 0),
13758 strict_overflow_p, depth);
13759
13760 case COND_EXPR:
13761 case CONSTRUCTOR:
13762 case OBJ_TYPE_REF:
13763 case ASSERT_EXPR:
13764 case ADDR_EXPR:
13765 case WITH_SIZE_EXPR:
13766 case SSA_NAME:
13767 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13768
13769 default:
13770 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13771 }
13772 }
13773
13774 /* Return true if `t' is known to be non-negative. Handle warnings
13775 about undefined signed overflow. */
13776
13777 bool
13778 tree_expr_nonnegative_p (tree t)
13779 {
13780 bool ret, strict_overflow_p;
13781
13782 strict_overflow_p = false;
13783 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13784 if (strict_overflow_p)
13785 fold_overflow_warning (("assuming signed overflow does not occur when "
13786 "determining that expression is always "
13787 "non-negative"),
13788 WARN_STRICT_OVERFLOW_MISC);
13789 return ret;
13790 }
13791
13792
13793 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13794 For floating point we further ensure that T is not denormal.
13795 Similar logic is present in nonzero_address in rtlanal.h.
13796
13797 If the return value is based on the assumption that signed overflow
13798 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13799 change *STRICT_OVERFLOW_P. */
13800
13801 bool
13802 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13803 bool *strict_overflow_p)
13804 {
13805 switch (code)
13806 {
13807 case ABS_EXPR:
13808 return tree_expr_nonzero_warnv_p (op0,
13809 strict_overflow_p);
13810
13811 case NOP_EXPR:
13812 {
13813 tree inner_type = TREE_TYPE (op0);
13814 tree outer_type = type;
13815
13816 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13817 && tree_expr_nonzero_warnv_p (op0,
13818 strict_overflow_p));
13819 }
13820 break;
13821
13822 case NON_LVALUE_EXPR:
13823 return tree_expr_nonzero_warnv_p (op0,
13824 strict_overflow_p);
13825
13826 default:
13827 break;
13828 }
13829
13830 return false;
13831 }
13832
13833 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13834 For floating point we further ensure that T is not denormal.
13835 Similar logic is present in nonzero_address in rtlanal.h.
13836
13837 If the return value is based on the assumption that signed overflow
13838 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13839 change *STRICT_OVERFLOW_P. */
13840
13841 bool
13842 tree_binary_nonzero_warnv_p (enum tree_code code,
13843 tree type,
13844 tree op0,
13845 tree op1, bool *strict_overflow_p)
13846 {
13847 bool sub_strict_overflow_p;
13848 switch (code)
13849 {
13850 case POINTER_PLUS_EXPR:
13851 case PLUS_EXPR:
13852 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13853 {
13854 	  /* In the presence of negative values it is hard
13855 	     to say anything definite.  */
13856 sub_strict_overflow_p = false;
13857 if (!tree_expr_nonnegative_warnv_p (op0,
13858 &sub_strict_overflow_p)
13859 || !tree_expr_nonnegative_warnv_p (op1,
13860 &sub_strict_overflow_p))
13861 return false;
13862 	  /* One of the operands must be positive and the other non-negative.  */
13863 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13864 overflows, on a twos-complement machine the sum of two
13865 nonnegative numbers can never be zero. */
13866 return (tree_expr_nonzero_warnv_p (op0,
13867 strict_overflow_p)
13868 || tree_expr_nonzero_warnv_p (op1,
13869 strict_overflow_p));
13870 }
13871 break;
13872
13873 case MULT_EXPR:
13874 if (TYPE_OVERFLOW_UNDEFINED (type))
13875 {
13876 if (tree_expr_nonzero_warnv_p (op0,
13877 strict_overflow_p)
13878 && tree_expr_nonzero_warnv_p (op1,
13879 strict_overflow_p))
13880 {
13881 *strict_overflow_p = true;
13882 return true;
13883 }
13884 }
13885 break;
13886
13887 case MIN_EXPR:
13888 sub_strict_overflow_p = false;
13889 if (tree_expr_nonzero_warnv_p (op0,
13890 &sub_strict_overflow_p)
13891 && tree_expr_nonzero_warnv_p (op1,
13892 &sub_strict_overflow_p))
13893 {
13894 if (sub_strict_overflow_p)
13895 *strict_overflow_p = true;
13896 }
13897 break;
13898
13899 case MAX_EXPR:
13900 sub_strict_overflow_p = false;
13901 if (tree_expr_nonzero_warnv_p (op0,
13902 &sub_strict_overflow_p))
13903 {
13904 if (sub_strict_overflow_p)
13905 *strict_overflow_p = true;
13906
13907 /* When both operands are nonzero, then MAX must be too. */
13908 if (tree_expr_nonzero_warnv_p (op1,
13909 strict_overflow_p))
13910 return true;
13911
13912 /* MAX where operand 0 is positive is positive. */
13913 return tree_expr_nonnegative_warnv_p (op0,
13914 strict_overflow_p);
13915 }
13916 /* MAX where operand 1 is positive is positive. */
13917 else if (tree_expr_nonzero_warnv_p (op1,
13918 &sub_strict_overflow_p)
13919 && tree_expr_nonnegative_warnv_p (op1,
13920 &sub_strict_overflow_p))
13921 {
13922 if (sub_strict_overflow_p)
13923 *strict_overflow_p = true;
13924 return true;
13925 }
13926 break;
13927
13928 case BIT_IOR_EXPR:
13929 return (tree_expr_nonzero_warnv_p (op1,
13930 strict_overflow_p)
13931 || tree_expr_nonzero_warnv_p (op0,
13932 strict_overflow_p));
13933
13934 default:
13935 break;
13936 }
13937
13938 return false;
13939 }
13940
13941 /* Return true when T is an address and is known to be nonzero.
13942 For floating point we further ensure that T is not denormal.
13943 Similar logic is present in nonzero_address in rtlanal.h.
13944
13945 If the return value is based on the assumption that signed overflow
13946 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13947 change *STRICT_OVERFLOW_P. */
13948
13949 bool
13950 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13951 {
13952 bool sub_strict_overflow_p;
13953 switch (TREE_CODE (t))
13954 {
13955 case INTEGER_CST:
13956 return !integer_zerop (t);
13957
13958 case ADDR_EXPR:
13959 {
13960 tree base = TREE_OPERAND (t, 0);
13961
13962 if (!DECL_P (base))
13963 base = get_base_address (base);
13964
13965 if (base && TREE_CODE (base) == TARGET_EXPR)
13966 base = TARGET_EXPR_SLOT (base);
13967
13968 if (!base)
13969 return false;
13970
13971 	/* For objects in the symbol table, check if we know they are non-zero.
13972 Don't do anything for variables and functions before symtab is built;
13973 it is quite possible that they will be declared weak later. */
13974 int nonzero_addr = maybe_nonzero_address (base);
13975 if (nonzero_addr >= 0)
13976 return nonzero_addr;
13977
13978 /* Constants are never weak. */
13979 if (CONSTANT_CLASS_P (base))
13980 return true;
13981
13982 return false;
13983 }
13984
13985 case COND_EXPR:
13986 sub_strict_overflow_p = false;
13987 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13988 &sub_strict_overflow_p)
13989 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13990 &sub_strict_overflow_p))
13991 {
13992 if (sub_strict_overflow_p)
13993 *strict_overflow_p = true;
13994 return true;
13995 }
13996 break;
13997
13998 case SSA_NAME:
13999 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
14000 break;
14001 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
14002
14003 default:
14004 break;
14005 }
14006 return false;
14007 }
14008
14009 #define integer_valued_real_p(X) \
14010 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14011
14012 #define RECURSE(X) \
14013 ((integer_valued_real_p) (X, depth + 1))
14014
14015 /* Return true if the floating point result of (CODE OP0) has an
14016 integer value. We also allow +Inf, -Inf and NaN to be considered
14017 integer values. Return false for signaling NaN.
14018
14019 DEPTH is the current nesting depth of the query. */
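/* For instance, FLOAT_EXPR (an integer-to-float conversion) always yields an
   integer value, and ABS_EXPR does whenever its operand is integer valued.  */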
14020
14021 bool
14022 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
14023 {
14024 switch (code)
14025 {
14026 case FLOAT_EXPR:
14027 return true;
14028
14029 case ABS_EXPR:
14030 return RECURSE (op0);
14031
14032 CASE_CONVERT:
14033 {
14034 tree type = TREE_TYPE (op0);
14035 if (TREE_CODE (type) == INTEGER_TYPE)
14036 return true;
14037 if (TREE_CODE (type) == REAL_TYPE)
14038 return RECURSE (op0);
14039 break;
14040 }
14041
14042 default:
14043 break;
14044 }
14045 return false;
14046 }
14047
14048 /* Return true if the floating point result of (CODE OP0 OP1) has an
14049 integer value. We also allow +Inf, -Inf and NaN to be considered
14050 integer values. Return false for signaling NaN.
14051
14052 DEPTH is the current nesting depth of the query. */
14053
14054 bool
14055 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
14056 {
14057 switch (code)
14058 {
14059 case PLUS_EXPR:
14060 case MINUS_EXPR:
14061 case MULT_EXPR:
14062 case MIN_EXPR:
14063 case MAX_EXPR:
14064 return RECURSE (op0) && RECURSE (op1);
14065
14066 default:
14067 break;
14068 }
14069 return false;
14070 }
14071
14072 /* Return true if the floating point result of calling FN with arguments
14073    ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
14074    considered integer values.  Return false for signaling NaN.  If FN
14075    takes fewer than 2 arguments, the remaining ARGn are null.
14076
14077 DEPTH is the current nesting depth of the query. */
14078
14079 bool
14080 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
14081 {
14082 switch (fn)
14083 {
14084 CASE_CFN_CEIL:
14085 CASE_CFN_CEIL_FN:
14086 CASE_CFN_FLOOR:
14087 CASE_CFN_FLOOR_FN:
14088 CASE_CFN_NEARBYINT:
14089 CASE_CFN_NEARBYINT_FN:
14090 CASE_CFN_RINT:
14091 CASE_CFN_RINT_FN:
14092 CASE_CFN_ROUND:
14093 CASE_CFN_ROUND_FN:
14094 CASE_CFN_ROUNDEVEN:
14095 CASE_CFN_ROUNDEVEN_FN:
14096 CASE_CFN_TRUNC:
14097 CASE_CFN_TRUNC_FN:
14098 return true;
14099
14100 CASE_CFN_FMIN:
14101 CASE_CFN_FMIN_FN:
14102 CASE_CFN_FMAX:
14103 CASE_CFN_FMAX_FN:
14104 return RECURSE (arg0) && RECURSE (arg1);
14105
14106 default:
14107 break;
14108 }
14109 return false;
14110 }
14111
14112 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
14113 has an integer value. We also allow +Inf, -Inf and NaN to be
14114 considered integer values. Return false for signaling NaN.
14115
14116 DEPTH is the current nesting depth of the query. */
14117
14118 bool
14119 integer_valued_real_single_p (tree t, int depth)
14120 {
14121 switch (TREE_CODE (t))
14122 {
14123 case REAL_CST:
14124 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
14125
14126 case COND_EXPR:
14127 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14128
14129 case SSA_NAME:
14130 /* Limit the depth of recursion to avoid quadratic behavior.
14131 This is expected to catch almost all occurrences in practice.
14132 If this code misses important cases that unbounded recursion
14133 would not, passes that need this information could be revised
14134 to provide it through dataflow propagation. */
14135 return (!name_registered_for_update_p (t)
14136 && depth < param_max_ssa_name_query_depth
14137 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
14138 depth));
14139
14140 default:
14141 break;
14142 }
14143 return false;
14144 }
14145
14146 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
14147 has an integer value. We also allow +Inf, -Inf and NaN to be
14148 considered integer values. Return false for signaling NaN.
14149
14150 DEPTH is the current nesting depth of the query. */
14151
14152 static bool
14153 integer_valued_real_invalid_p (tree t, int depth)
14154 {
14155 switch (TREE_CODE (t))
14156 {
14157 case COMPOUND_EXPR:
14158 case MODIFY_EXPR:
14159 case BIND_EXPR:
14160 return RECURSE (TREE_OPERAND (t, 1));
14161
14162 case SAVE_EXPR:
14163 return RECURSE (TREE_OPERAND (t, 0));
14164
14165 default:
14166 break;
14167 }
14168 return false;
14169 }
14170
14171 #undef RECURSE
14172 #undef integer_valued_real_p
14173
14174 /* Return true if the floating point expression T has an integer value.
14175 We also allow +Inf, -Inf and NaN to be considered integer values.
14176 Return false for signaling NaN.
14177
14178 DEPTH is the current nesting depth of the query. */
14179
14180 bool
14181 integer_valued_real_p (tree t, int depth)
14182 {
14183 if (t == error_mark_node)
14184 return false;
14185
14186 STRIP_ANY_LOCATION_WRAPPER (t);
14187
14188 tree_code code = TREE_CODE (t);
14189 switch (TREE_CODE_CLASS (code))
14190 {
14191 case tcc_binary:
14192 case tcc_comparison:
14193 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
14194 TREE_OPERAND (t, 1), depth);
14195
14196 case tcc_unary:
14197 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
14198
14199 case tcc_constant:
14200 case tcc_declaration:
14201 case tcc_reference:
14202 return integer_valued_real_single_p (t, depth);
14203
14204 default:
14205 break;
14206 }
14207
14208 switch (code)
14209 {
14210 case COND_EXPR:
14211 case SSA_NAME:
14212 return integer_valued_real_single_p (t, depth);
14213
14214 case CALL_EXPR:
14215 {
14216 tree arg0 = (call_expr_nargs (t) > 0
14217 ? CALL_EXPR_ARG (t, 0)
14218 : NULL_TREE);
14219 tree arg1 = (call_expr_nargs (t) > 1
14220 ? CALL_EXPR_ARG (t, 1)
14221 : NULL_TREE);
14222 return integer_valued_real_call_p (get_call_combined_fn (t),
14223 arg0, arg1, depth);
14224 }
14225
14226 default:
14227 return integer_valued_real_invalid_p (t, depth);
14228 }
14229 }
14230
14231 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14232 attempt to fold the expression to a constant without modifying TYPE,
14233 OP0 or OP1.
14234
14235    If the expression can be simplified to a constant, then return
14236    the constant.  If the expression cannot be simplified to a
14237    constant, then return NULL_TREE.  */
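/* A minimal illustrative use (a sketch, not exercised in this file):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					  two, three);

   here FIVE would be the INTEGER_CST 5, whereas folding a sum involving a
   non-constant operand would return NULL_TREE.  */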
14238
14239 tree
14240 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14241 {
14242 tree tem = fold_binary (code, type, op0, op1);
14243 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14244 }
14245
14246 /* Given the components of a unary expression CODE, TYPE and OP0,
14247 attempt to fold the expression to a constant without modifying
14248 TYPE or OP0.
14249
14250    If the expression can be simplified to a constant, then return
14251    the constant.  If the expression cannot be simplified to a
14252    constant, then return NULL_TREE.  */
14253
14254 tree
14255 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14256 {
14257 tree tem = fold_unary (code, type, op0);
14258 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14259 }
14260
14261 /* If EXP represents referencing an element in a constant string
14262 (either via pointer arithmetic or array indexing), return the
14263 tree representing the value accessed, otherwise return NULL. */
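/* For example, a read of "hello"[1] (or equivalently *("hello" + 1)) folds
   to the character constant 'e'.  */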
14264
14265 tree
14266 fold_read_from_constant_string (tree exp)
14267 {
14268 if ((TREE_CODE (exp) == INDIRECT_REF
14269 || TREE_CODE (exp) == ARRAY_REF)
14270 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14271 {
14272 tree exp1 = TREE_OPERAND (exp, 0);
14273 tree index;
14274 tree string;
14275 location_t loc = EXPR_LOCATION (exp);
14276
14277 if (TREE_CODE (exp) == INDIRECT_REF)
14278 string = string_constant (exp1, &index, NULL, NULL);
14279 else
14280 {
14281 tree low_bound = array_ref_low_bound (exp);
14282 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14283
14284 /* Optimize the special-case of a zero lower bound.
14285
14286 We convert the low_bound to sizetype to avoid some problems
14287 with constant folding. (E.g. suppose the lower bound is 1,
14288 	     and its mode is QI.  Without the conversion, (ARRAY
14289 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14290 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14291 if (! integer_zerop (low_bound))
14292 index = size_diffop_loc (loc, index,
14293 fold_convert_loc (loc, sizetype, low_bound));
14294
14295 string = exp1;
14296 }
14297
14298 scalar_int_mode char_mode;
14299 if (string
14300 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14301 && TREE_CODE (string) == STRING_CST
14302 && TREE_CODE (index) == INTEGER_CST
14303 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14304 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
14305 &char_mode)
14306 && GET_MODE_SIZE (char_mode) == 1)
14307 return build_int_cst_type (TREE_TYPE (exp),
14308 (TREE_STRING_POINTER (string)
14309 [TREE_INT_CST_LOW (index)]));
14310 }
14311 return NULL;
14312 }
14313
14314 /* Fold a read of the element at index IDX of vector ARG.  */
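/* For example, reading element 2 of the VECTOR_CST { 1, 2, 3, 4 } yields 3,
   and a read past the explicit elements of a CONSTRUCTOR yields zero.  */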
14315
14316 tree
14317 fold_read_from_vector (tree arg, poly_uint64 idx)
14318 {
14319 unsigned HOST_WIDE_INT i;
14320 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
14321 && known_ge (idx, 0u)
14322 && idx.is_constant (&i))
14323 {
14324 if (TREE_CODE (arg) == VECTOR_CST)
14325 return VECTOR_CST_ELT (arg, i);
14326 else if (TREE_CODE (arg) == CONSTRUCTOR)
14327 {
14328 if (i >= CONSTRUCTOR_NELTS (arg))
14329 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
14330 return CONSTRUCTOR_ELT (arg, i)->value;
14331 }
14332 }
14333 return NULL_TREE;
14334 }
14335
14336 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14337 an integer constant, real, or fixed-point constant.
14338
14339 TYPE is the type of the result. */
14340
14341 static tree
14342 fold_negate_const (tree arg0, tree type)
14343 {
14344 tree t = NULL_TREE;
14345
14346 switch (TREE_CODE (arg0))
14347 {
14348 case REAL_CST:
14349 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14350 break;
14351
14352 case FIXED_CST:
14353 {
14354 FIXED_VALUE_TYPE f;
14355 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14356 &(TREE_FIXED_CST (arg0)), NULL,
14357 TYPE_SATURATING (type));
14358 t = build_fixed (type, f);
14359 /* Propagate overflow flags. */
14360 if (overflow_p | TREE_OVERFLOW (arg0))
14361 TREE_OVERFLOW (t) = 1;
14362 break;
14363 }
14364
14365 default:
14366 if (poly_int_tree_p (arg0))
14367 {
14368 wi::overflow_type overflow;
14369 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
14370 t = force_fit_type (type, res, 1,
14371 (overflow && ! TYPE_UNSIGNED (type))
14372 || TREE_OVERFLOW (arg0));
14373 break;
14374 }
14375
14376 gcc_unreachable ();
14377 }
14378
14379 return t;
14380 }
14381
14382 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14383 an integer constant or real constant.
14384
14385 TYPE is the type of the result. */
14386
14387 tree
14388 fold_abs_const (tree arg0, tree type)
14389 {
14390 tree t = NULL_TREE;
14391
14392 switch (TREE_CODE (arg0))
14393 {
14394 case INTEGER_CST:
14395 {
14396 /* If the value is unsigned or non-negative, then the absolute value
14397 is the same as the ordinary value. */
14398 wide_int val = wi::to_wide (arg0);
14399 wi::overflow_type overflow = wi::OVF_NONE;
14400 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
14401 ;
14402
14403 /* If the value is negative, then the absolute value is
14404 its negation. */
14405 else
14406 val = wi::neg (val, &overflow);
14407
14408 /* Force to the destination type, set TREE_OVERFLOW for signed
14409 TYPE only. */
14410 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
14411 }
14412 break;
14413
14414 case REAL_CST:
14415 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14416 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14417 else
14418 t = arg0;
14419 break;
14420
14421 default:
14422 gcc_unreachable ();
14423 }
14424
14425 return t;
14426 }
14427
14428 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14429 constant. TYPE is the type of the result. */
14430
14431 static tree
14432 fold_not_const (const_tree arg0, tree type)
14433 {
14434 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14435
14436 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
14437 }
14438
14439 /* Given CODE, a relational operator, the target type, TYPE and two
14440 constant operands OP0 and OP1, return the result of the
14441 relational operation. If the result is not a compile time
14442 constant, then return NULL_TREE. */
14443
14444 static tree
14445 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14446 {
14447 int result, invert;
14448
14449 /* From here on, the only cases we handle are when the result is
14450 known to be a constant. */
14451
14452 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14453 {
14454 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14455 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14456
14457 /* Handle the cases where either operand is a NaN. */
14458 if (real_isnan (c0) || real_isnan (c1))
14459 {
14460 switch (code)
14461 {
14462 case EQ_EXPR:
14463 case ORDERED_EXPR:
14464 result = 0;
14465 break;
14466
14467 case NE_EXPR:
14468 case UNORDERED_EXPR:
14469 case UNLT_EXPR:
14470 case UNLE_EXPR:
14471 case UNGT_EXPR:
14472 case UNGE_EXPR:
14473 case UNEQ_EXPR:
14474 result = 1;
14475 break;
14476
14477 case LT_EXPR:
14478 case LE_EXPR:
14479 case GT_EXPR:
14480 case GE_EXPR:
14481 case LTGT_EXPR:
14482 if (flag_trapping_math)
14483 return NULL_TREE;
14484 result = 0;
14485 break;
14486
14487 default:
14488 gcc_unreachable ();
14489 }
14490
14491 return constant_boolean_node (result, type);
14492 }
14493
14494 return constant_boolean_node (real_compare (code, c0, c1), type);
14495 }
14496
14497 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14498 {
14499 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14500 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14501 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14502 }
14503
14504 /* Handle equality/inequality of complex constants. */
14505 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14506 {
14507 tree rcond = fold_relational_const (code, type,
14508 TREE_REALPART (op0),
14509 TREE_REALPART (op1));
14510 tree icond = fold_relational_const (code, type,
14511 TREE_IMAGPART (op0),
14512 TREE_IMAGPART (op1));
14513 if (code == EQ_EXPR)
14514 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14515 else if (code == NE_EXPR)
14516 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14517 else
14518 return NULL_TREE;
14519 }
14520
14521 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14522 {
14523 if (!VECTOR_TYPE_P (type))
14524 {
14525 /* Have vector comparison with scalar boolean result. */
14526 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14527 && known_eq (VECTOR_CST_NELTS (op0),
14528 VECTOR_CST_NELTS (op1)));
14529 unsigned HOST_WIDE_INT nunits;
14530 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14531 return NULL_TREE;
14532 for (unsigned i = 0; i < nunits; i++)
14533 {
14534 tree elem0 = VECTOR_CST_ELT (op0, i);
14535 tree elem1 = VECTOR_CST_ELT (op1, i);
14536 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
14537 if (tmp == NULL_TREE)
14538 return NULL_TREE;
14539 if (integer_zerop (tmp))
14540 return constant_boolean_node (code == NE_EXPR, type);
14541 }
14542 return constant_boolean_node (code == EQ_EXPR, type);
14543 }
14544 tree_vector_builder elts;
14545 if (!elts.new_binary_operation (type, op0, op1, false))
14546 return NULL_TREE;
14547 unsigned int count = elts.encoded_nelts ();
14548 for (unsigned i = 0; i < count; i++)
14549 {
14550 tree elem_type = TREE_TYPE (type);
14551 tree elem0 = VECTOR_CST_ELT (op0, i);
14552 tree elem1 = VECTOR_CST_ELT (op1, i);
14553
14554 tree tem = fold_relational_const (code, elem_type,
14555 elem0, elem1);
14556
14557 if (tem == NULL_TREE)
14558 return NULL_TREE;
14559
14560 elts.quick_push (build_int_cst (elem_type,
14561 integer_zerop (tem) ? 0 : -1));
14562 }
14563
14564 return elts.build ();
14565 }
14566
14567 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14568
14569 To compute GT, swap the arguments and do LT.
14570 To compute GE, do LT and invert the result.
14571 To compute LE, swap the arguments, do LT and invert the result.
14572 To compute NE, do EQ and invert the result.
14573
14574 Therefore, the code below must handle only EQ and LT. */
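  /* For example, 3 >= 5 is computed as !(3 < 5), which is false, and
     3 <= 5 is computed as !(5 < 3) after swapping the operands, which
     is true.  */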
14575
14576 if (code == LE_EXPR || code == GT_EXPR)
14577 {
14578 std::swap (op0, op1);
14579 code = swap_tree_comparison (code);
14580 }
14581
14582 /* Note that it is safe to invert for real values here because we
14583 have already handled the one case that it matters. */
14584
14585 invert = 0;
14586 if (code == NE_EXPR || code == GE_EXPR)
14587 {
14588 invert = 1;
14589 code = invert_tree_comparison (code, false);
14590 }
14591
14592   /* Compute a result for LT or EQ if the arguments permit;
14593      otherwise return NULL_TREE.  */
14594 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14595 {
14596 if (code == EQ_EXPR)
14597 result = tree_int_cst_equal (op0, op1);
14598 else
14599 result = tree_int_cst_lt (op0, op1);
14600 }
14601 else
14602 return NULL_TREE;
14603
14604 if (invert)
14605 result ^= 1;
14606 return constant_boolean_node (result, type);
14607 }
14608
14609 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14610 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14611 itself. */
14612
14613 tree
14614 fold_build_cleanup_point_expr (tree type, tree expr)
14615 {
14616 /* If the expression does not have side effects then we don't have to wrap
14617 it with a cleanup point expression. */
14618 if (!TREE_SIDE_EFFECTS (expr))
14619 return expr;
14620
14621   /* If the expression is a return, check whether the expression inside the
14622      return, or the right-hand side of the modify expression inside the
14623      return, has side effects.  If either has none, we don't need to wrap
14624      the expression in a cleanup point expression.  Note we don't check the
14625      left-hand side of the modify because it should always be a return decl.  */
14626 if (TREE_CODE (expr) == RETURN_EXPR)
14627 {
14628 tree op = TREE_OPERAND (expr, 0);
14629 if (!op || !TREE_SIDE_EFFECTS (op))
14630 return expr;
14631 op = TREE_OPERAND (op, 1);
14632 if (!TREE_SIDE_EFFECTS (op))
14633 return expr;
14634 }
14635
14636 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14637 }
14638
14639 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14640 of an indirection through OP0, or NULL_TREE if no simplification is
14641 possible. */
14642
14643 tree
14644 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14645 {
14646 tree sub = op0;
14647 tree subtype;
14648 poly_uint64 const_op01;
14649
14650 STRIP_NOPS (sub);
14651 subtype = TREE_TYPE (sub);
14652 if (!POINTER_TYPE_P (subtype)
14653 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14654 return NULL_TREE;
14655
14656 if (TREE_CODE (sub) == ADDR_EXPR)
14657 {
14658 tree op = TREE_OPERAND (sub, 0);
14659 tree optype = TREE_TYPE (op);
14660
14661       /* *&CONST_DECL -> the value of the const decl.  */
14662 if (TREE_CODE (op) == CONST_DECL)
14663 return DECL_INITIAL (op);
14664 /* *&p => p; make sure to handle *&"str"[cst] here. */
14665 if (type == optype)
14666 {
14667 tree fop = fold_read_from_constant_string (op);
14668 if (fop)
14669 return fop;
14670 else
14671 return op;
14672 }
14673 /* *(foo *)&fooarray => fooarray[0] */
14674 else if (TREE_CODE (optype) == ARRAY_TYPE
14675 && type == TREE_TYPE (optype)
14676 && (!in_gimple_form
14677 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14678 {
14679 tree type_domain = TYPE_DOMAIN (optype);
14680 tree min_val = size_zero_node;
14681 if (type_domain && TYPE_MIN_VALUE (type_domain))
14682 min_val = TYPE_MIN_VALUE (type_domain);
14683 if (in_gimple_form
14684 && TREE_CODE (min_val) != INTEGER_CST)
14685 return NULL_TREE;
14686 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14687 NULL_TREE, NULL_TREE);
14688 }
14689 /* *(foo *)&complexfoo => __real__ complexfoo */
14690 else if (TREE_CODE (optype) == COMPLEX_TYPE
14691 && type == TREE_TYPE (optype))
14692 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14693 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14694 else if (VECTOR_TYPE_P (optype)
14695 && type == TREE_TYPE (optype))
14696 {
14697 tree part_width = TYPE_SIZE (type);
14698 tree index = bitsize_int (0);
14699 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
14700 index);
14701 }
14702 }
14703
14704 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14705 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
14706 {
14707 tree op00 = TREE_OPERAND (sub, 0);
14708 tree op01 = TREE_OPERAND (sub, 1);
14709
14710 STRIP_NOPS (op00);
14711 if (TREE_CODE (op00) == ADDR_EXPR)
14712 {
14713 tree op00type;
14714 op00 = TREE_OPERAND (op00, 0);
14715 op00type = TREE_TYPE (op00);
14716
14717 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14718 if (VECTOR_TYPE_P (op00type)
14719 && type == TREE_TYPE (op00type)
14720 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
14721 but we want to treat offsets with MSB set as negative.
14722 For the code below negative offsets are invalid and
14723 TYPE_SIZE of the element is something unsigned, so
14724 check whether op01 fits into poly_int64, which implies
14725 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
14726 then just use poly_uint64 because we want to treat the
14727 value as unsigned. */
14728 && tree_fits_poly_int64_p (op01))
14729 {
14730 tree part_width = TYPE_SIZE (type);
14731 poly_uint64 max_offset
14732 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14733 * TYPE_VECTOR_SUBPARTS (op00type));
14734 if (known_lt (const_op01, max_offset))
14735 {
14736 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
14737 return fold_build3_loc (loc,
14738 BIT_FIELD_REF, type, op00,
14739 part_width, index);
14740 }
14741 }
14742 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14743 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14744 && type == TREE_TYPE (op00type))
14745 {
14746 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
14747 const_op01))
14748 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14749 }
14750 /* ((foo *)&fooarray)[1] => fooarray[1] */
14751 else if (TREE_CODE (op00type) == ARRAY_TYPE
14752 && type == TREE_TYPE (op00type))
14753 {
14754 tree type_domain = TYPE_DOMAIN (op00type);
14755 tree min_val = size_zero_node;
14756 if (type_domain && TYPE_MIN_VALUE (type_domain))
14757 min_val = TYPE_MIN_VALUE (type_domain);
14758 poly_uint64 type_size, index;
14759 if (poly_int_tree_p (min_val)
14760 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
14761 && multiple_p (const_op01, type_size, &index))
14762 {
14763 poly_offset_int off = index + wi::to_poly_offset (min_val);
14764 op01 = wide_int_to_tree (sizetype, off);
14765 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14766 NULL_TREE, NULL_TREE);
14767 }
14768 }
14769 }
14770 }
14771
14772 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14773 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14774 && type == TREE_TYPE (TREE_TYPE (subtype))
14775 && (!in_gimple_form
14776 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14777 {
14778 tree type_domain;
14779 tree min_val = size_zero_node;
14780 sub = build_fold_indirect_ref_loc (loc, sub);
14781 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14782 if (type_domain && TYPE_MIN_VALUE (type_domain))
14783 min_val = TYPE_MIN_VALUE (type_domain);
14784 if (in_gimple_form
14785 && TREE_CODE (min_val) != INTEGER_CST)
14786 return NULL_TREE;
14787 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14788 NULL_TREE);
14789 }
14790
14791 return NULL_TREE;
14792 }
14793
14794 /* Builds an expression for an indirection through T, simplifying some
14795 cases. */
14796
14797 tree
14798 build_fold_indirect_ref_loc (location_t loc, tree t)
14799 {
14800 tree type = TREE_TYPE (TREE_TYPE (t));
14801 tree sub = fold_indirect_ref_1 (loc, type, t);
14802
14803 if (sub)
14804 return sub;
14805
14806 return build1_loc (loc, INDIRECT_REF, type, t);
14807 }
14808
14809 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14810
14811 tree
14812 fold_indirect_ref_loc (location_t loc, tree t)
14813 {
14814 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14815
14816 if (sub)
14817 return sub;
14818 else
14819 return t;
14820 }
14821
14822 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14823 whose result is ignored. The type of the returned tree need not be
14824 the same as the original expression. */
14825
14826 tree
14827 fold_ignored_result (tree t)
14828 {
14829 if (!TREE_SIDE_EFFECTS (t))
14830 return integer_zero_node;
14831
14832 for (;;)
14833 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14834 {
14835 case tcc_unary:
14836 t = TREE_OPERAND (t, 0);
14837 break;
14838
14839 case tcc_binary:
14840 case tcc_comparison:
14841 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14842 t = TREE_OPERAND (t, 0);
14843 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14844 t = TREE_OPERAND (t, 1);
14845 else
14846 return t;
14847 break;
14848
14849 case tcc_expression:
14850 switch (TREE_CODE (t))
14851 {
14852 case COMPOUND_EXPR:
14853 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14854 return t;
14855 t = TREE_OPERAND (t, 0);
14856 break;
14857
14858 case COND_EXPR:
14859 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14860 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14861 return t;
14862 t = TREE_OPERAND (t, 0);
14863 break;
14864
14865 default:
14866 return t;
14867 }
14868 break;
14869
14870 default:
14871 return t;
14872 }
14873 }
14874
14875 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
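/* For example, with a power-of-two divisor this reduces to bit twiddling:
   round_up (13, 8) == (13 + 7) & -8 == 16.  */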
14876
14877 tree
14878 round_up_loc (location_t loc, tree value, unsigned int divisor)
14879 {
14880 tree div = NULL_TREE;
14881
14882 if (divisor == 1)
14883 return value;
14884
14885 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14886 have to do anything. Only do this when we are not given a const,
14887 because in that case, this check is more expensive than just
14888 doing it. */
14889 if (TREE_CODE (value) != INTEGER_CST)
14890 {
14891 div = build_int_cst (TREE_TYPE (value), divisor);
14892
14893 if (multiple_of_p (TREE_TYPE (value), value, div))
14894 return value;
14895 }
14896
14897 /* If divisor is a power of two, simplify this to bit manipulation. */
14898 if (pow2_or_zerop (divisor))
14899 {
14900 if (TREE_CODE (value) == INTEGER_CST)
14901 {
14902 wide_int val = wi::to_wide (value);
14903 bool overflow_p;
14904
14905 if ((val & (divisor - 1)) == 0)
14906 return value;
14907
14908 overflow_p = TREE_OVERFLOW (value);
14909 val += divisor - 1;
14910 val &= (int) -divisor;
14911 if (val == 0)
14912 overflow_p = true;
14913
14914 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14915 }
14916 else
14917 {
14918 tree t;
14919
14920 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14921 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14922 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14923 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14924 }
14925 }
14926 else
14927 {
14928 if (!div)
14929 div = build_int_cst (TREE_TYPE (value), divisor);
14930 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14931 value = size_binop_loc (loc, MULT_EXPR, value, div);
14932 }
14933
14934 return value;
14935 }
14936
14937 /* Likewise, but round down. */
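/* For example, round_down (13, 8) == 13 & -8 == 8.  */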
14938
14939 tree
14940 round_down_loc (location_t loc, tree value, int divisor)
14941 {
14942 tree div = NULL_TREE;
14943
14944 gcc_assert (divisor > 0);
14945 if (divisor == 1)
14946 return value;
14947
14948 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14949 have to do anything. Only do this when we are not given a const,
14950 because in that case, this check is more expensive than just
14951 doing it. */
14952 if (TREE_CODE (value) != INTEGER_CST)
14953 {
14954 div = build_int_cst (TREE_TYPE (value), divisor);
14955
14956 if (multiple_of_p (TREE_TYPE (value), value, div))
14957 return value;
14958 }
14959
14960 /* If divisor is a power of two, simplify this to bit manipulation. */
14961 if (pow2_or_zerop (divisor))
14962 {
14963 tree t;
14964
14965 t = build_int_cst (TREE_TYPE (value), -divisor);
14966 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14967 }
14968 else
14969 {
14970 if (!div)
14971 div = build_int_cst (TREE_TYPE (value), divisor);
14972 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14973 value = size_binop_loc (loc, MULT_EXPR, value, div);
14974 }
14975
14976 return value;
14977 }
14978
14979 /* Returns the pointer to the base of the object addressed by EXP and
14980 extracts the information about the offset of the access, storing it
14981 to PBITPOS and POFFSET. */
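/* For example, for &s.f where field F sits at byte offset 4, the core would
   be &s with *PBITPOS set to 32 (bits) and *POFFSET to NULL_TREE; for
   p + n with a non-constant N, the core is P, *PBITPOS is 0 and *POFFSET
   is N.  */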
14982
14983 static tree
14984 split_address_to_core_and_offset (tree exp,
14985 poly_int64_pod *pbitpos, tree *poffset)
14986 {
14987 tree core;
14988 machine_mode mode;
14989 int unsignedp, reversep, volatilep;
14990 poly_int64 bitsize;
14991 location_t loc = EXPR_LOCATION (exp);
14992
14993 if (TREE_CODE (exp) == ADDR_EXPR)
14994 {
14995 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14996 poffset, &mode, &unsignedp, &reversep,
14997 &volatilep);
14998 core = build_fold_addr_expr_loc (loc, core);
14999 }
15000 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
15001 {
15002 core = TREE_OPERAND (exp, 0);
15003 STRIP_NOPS (core);
15004 *pbitpos = 0;
15005 *poffset = TREE_OPERAND (exp, 1);
15006 if (poly_int_tree_p (*poffset))
15007 {
15008 poly_offset_int tem
15009 = wi::sext (wi::to_poly_offset (*poffset),
15010 TYPE_PRECISION (TREE_TYPE (*poffset)));
15011 tem <<= LOG2_BITS_PER_UNIT;
15012 if (tem.to_shwi (pbitpos))
15013 *poffset = NULL_TREE;
15014 }
15015 }
15016 else
15017 {
15018 core = exp;
15019 *pbitpos = 0;
15020 *poffset = NULL_TREE;
15021 }
15022
15023 return core;
15024 }
15025
15026 /* Returns true if addresses of E1 and E2 differ by a constant, false
15027 otherwise. If they do, E1 - E2 is stored in *DIFF. */
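/* For example, for an array A of 4-byte integers, &A[3] and &A[2] differ by
   the constant 4, so *DIFF would be set to 4.  */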
15028
15029 bool
15030 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
15031 {
15032 tree core1, core2;
15033 poly_int64 bitpos1, bitpos2;
15034 tree toffset1, toffset2, tdiff, type;
15035
15036 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15037 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15038
15039 poly_int64 bytepos1, bytepos2;
15040 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
15041 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
15042 || !operand_equal_p (core1, core2, 0))
15043 return false;
15044
15045 if (toffset1 && toffset2)
15046 {
15047 type = TREE_TYPE (toffset1);
15048 if (type != TREE_TYPE (toffset2))
15049 toffset2 = fold_convert (type, toffset2);
15050
15051 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15052 if (!cst_and_fits_in_hwi (tdiff))
15053 return false;
15054
15055 *diff = int_cst_value (tdiff);
15056 }
15057 else if (toffset1 || toffset2)
15058 {
15059 /* If only one of the offsets is non-constant, the difference cannot
15060 be a constant. */
15061 return false;
15062 }
15063 else
15064 *diff = 0;
15065
15066 *diff += bytepos1 - bytepos2;
15067 return true;
15068 }
15069
15070 /* Return OFF converted to a pointer offset type suitable as offset for
15071 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15072 tree
15073 convert_to_ptrofftype_loc (location_t loc, tree off)
15074 {
15075 return fold_convert_loc (loc, sizetype, off);
15076 }
15077
15078 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15079 tree
15080 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
15081 {
15082 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15083 ptr, convert_to_ptrofftype_loc (loc, off));
15084 }
15085
15086 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15087 tree
15088 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
15089 {
15090 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15091 ptr, size_int (off));
15092 }
15093
15094 /* Return a pointer P to a NUL-terminated string representing the sequence
15095 of constant characters referred to by SRC (or a subsequence of such
15096 characters within it if SRC is a reference to a string plus some
15097 constant offset). If STRLEN is non-null, store the number of bytes
15098 in the string constant including the terminating NUL char. *STRLEN is
15099 typically strlen(P) + 1 in the absence of embedded NUL characters. */
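/* For example, for SRC referring to "hello" at offset 2, P would point to
   "llo" and, if STRLEN is non-null, *STRLEN would be set to 4 (the three
   characters plus the terminating NUL).  */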
15100
15101 const char *
15102 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen /* = NULL */)
15103 {
15104 tree offset_node;
15105 tree mem_size;
15106
15107 if (strlen)
15108 *strlen = 0;
15109
15110 src = string_constant (src, &offset_node, &mem_size, NULL);
15111 if (src == 0)
15112 return NULL;
15113
15114 unsigned HOST_WIDE_INT offset = 0;
15115 if (offset_node != NULL_TREE)
15116 {
15117 if (!tree_fits_uhwi_p (offset_node))
15118 return NULL;
15119 else
15120 offset = tree_to_uhwi (offset_node);
15121 }
15122
15123 if (!tree_fits_uhwi_p (mem_size))
15124 return NULL;
15125
15126 /* STRING_LENGTH is the size of the string literal, including any
15127 embedded NULs. STRING_SIZE is the size of the array the string
15128 literal is stored in. */
15129 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
15130 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size);
15131
15132 /* Ideally this would turn into a gcc_checking_assert over time. */
15133 if (string_length > string_size)
15134 string_length = string_size;
15135
15136 const char *string = TREE_STRING_POINTER (src);
15137
15142 if (string_length == 0
15143 || offset >= string_size)
15144 return NULL;
15145
15146 if (strlen)
15147 {
15148 /* Compute and store the length of the substring at OFFSET.
15149 All offsets past the initial length refer to null strings. */
15150 if (offset < string_length)
15151 *strlen = string_length - offset;
15152 else
15153 *strlen = 1;
15154 }
15155 else
15156 {
15157 tree eltype = TREE_TYPE (TREE_TYPE (src));
15158 /* Support only properly NUL-terminated single byte strings. */
15159 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
15160 return NULL;
15161 if (string[string_length - 1] != '\0')
15162 return NULL;
15163 }
15164
15165 return offset < string_length ? string + offset : "";
15166 }
15167
15168 /* Given a tree T, compute which bits in T may be nonzero. */
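/* For example, for (X & 12) | 3 the result is 15: the BIT_AND_EXPR can only
   set bits 2 and 3, and the constant 3 contributes bits 0 and 1.  */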
15169
15170 wide_int
15171 tree_nonzero_bits (const_tree t)
15172 {
15173 switch (TREE_CODE (t))
15174 {
15175 case INTEGER_CST:
15176 return wi::to_wide (t);
15177 case SSA_NAME:
15178 return get_nonzero_bits (t);
15179 case NON_LVALUE_EXPR:
15180 case SAVE_EXPR:
15181 return tree_nonzero_bits (TREE_OPERAND (t, 0));
15182 case BIT_AND_EXPR:
15183 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15184 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15185 case BIT_IOR_EXPR:
15186 case BIT_XOR_EXPR:
15187 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15188 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15189 case COND_EXPR:
15190 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
15191 tree_nonzero_bits (TREE_OPERAND (t, 2)));
15192 CASE_CONVERT:
15193 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15194 TYPE_PRECISION (TREE_TYPE (t)),
15195 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
15196 case PLUS_EXPR:
15197 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
15198 {
15199 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
15200 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
15201 if (wi::bit_and (nzbits1, nzbits2) == 0)
15202 return wi::bit_or (nzbits1, nzbits2);
15203 }
15204 break;
15205 case LSHIFT_EXPR:
15206 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15207 {
15208 tree type = TREE_TYPE (t);
15209 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15210 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15211 TYPE_PRECISION (type));
15212 return wi::neg_p (arg1)
15213 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
15214 : wi::lshift (nzbits, arg1);
15215 }
15216 break;
15217 case RSHIFT_EXPR:
15218 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15219 {
15220 tree type = TREE_TYPE (t);
15221 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15222 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15223 TYPE_PRECISION (type));
15224 return wi::neg_p (arg1)
15225 ? wi::lshift (nzbits, -arg1)
15226 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
15227 }
15228 break;
15229 default:
15230 break;
15231 }
15232
15233 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
15234 }
15235
15236 #if CHECKING_P
15237
15238 namespace selftest {
15239
15240 /* Helper functions for writing tests of folding trees. */
15241
15242 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
15243
15244 static void
15245 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
15246 tree constant)
15247 {
15248 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
15249 }
15250
15251 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
15252 wrapping WRAPPED_EXPR. */
15253
15254 static void
15255 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
15256 tree wrapped_expr)
15257 {
15258 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
15259 ASSERT_NE (wrapped_expr, result);
15260 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
15261 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
15262 }
15263
15264 /* Verify that various arithmetic binary operations are folded
15265 correctly. */
15266
15267 static void
15268 test_arithmetic_folding ()
15269 {
15270 tree type = integer_type_node;
15271 tree x = create_tmp_var_raw (type, "x");
15272 tree zero = build_zero_cst (type);
15273 tree one = build_int_cst (type, 1);
15274
15275 /* Addition. */
15276 /* 1 <-- (0 + 1) */
15277 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
15278 one);
15279 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
15280 one);
15281
15282 /* (nonlvalue)x <-- (x + 0) */
15283 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
15284 x);
15285
15286 /* Subtraction. */
15287 /* 0 <-- (x - x) */
15288 assert_binop_folds_to_const (x, MINUS_EXPR, x,
15289 zero);
15290 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
15291 x);
15292
15293 /* Multiplication. */
15294 /* 0 <-- (x * 0) */
15295 assert_binop_folds_to_const (x, MULT_EXPR, zero,
15296 zero);
15297
15298 /* (nonlvalue)x <-- (x * 1) */
15299 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
15300 x);
15301 }
15302
15303 /* Verify that various binary operations on vectors are folded
15304 correctly. */
15305
15306 static void
15307 test_vector_folding ()
15308 {
15309 tree inner_type = integer_type_node;
15310 tree type = build_vector_type (inner_type, 4);
15311 tree zero = build_zero_cst (type);
15312 tree one = build_one_cst (type);
15313 tree index = build_index_vector (type, 0, 1);
15314
15315 /* Verify equality tests that return a scalar boolean result. */
15316 tree res_type = boolean_type_node;
15317 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
15318 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
15319 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
15320 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
15321 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
15322 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15323 index, one)));
15324 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
15325 index, index)));
15326 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15327 index, index)));
15328 }
15329
15330 /* Verify folding of VEC_DUPLICATE_EXPRs. */
15331
15332 static void
15333 test_vec_duplicate_folding ()
15334 {
15335 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
15336 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
15337 /* This will be 1 if VEC_MODE isn't a vector mode. */
15338 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
15339
15340 tree type = build_vector_type (ssizetype, nunits);
15341 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
15342 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
15343 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
15344 }
15345
15346 /* Run all of the selftests within this file. */
15347
15348 void
15349 fold_const_c_tests ()
15350 {
15351 test_arithmetic_folding ();
15352 test_vector_folding ();
15353 test_vec_duplicate_folding ();
15354 }
15355
15356 } // namespace selftest
15357
15358 #endif /* CHECKING_P */