1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
23 @@ warn if precision etc. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
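/* As a small illustration of these entry points (a sketch, not a
   specification): size_int (12) creates the sizetype constant 12, and
   size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds immediately
   to that same constant, since both operands are INTEGER_CSTs.  */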
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-vrp.h"
79 #include "tree-ssanames.h"
80 #include "selftest.h"
81 #include "stringpool.h"
82 #include "attribs.h"
83 #include "tree-vector-builder.h"
84 #include "vec-perm-indices.h"
85
86 /* Nonzero if we are folding constants inside an initializer; zero
87 otherwise. */
88 int folding_initializer = 0;
89
90 /* The following constants represent a bit based encoding of GCC's
91 comparison operators. This encoding simplifies transformations
92 on relational comparison operators, such as AND and OR. */
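/* Concretely, bit 0 encodes LT, bit 1 encodes EQ, bit 2 encodes GT and
   bit 3 encodes UNORD, so for example COMPCODE_LE is
   COMPCODE_LT | COMPCODE_EQ and COMPCODE_NE is
   COMPCODE_UNORD | COMPCODE_LT | COMPCODE_GT; ORing two codes yields
   the code of the combined comparison.  */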
93 enum comparison_code {
94 COMPCODE_FALSE = 0,
95 COMPCODE_LT = 1,
96 COMPCODE_EQ = 2,
97 COMPCODE_LE = 3,
98 COMPCODE_GT = 4,
99 COMPCODE_LTGT = 5,
100 COMPCODE_GE = 6,
101 COMPCODE_ORD = 7,
102 COMPCODE_UNORD = 8,
103 COMPCODE_UNLT = 9,
104 COMPCODE_UNEQ = 10,
105 COMPCODE_UNLE = 11,
106 COMPCODE_UNGT = 12,
107 COMPCODE_NE = 13,
108 COMPCODE_UNGE = 14,
109 COMPCODE_TRUE = 15
110 };
111
112 static bool negate_expr_p (tree);
113 static tree negate_expr (tree);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static bool twoval_comparison_p (tree, tree *, tree *);
118 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
119 static tree optimize_bit_field_compare (location_t, enum tree_code,
120 tree, tree, tree);
121 static bool simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
130 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
131 static tree fold_binary_op_with_conditional_arg (location_t,
132 enum tree_code, tree,
133 tree, tree,
134 tree, tree, int);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (const_tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_convert_const (enum tree_code, tree, tree);
139 static tree fold_view_convert_expr (tree, tree);
140 static tree fold_negate_expr (location_t, tree);
141
142
143 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
144 Otherwise, return LOC. */
145
146 static location_t
147 expr_location_or (tree t, location_t loc)
148 {
149 location_t tloc = EXPR_LOCATION (t);
150 return tloc == UNKNOWN_LOCATION ? loc : tloc;
151 }
152
153 /* Similar to protected_set_expr_location, but never modify x in place;
154 if the location can and needs to be set, unshare it. */
155
156 static inline tree
157 protected_set_expr_location_unshare (tree x, location_t loc)
158 {
159 if (CAN_HAVE_LOCATION_P (x)
160 && EXPR_LOCATION (x) != loc
161 && !(TREE_CODE (x) == SAVE_EXPR
162 || TREE_CODE (x) == TARGET_EXPR
163 || TREE_CODE (x) == BIND_EXPR))
164 {
165 x = copy_node (x);
166 SET_EXPR_LOCATION (x, loc);
167 }
168 return x;
169 }
170 \f
171 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
172 division and returns the quotient. Otherwise returns
173 NULL_TREE. */
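/* For example, ARG1 == 12 and ARG2 == 4 yields the INTEGER_CST 3,
   while ARG1 == 13 and ARG2 == 4 yields NULL_TREE because the
   remainder is nonzero.  */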
174
175 tree
176 div_if_zero_remainder (const_tree arg1, const_tree arg2)
177 {
178 widest_int quo;
179
180 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
181 SIGNED, &quo))
182 return wide_int_to_tree (TREE_TYPE (arg1), quo);
183
184 return NULL_TREE;
185 }
186 \f
187 /* This is nonzero if we should defer warnings about undefined
188 overflow. This facility exists because these warnings are a
189 special case. The code to estimate loop iterations does not want
190 to issue any warnings, since it works with expressions which do not
191 occur in user code. Various bits of cleanup code call fold(), but
192 only use the result if it has certain characteristics (e.g., is a
193 constant); that code only wants to issue a warning if the result is
194 used. */
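/* The intended usage pattern, sketched here for orientation only, is:

     fold_defer_overflow_warnings ();
     tem = fold (expr);
     ... decide whether TEM is actually used ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   where passing 0 as the last argument means "keep the deferred
   warning level".  */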
195
196 static int fold_deferring_overflow_warnings;
197
198 /* If a warning about undefined overflow is deferred, this is the
199 warning. Note that this may cause us to turn two warnings into
200 one, but that is fine since it is sufficient to only give one
201 warning per expression. */
202
203 static const char* fold_deferred_overflow_warning;
204
205 /* If a warning about undefined overflow is deferred, this is the
206 level at which the warning should be emitted. */
207
208 static enum warn_strict_overflow_code fold_deferred_overflow_code;
209
210 /* Start deferring overflow warnings. We could use a stack here to
211 permit nested calls, but at present it is not necessary. */
212
213 void
214 fold_defer_overflow_warnings (void)
215 {
216 ++fold_deferring_overflow_warnings;
217 }
218
219 /* Stop deferring overflow warnings. If there is a pending warning,
220 and ISSUE is true, then issue the warning if appropriate. STMT is
221 the statement with which the warning should be associated (used for
222 location information); STMT may be NULL. CODE is the level of the
223 warning--a warn_strict_overflow_code value. This function will use
224 the smaller of CODE and the deferred code when deciding whether to
225 issue the warning. CODE may be zero to mean to always use the
226 deferred code. */
227
228 void
229 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
230 {
231 const char *warnmsg;
232 location_t locus;
233
234 gcc_assert (fold_deferring_overflow_warnings > 0);
235 --fold_deferring_overflow_warnings;
236 if (fold_deferring_overflow_warnings > 0)
237 {
238 if (fold_deferred_overflow_warning != NULL
239 && code != 0
240 && code < (int) fold_deferred_overflow_code)
241 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
242 return;
243 }
244
245 warnmsg = fold_deferred_overflow_warning;
246 fold_deferred_overflow_warning = NULL;
247
248 if (!issue || warnmsg == NULL)
249 return;
250
251 if (gimple_no_warning_p (stmt))
252 return;
253
254 /* Use the smallest code level when deciding to issue the
255 warning. */
256 if (code == 0 || code > (int) fold_deferred_overflow_code)
257 code = fold_deferred_overflow_code;
258
259 if (!issue_strict_overflow_warning (code))
260 return;
261
262 if (stmt == NULL)
263 locus = input_location;
264 else
265 locus = gimple_location (stmt);
266 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
267 }
268
269 /* Stop deferring overflow warnings, ignoring any deferred
270 warnings. */
271
272 void
273 fold_undefer_and_ignore_overflow_warnings (void)
274 {
275 fold_undefer_overflow_warnings (false, NULL, 0);
276 }
277
278 /* Whether we are deferring overflow warnings. */
279
280 bool
281 fold_deferring_overflow_warnings_p (void)
282 {
283 return fold_deferring_overflow_warnings > 0;
284 }
285
286 /* This is called when we fold something based on the fact that signed
287 overflow is undefined. */
288
289 void
290 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
291 {
292 if (fold_deferring_overflow_warnings > 0)
293 {
294 if (fold_deferred_overflow_warning == NULL
295 || wc < fold_deferred_overflow_code)
296 {
297 fold_deferred_overflow_warning = gmsgid;
298 fold_deferred_overflow_code = wc;
299 }
300 }
301 else if (issue_strict_overflow_warning (wc))
302 warning (OPT_Wstrict_overflow, gmsgid);
303 }
304 \f
305 /* Return true if the built-in mathematical function specified by CODE
306 is odd, i.e. -f(x) == f(-x). */
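/* For example, sin is odd (-sin (x) == sin (-x)), so the CASE_CFN_SIN
   entries appear below, while cos is even and is deliberately not
   listed.  */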
307
308 bool
309 negate_mathfn_p (combined_fn fn)
310 {
311 switch (fn)
312 {
313 CASE_CFN_ASIN:
314 CASE_CFN_ASINH:
315 CASE_CFN_ATAN:
316 CASE_CFN_ATANH:
317 CASE_CFN_CASIN:
318 CASE_CFN_CASINH:
319 CASE_CFN_CATAN:
320 CASE_CFN_CATANH:
321 CASE_CFN_CBRT:
322 CASE_CFN_CPROJ:
323 CASE_CFN_CSIN:
324 CASE_CFN_CSINH:
325 CASE_CFN_CTAN:
326 CASE_CFN_CTANH:
327 CASE_CFN_ERF:
328 CASE_CFN_LLROUND:
329 CASE_CFN_LROUND:
330 CASE_CFN_ROUND:
331 CASE_CFN_ROUNDEVEN:
332 CASE_CFN_ROUNDEVEN_FN:
333 CASE_CFN_SIN:
334 CASE_CFN_SINH:
335 CASE_CFN_TAN:
336 CASE_CFN_TANH:
337 CASE_CFN_TRUNC:
338 return true;
339
340 CASE_CFN_LLRINT:
341 CASE_CFN_LRINT:
342 CASE_CFN_NEARBYINT:
343 CASE_CFN_RINT:
344 return !flag_rounding_math;
345
346 default:
347 break;
348 }
349 return false;
350 }
351
352 /* Check whether we may negate an integer constant T without causing
353 overflow. */
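/* For a 32-bit signed type, for instance, only INT_MIN (just the sign
   bit set) fails this test; every other value, including INT_MAX, can
   be negated without overflow.  Unsigned types always fail.  */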
354
355 bool
356 may_negate_without_overflow_p (const_tree t)
357 {
358 tree type;
359
360 gcc_assert (TREE_CODE (t) == INTEGER_CST);
361
362 type = TREE_TYPE (t);
363 if (TYPE_UNSIGNED (type))
364 return false;
365
366 return !wi::only_sign_bit_p (wi::to_wide (t));
367 }
368
369 /* Determine whether an expression T can be cheaply negated using
370 the function negate_expr without introducing undefined overflow. */
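/* For example, a signed INTEGER_CST other than INT_MIN is cheaply
   negatable, as is an existing NEGATE_EXPR (unless signed-overflow
   sanitization is enabled), but a signed A + B is not unless the type
   wraps on overflow, because rewriting -(A + B) as (-B) - A could
   introduce a new overflow.  */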
371
372 static bool
373 negate_expr_p (tree t)
374 {
375 tree type;
376
377 if (t == 0)
378 return false;
379
380 type = TREE_TYPE (t);
381
382 STRIP_SIGN_NOPS (t);
383 switch (TREE_CODE (t))
384 {
385 case INTEGER_CST:
386 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
387 return true;
388
389 /* Check that -CST will not overflow type. */
390 return may_negate_without_overflow_p (t);
391 case BIT_NOT_EXPR:
392 return (INTEGRAL_TYPE_P (type)
393 && TYPE_OVERFLOW_WRAPS (type));
394
395 case FIXED_CST:
396 return true;
397
398 case NEGATE_EXPR:
399 return !TYPE_OVERFLOW_SANITIZED (type);
400
401 case REAL_CST:
402 /* We want to canonicalize to positive real constants. Pretend
403 that only negative ones can be easily negated. */
404 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
405
406 case COMPLEX_CST:
407 return negate_expr_p (TREE_REALPART (t))
408 && negate_expr_p (TREE_IMAGPART (t));
409
410 case VECTOR_CST:
411 {
412 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
413 return true;
414
415 /* Steps don't prevent negation. */
416 unsigned int count = vector_cst_encoded_nelts (t);
417 for (unsigned int i = 0; i < count; ++i)
418 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
419 return false;
420
421 return true;
422 }
423
424 case COMPLEX_EXPR:
425 return negate_expr_p (TREE_OPERAND (t, 0))
426 && negate_expr_p (TREE_OPERAND (t, 1));
427
428 case CONJ_EXPR:
429 return negate_expr_p (TREE_OPERAND (t, 0));
430
431 case PLUS_EXPR:
432 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
433 || HONOR_SIGNED_ZEROS (element_mode (type))
434 || (ANY_INTEGRAL_TYPE_P (type)
435 && ! TYPE_OVERFLOW_WRAPS (type)))
436 return false;
437 /* -(A + B) -> (-B) - A. */
438 if (negate_expr_p (TREE_OPERAND (t, 1)))
439 return true;
440 /* -(A + B) -> (-A) - B. */
441 return negate_expr_p (TREE_OPERAND (t, 0));
442
443 case MINUS_EXPR:
444 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
445 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
446 && !HONOR_SIGNED_ZEROS (element_mode (type))
447 && (! ANY_INTEGRAL_TYPE_P (type)
448 || TYPE_OVERFLOW_WRAPS (type));
449
450 case MULT_EXPR:
451 if (TYPE_UNSIGNED (type))
452 break;
453 /* INT_MIN/n * n doesn't overflow, but negating one of its operands does
454 if n is a (negative) power of two. */
455 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
456 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
457 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
458 && (wi::popcount
459 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
460 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
461 && (wi::popcount
462 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
463 break;
464
465 /* Fall through. */
466
467 case RDIV_EXPR:
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
469 return negate_expr_p (TREE_OPERAND (t, 1))
470 || negate_expr_p (TREE_OPERAND (t, 0));
471 break;
472
473 case TRUNC_DIV_EXPR:
474 case ROUND_DIV_EXPR:
475 case EXACT_DIV_EXPR:
476 if (TYPE_UNSIGNED (type))
477 break;
478 /* In general we can't negate A in A / B, because if A is INT_MIN and
479 B is not 1 we change the sign of the result. */
480 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
481 && negate_expr_p (TREE_OPERAND (t, 0)))
482 return true;
483 /* In general we can't negate B in A / B, because if A is INT_MIN and
484 B is 1, we may turn this into INT_MIN / -1 which is undefined
485 and actually traps on some architectures. */
486 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
487 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
488 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
489 && ! integer_onep (TREE_OPERAND (t, 1))))
490 return negate_expr_p (TREE_OPERAND (t, 1));
491 break;
492
493 case NOP_EXPR:
494 /* Negate (double)float as (double)(-float). */
495 if (TREE_CODE (type) == REAL_TYPE)
496 {
497 tree tem = strip_float_extensions (t);
498 if (tem != t)
499 return negate_expr_p (tem);
500 }
501 break;
502
503 case CALL_EXPR:
504 /* Negate f(x) as f(-x). */
505 if (negate_mathfn_p (get_call_combined_fn (t)))
506 return negate_expr_p (CALL_EXPR_ARG (t, 0));
507 break;
508
509 case RSHIFT_EXPR:
510 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
511 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
512 {
513 tree op1 = TREE_OPERAND (t, 1);
514 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
515 return true;
516 }
517 break;
518
519 default:
520 break;
521 }
522 return false;
523 }
524
525 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
526 simplification is possible.
527 If negate_expr_p would return true for T, NULL_TREE will never be
528 returned. */
529
530 static tree
531 fold_negate_expr_1 (location_t loc, tree t)
532 {
533 tree type = TREE_TYPE (t);
534 tree tem;
535
536 switch (TREE_CODE (t))
537 {
538 /* Convert - (~A) to A + 1. */
539 case BIT_NOT_EXPR:
540 if (INTEGRAL_TYPE_P (type))
541 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
542 build_one_cst (type));
543 break;
544
545 case INTEGER_CST:
546 tem = fold_negate_const (t, type);
547 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
548 || (ANY_INTEGRAL_TYPE_P (type)
549 && !TYPE_OVERFLOW_TRAPS (type)
550 && TYPE_OVERFLOW_WRAPS (type))
551 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
552 return tem;
553 break;
554
555 case POLY_INT_CST:
556 case REAL_CST:
557 case FIXED_CST:
558 tem = fold_negate_const (t, type);
559 return tem;
560
561 case COMPLEX_CST:
562 {
563 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
564 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
565 if (rpart && ipart)
566 return build_complex (type, rpart, ipart);
567 }
568 break;
569
570 case VECTOR_CST:
571 {
572 tree_vector_builder elts;
573 elts.new_unary_operation (type, t, true);
574 unsigned int count = elts.encoded_nelts ();
575 for (unsigned int i = 0; i < count; ++i)
576 {
577 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
578 if (elt == NULL_TREE)
579 return NULL_TREE;
580 elts.quick_push (elt);
581 }
582
583 return elts.build ();
584 }
585
586 case COMPLEX_EXPR:
587 if (negate_expr_p (t))
588 return fold_build2_loc (loc, COMPLEX_EXPR, type,
589 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
590 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
591 break;
592
593 case CONJ_EXPR:
594 if (negate_expr_p (t))
595 return fold_build1_loc (loc, CONJ_EXPR, type,
596 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
597 break;
598
599 case NEGATE_EXPR:
600 if (!TYPE_OVERFLOW_SANITIZED (type))
601 return TREE_OPERAND (t, 0);
602 break;
603
604 case PLUS_EXPR:
605 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
606 && !HONOR_SIGNED_ZEROS (element_mode (type)))
607 {
608 /* -(A + B) -> (-B) - A. */
609 if (negate_expr_p (TREE_OPERAND (t, 1)))
610 {
611 tem = negate_expr (TREE_OPERAND (t, 1));
612 return fold_build2_loc (loc, MINUS_EXPR, type,
613 tem, TREE_OPERAND (t, 0));
614 }
615
616 /* -(A + B) -> (-A) - B. */
617 if (negate_expr_p (TREE_OPERAND (t, 0)))
618 {
619 tem = negate_expr (TREE_OPERAND (t, 0));
620 return fold_build2_loc (loc, MINUS_EXPR, type,
621 tem, TREE_OPERAND (t, 1));
622 }
623 }
624 break;
625
626 case MINUS_EXPR:
627 /* - (A - B) -> B - A */
628 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
629 && !HONOR_SIGNED_ZEROS (element_mode (type)))
630 return fold_build2_loc (loc, MINUS_EXPR, type,
631 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
632 break;
633
634 case MULT_EXPR:
635 if (TYPE_UNSIGNED (type))
636 break;
637
638 /* Fall through. */
639
640 case RDIV_EXPR:
641 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
642 {
643 tem = TREE_OPERAND (t, 1);
644 if (negate_expr_p (tem))
645 return fold_build2_loc (loc, TREE_CODE (t), type,
646 TREE_OPERAND (t, 0), negate_expr (tem));
647 tem = TREE_OPERAND (t, 0);
648 if (negate_expr_p (tem))
649 return fold_build2_loc (loc, TREE_CODE (t), type,
650 negate_expr (tem), TREE_OPERAND (t, 1));
651 }
652 break;
653
654 case TRUNC_DIV_EXPR:
655 case ROUND_DIV_EXPR:
656 case EXACT_DIV_EXPR:
657 if (TYPE_UNSIGNED (type))
658 break;
659 /* In general we can't negate A in A / B, because if A is INT_MIN and
660 B is not 1 we change the sign of the result. */
661 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
662 && negate_expr_p (TREE_OPERAND (t, 0)))
663 return fold_build2_loc (loc, TREE_CODE (t), type,
664 negate_expr (TREE_OPERAND (t, 0)),
665 TREE_OPERAND (t, 1));
666 /* In general we can't negate B in A / B, because if A is INT_MIN and
667 B is 1, we may turn this into INT_MIN / -1 which is undefined
668 and actually traps on some architectures. */
669 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
670 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
671 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
672 && ! integer_onep (TREE_OPERAND (t, 1))))
673 && negate_expr_p (TREE_OPERAND (t, 1)))
674 return fold_build2_loc (loc, TREE_CODE (t), type,
675 TREE_OPERAND (t, 0),
676 negate_expr (TREE_OPERAND (t, 1)));
677 break;
678
679 case NOP_EXPR:
680 /* Convert -((double)float) into (double)(-float). */
681 if (TREE_CODE (type) == REAL_TYPE)
682 {
683 tem = strip_float_extensions (t);
684 if (tem != t && negate_expr_p (tem))
685 return fold_convert_loc (loc, type, negate_expr (tem));
686 }
687 break;
688
689 case CALL_EXPR:
690 /* Negate f(x) as f(-x). */
691 if (negate_mathfn_p (get_call_combined_fn (t))
692 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
693 {
694 tree fndecl, arg;
695
696 fndecl = get_callee_fndecl (t);
697 arg = negate_expr (CALL_EXPR_ARG (t, 0));
698 return build_call_expr_loc (loc, fndecl, 1, arg);
699 }
700 break;
701
702 case RSHIFT_EXPR:
703 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
704 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
705 {
706 tree op1 = TREE_OPERAND (t, 1);
707 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
708 {
709 tree ntype = TYPE_UNSIGNED (type)
710 ? signed_type_for (type)
711 : unsigned_type_for (type);
712 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
713 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
714 return fold_convert_loc (loc, type, temp);
715 }
716 }
717 break;
718
719 default:
720 break;
721 }
722
723 return NULL_TREE;
724 }
725
726 /* A wrapper for fold_negate_expr_1. */
727
728 static tree
729 fold_negate_expr (location_t loc, tree t)
730 {
731 tree type = TREE_TYPE (t);
732 STRIP_SIGN_NOPS (t);
733 tree tem = fold_negate_expr_1 (loc, t);
734 if (tem == NULL_TREE)
735 return NULL_TREE;
736 return fold_convert_loc (loc, type, tem);
737 }
738
739 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
740 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
741 return NULL_TREE. */
742
743 static tree
744 negate_expr (tree t)
745 {
746 tree type, tem;
747 location_t loc;
748
749 if (t == NULL_TREE)
750 return NULL_TREE;
751
752 loc = EXPR_LOCATION (t);
753 type = TREE_TYPE (t);
754 STRIP_SIGN_NOPS (t);
755
756 tem = fold_negate_expr (loc, t);
757 if (!tem)
758 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
759 return fold_convert_loc (loc, type, tem);
760 }
761 \f
762 /* Split a tree IN into constant, literal, and variable parts that could be
763 combined with CODE to make IN. "constant" means an expression with
764 TREE_CONSTANT but that isn't an actual constant. CODE must be a
765 commutative arithmetic operation. Store the constant part into *CONP,
766 the literal in *LITP and return the variable part. If a part isn't
767 present, set it to null. If the tree does not decompose in this way,
768 return the entire tree as the variable part and the other parts as null.
769
770 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
771 case, we negate an operand that was subtracted. Except if it is a
772 literal for which we use *MINUS_LITP instead.
773
774 If NEGATE_P is true, we are negating all of IN, again except a literal
775 for which we use *MINUS_LITP instead. If a variable part is of pointer
776 type, it is negated after converting to TYPE. This prevents us from
777 generating illegal MINUS pointer expression. LOC is the location of
778 the converted variable part.
779
780 If IN is itself a literal or constant, return it as appropriate.
781
782 Note that we do not guarantee that any of the three values will be the
783 same type as IN, but they will have the same signedness and mode. */
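/* As a concrete illustration: splitting IN == x - 3 with CODE ==
   PLUS_EXPR returns x as the variable part and sets *MINUS_LITP to 3,
   while splitting IN == (a + b) + 5 returns a + b as the variable part
   and sets *LITP to 5.  */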
784
785 static tree
786 split_tree (tree in, tree type, enum tree_code code,
787 tree *minus_varp, tree *conp, tree *minus_conp,
788 tree *litp, tree *minus_litp, int negate_p)
789 {
790 tree var = 0;
791 *minus_varp = 0;
792 *conp = 0;
793 *minus_conp = 0;
794 *litp = 0;
795 *minus_litp = 0;
796
797 /* Strip any conversions that don't change the machine mode or signedness. */
798 STRIP_SIGN_NOPS (in);
799
800 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
801 || TREE_CODE (in) == FIXED_CST)
802 *litp = in;
803 else if (TREE_CODE (in) == code
804 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
805 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
806 /* We can associate addition and subtraction together (even
807 though the C standard doesn't say so) for integers because
808 the value is not affected. For reals, the value might be
809 affected, so we can't. */
810 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
811 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
812 || (code == MINUS_EXPR
813 && (TREE_CODE (in) == PLUS_EXPR
814 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
815 {
816 tree op0 = TREE_OPERAND (in, 0);
817 tree op1 = TREE_OPERAND (in, 1);
818 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
819 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
820
821 /* First see if either of the operands is a literal, then a constant. */
822 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
823 || TREE_CODE (op0) == FIXED_CST)
824 *litp = op0, op0 = 0;
825 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
826 || TREE_CODE (op1) == FIXED_CST)
827 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
828
829 if (op0 != 0 && TREE_CONSTANT (op0))
830 *conp = op0, op0 = 0;
831 else if (op1 != 0 && TREE_CONSTANT (op1))
832 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
833
834 /* If we haven't dealt with either operand, this is not a case we can
835 decompose. Otherwise, VAR is either of the ones remaining, if any. */
836 if (op0 != 0 && op1 != 0)
837 var = in;
838 else if (op0 != 0)
839 var = op0;
840 else
841 var = op1, neg_var_p = neg1_p;
842
843 /* Now do any needed negations. */
844 if (neg_litp_p)
845 *minus_litp = *litp, *litp = 0;
846 if (neg_conp_p && *conp)
847 *minus_conp = *conp, *conp = 0;
848 if (neg_var_p && var)
849 *minus_varp = var, var = 0;
850 }
851 else if (TREE_CONSTANT (in))
852 *conp = in;
853 else if (TREE_CODE (in) == BIT_NOT_EXPR
854 && code == PLUS_EXPR)
855 {
856 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
857 when IN is constant. */
858 *litp = build_minus_one_cst (type);
859 *minus_varp = TREE_OPERAND (in, 0);
860 }
861 else
862 var = in;
863
864 if (negate_p)
865 {
866 if (*litp)
867 *minus_litp = *litp, *litp = 0;
868 else if (*minus_litp)
869 *litp = *minus_litp, *minus_litp = 0;
870 if (*conp)
871 *minus_conp = *conp, *conp = 0;
872 else if (*minus_conp)
873 *conp = *minus_conp, *minus_conp = 0;
874 if (var)
875 *minus_varp = var, var = 0;
876 else if (*minus_varp)
877 var = *minus_varp, *minus_varp = 0;
878 }
879
880 if (*litp
881 && TREE_OVERFLOW_P (*litp))
882 *litp = drop_tree_overflow (*litp);
883 if (*minus_litp
884 && TREE_OVERFLOW_P (*minus_litp))
885 *minus_litp = drop_tree_overflow (*minus_litp);
886
887 return var;
888 }
889
890 /* Re-associate trees split by the above function. T1 and T2 are
891 either expressions to associate or null. Return the new
892 expression, if any. LOC is the location of the new expression. If
893 we build an operation, do it in TYPE and with CODE. */
894
895 static tree
896 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
897 {
898 if (t1 == 0)
899 {
900 gcc_assert (t2 == 0 || code != MINUS_EXPR);
901 return t2;
902 }
903 else if (t2 == 0)
904 return t1;
905
906 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
907 try to fold this since we will have infinite recursion. But do
908 deal with any NEGATE_EXPRs. */
909 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
910 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
911 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
912 {
913 if (code == PLUS_EXPR)
914 {
915 if (TREE_CODE (t1) == NEGATE_EXPR)
916 return build2_loc (loc, MINUS_EXPR, type,
917 fold_convert_loc (loc, type, t2),
918 fold_convert_loc (loc, type,
919 TREE_OPERAND (t1, 0)));
920 else if (TREE_CODE (t2) == NEGATE_EXPR)
921 return build2_loc (loc, MINUS_EXPR, type,
922 fold_convert_loc (loc, type, t1),
923 fold_convert_loc (loc, type,
924 TREE_OPERAND (t2, 0)));
925 else if (integer_zerop (t2))
926 return fold_convert_loc (loc, type, t1);
927 }
928 else if (code == MINUS_EXPR)
929 {
930 if (integer_zerop (t2))
931 return fold_convert_loc (loc, type, t1);
932 }
933
934 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
935 fold_convert_loc (loc, type, t2));
936 }
937
938 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
939 fold_convert_loc (loc, type, t2));
940 }
941 \f
942 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
943 for use in int_const_binop, size_binop and size_diffop. */
944
945 static bool
946 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
947 {
948 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
949 return false;
950 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
951 return false;
952
953 switch (code)
954 {
955 case LSHIFT_EXPR:
956 case RSHIFT_EXPR:
957 case LROTATE_EXPR:
958 case RROTATE_EXPR:
959 return true;
960
961 default:
962 break;
963 }
964
965 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
966 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
967 && TYPE_MODE (type1) == TYPE_MODE (type2);
968 }
969
970 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
971 a new constant in RES. Return FALSE if we don't know how to
972 evaluate CODE at compile-time. */
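/* Note, as the shift and rotate cases below show, that a negative
   shift or rotate count is handled by shifting or rotating by the
   absolute value in the opposite direction, so e.g. a left shift
   by -2 is folded as a right shift by 2.  */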
973
974 bool
975 wide_int_binop (wide_int &res,
976 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
977 signop sign, wi::overflow_type *overflow)
978 {
979 wide_int tmp;
980 *overflow = wi::OVF_NONE;
981 switch (code)
982 {
983 case BIT_IOR_EXPR:
984 res = wi::bit_or (arg1, arg2);
985 break;
986
987 case BIT_XOR_EXPR:
988 res = wi::bit_xor (arg1, arg2);
989 break;
990
991 case BIT_AND_EXPR:
992 res = wi::bit_and (arg1, arg2);
993 break;
994
995 case RSHIFT_EXPR:
996 case LSHIFT_EXPR:
997 if (wi::neg_p (arg2))
998 {
999 tmp = -arg2;
1000 if (code == RSHIFT_EXPR)
1001 code = LSHIFT_EXPR;
1002 else
1003 code = RSHIFT_EXPR;
1004 }
1005 else
1006 tmp = arg2;
1007
1008 if (code == RSHIFT_EXPR)
1009 /* It's unclear from the C standard whether shifts can overflow.
1010 The following code ignores overflow; perhaps a C standard
1011 interpretation ruling is needed. */
1012 res = wi::rshift (arg1, tmp, sign);
1013 else
1014 res = wi::lshift (arg1, tmp);
1015 break;
1016
1017 case RROTATE_EXPR:
1018 case LROTATE_EXPR:
1019 if (wi::neg_p (arg2))
1020 {
1021 tmp = -arg2;
1022 if (code == RROTATE_EXPR)
1023 code = LROTATE_EXPR;
1024 else
1025 code = RROTATE_EXPR;
1026 }
1027 else
1028 tmp = arg2;
1029
1030 if (code == RROTATE_EXPR)
1031 res = wi::rrotate (arg1, tmp);
1032 else
1033 res = wi::lrotate (arg1, tmp);
1034 break;
1035
1036 case PLUS_EXPR:
1037 res = wi::add (arg1, arg2, sign, overflow);
1038 break;
1039
1040 case MINUS_EXPR:
1041 res = wi::sub (arg1, arg2, sign, overflow);
1042 break;
1043
1044 case MULT_EXPR:
1045 res = wi::mul (arg1, arg2, sign, overflow);
1046 break;
1047
1048 case MULT_HIGHPART_EXPR:
1049 res = wi::mul_high (arg1, arg2, sign);
1050 break;
1051
1052 case TRUNC_DIV_EXPR:
1053 case EXACT_DIV_EXPR:
1054 if (arg2 == 0)
1055 return false;
1056 res = wi::div_trunc (arg1, arg2, sign, overflow);
1057 break;
1058
1059 case FLOOR_DIV_EXPR:
1060 if (arg2 == 0)
1061 return false;
1062 res = wi::div_floor (arg1, arg2, sign, overflow);
1063 break;
1064
1065 case CEIL_DIV_EXPR:
1066 if (arg2 == 0)
1067 return false;
1068 res = wi::div_ceil (arg1, arg2, sign, overflow);
1069 break;
1070
1071 case ROUND_DIV_EXPR:
1072 if (arg2 == 0)
1073 return false;
1074 res = wi::div_round (arg1, arg2, sign, overflow);
1075 break;
1076
1077 case TRUNC_MOD_EXPR:
1078 if (arg2 == 0)
1079 return false;
1080 res = wi::mod_trunc (arg1, arg2, sign, overflow);
1081 break;
1082
1083 case FLOOR_MOD_EXPR:
1084 if (arg2 == 0)
1085 return false;
1086 res = wi::mod_floor (arg1, arg2, sign, overflow);
1087 break;
1088
1089 case CEIL_MOD_EXPR:
1090 if (arg2 == 0)
1091 return false;
1092 res = wi::mod_ceil (arg1, arg2, sign, overflow);
1093 break;
1094
1095 case ROUND_MOD_EXPR:
1096 if (arg2 == 0)
1097 return false;
1098 res = wi::mod_round (arg1, arg2, sign, overflow);
1099 break;
1100
1101 case MIN_EXPR:
1102 res = wi::min (arg1, arg2, sign);
1103 break;
1104
1105 case MAX_EXPR:
1106 res = wi::max (arg1, arg2, sign);
1107 break;
1108
1109 default:
1110 return false;
1111 }
1112 return true;
1113 }
1114
1115 /* Combine two poly_int trees ARG1 and ARG2 under operation CODE to
1116 produce a new constant in RES. Return FALSE if we don't know how
1117 to evaluate CODE at compile-time. */
1118
1119 static bool
1120 poly_int_binop (poly_wide_int &res, enum tree_code code,
1121 const_tree arg1, const_tree arg2,
1122 signop sign, wi::overflow_type *overflow)
1123 {
1124 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1125 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1126 switch (code)
1127 {
1128 case PLUS_EXPR:
1129 res = wi::add (wi::to_poly_wide (arg1),
1130 wi::to_poly_wide (arg2), sign, overflow);
1131 break;
1132
1133 case MINUS_EXPR:
1134 res = wi::sub (wi::to_poly_wide (arg1),
1135 wi::to_poly_wide (arg2), sign, overflow);
1136 break;
1137
1138 case MULT_EXPR:
1139 if (TREE_CODE (arg2) == INTEGER_CST)
1140 res = wi::mul (wi::to_poly_wide (arg1),
1141 wi::to_wide (arg2), sign, overflow);
1142 else if (TREE_CODE (arg1) == INTEGER_CST)
1143 res = wi::mul (wi::to_poly_wide (arg2),
1144 wi::to_wide (arg1), sign, overflow);
1145 else
1146 return false;
1147 break;
1148
1149 case LSHIFT_EXPR:
1150 if (TREE_CODE (arg2) == INTEGER_CST)
1151 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1152 else
1153 return false;
1154 break;
1155
1156 case BIT_IOR_EXPR:
1157 if (TREE_CODE (arg2) != INTEGER_CST
1158 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1159 &res))
1160 return false;
1161 break;
1162
1163 default:
1164 return false;
1165 }
1166 return true;
1167 }
1168
1169 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1170 produce a new constant. Return NULL_TREE if we don't know how to
1171 evaluate CODE at compile-time. */
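/* For example, combining the INTEGER_CSTs 2 and 3 under PLUS_EXPR
   yields the constant 5, while TRUNC_DIV_EXPR with a zero divisor
   yields NULL_TREE, since wide_int_binop refuses to divide by
   zero.  */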
1172
1173 tree
1174 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1175 int overflowable)
1176 {
1177 poly_wide_int poly_res;
1178 tree type = TREE_TYPE (arg1);
1179 signop sign = TYPE_SIGN (type);
1180 wi::overflow_type overflow = wi::OVF_NONE;
1181
1182 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1183 {
1184 wide_int warg1 = wi::to_wide (arg1), res;
1185 wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1186 if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1187 return NULL_TREE;
1188 poly_res = res;
1189 }
1190 else if (!poly_int_tree_p (arg1)
1191 || !poly_int_tree_p (arg2)
1192 || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1193 return NULL_TREE;
1194 return force_fit_type (type, poly_res, overflowable,
1195 (((sign == SIGNED || overflowable == -1)
1196 && overflow)
1197 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1198 }
1199
1200 /* Return true if binary operation OP distributes over addition in operand
1201 OPNO, with the other operand being held constant. OPNO counts from 1. */
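/* For instance, (a + b) << c equals (a << c) + (b << c) under wrapping
   arithmetic, so LSHIFT_EXPR distributes in operand 1, whereas
   c << (a + b) does not decompose that way, so it does not distribute
   in operand 2.  */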
1202
1203 static bool
1204 distributes_over_addition_p (tree_code op, int opno)
1205 {
1206 switch (op)
1207 {
1208 case PLUS_EXPR:
1209 case MINUS_EXPR:
1210 case MULT_EXPR:
1211 return true;
1212
1213 case LSHIFT_EXPR:
1214 return opno == 1;
1215
1216 default:
1217 return false;
1218 }
1219 }
1220
1221 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1222 constant. We assume ARG1 and ARG2 have the same data type, or at least
1223 are the same kind of constant and the same machine mode. Return zero if
1224 combining the constants is not allowed in the current operating mode. */
1225
1226 static tree
1227 const_binop (enum tree_code code, tree arg1, tree arg2)
1228 {
1229 /* Sanity check for the recursive cases. */
1230 if (!arg1 || !arg2)
1231 return NULL_TREE;
1232
1233 STRIP_NOPS (arg1);
1234 STRIP_NOPS (arg2);
1235
1236 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1237 {
1238 if (code == POINTER_PLUS_EXPR)
1239 return int_const_binop (PLUS_EXPR,
1240 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1241
1242 return int_const_binop (code, arg1, arg2);
1243 }
1244
1245 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1246 {
1247 machine_mode mode;
1248 REAL_VALUE_TYPE d1;
1249 REAL_VALUE_TYPE d2;
1250 REAL_VALUE_TYPE value;
1251 REAL_VALUE_TYPE result;
1252 bool inexact;
1253 tree t, type;
1254
1255 /* The following codes are handled by real_arithmetic. */
1256 switch (code)
1257 {
1258 case PLUS_EXPR:
1259 case MINUS_EXPR:
1260 case MULT_EXPR:
1261 case RDIV_EXPR:
1262 case MIN_EXPR:
1263 case MAX_EXPR:
1264 break;
1265
1266 default:
1267 return NULL_TREE;
1268 }
1269
1270 d1 = TREE_REAL_CST (arg1);
1271 d2 = TREE_REAL_CST (arg2);
1272
1273 type = TREE_TYPE (arg1);
1274 mode = TYPE_MODE (type);
1275
1276 /* Don't perform operation if we honor signaling NaNs and
1277 either operand is a signaling NaN. */
1278 if (HONOR_SNANS (mode)
1279 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1280 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1281 return NULL_TREE;
1282
1283 /* Don't perform operation if it would raise a division
1284 by zero exception. */
1285 if (code == RDIV_EXPR
1286 && real_equal (&d2, &dconst0)
1287 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1288 return NULL_TREE;
1289
1290 /* If either operand is a NaN, just return it. Otherwise, set up
1291 for floating-point trap; we return an overflow. */
1292 if (REAL_VALUE_ISNAN (d1))
1293 {
1294 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1295 is off. */
1296 d1.signalling = 0;
1297 t = build_real (type, d1);
1298 return t;
1299 }
1300 else if (REAL_VALUE_ISNAN (d2))
1301 {
1302 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1303 is off. */
1304 d2.signalling = 0;
1305 t = build_real (type, d2);
1306 return t;
1307 }
1308
1309 inexact = real_arithmetic (&value, code, &d1, &d2);
1310 real_convert (&result, mode, &value);
1311
1312 /* Don't constant fold this floating point operation if
1313 the result has overflowed and flag_trapping_math is set. */
1314 if (flag_trapping_math
1315 && MODE_HAS_INFINITIES (mode)
1316 && REAL_VALUE_ISINF (result)
1317 && !REAL_VALUE_ISINF (d1)
1318 && !REAL_VALUE_ISINF (d2))
1319 return NULL_TREE;
1320
1321 /* Don't constant fold this floating point operation if the
1322 result may depend upon the run-time rounding mode and
1323 flag_rounding_math is set, or if GCC's software emulation
1324 is unable to accurately represent the result. */
1325 if ((flag_rounding_math
1326 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1327 && (inexact || !real_identical (&result, &value)))
1328 return NULL_TREE;
1329
1330 t = build_real (type, result);
1331
1332 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1333 return t;
1334 }
1335
1336 if (TREE_CODE (arg1) == FIXED_CST)
1337 {
1338 FIXED_VALUE_TYPE f1;
1339 FIXED_VALUE_TYPE f2;
1340 FIXED_VALUE_TYPE result;
1341 tree t, type;
1342 int sat_p;
1343 bool overflow_p;
1344
1345 /* The following codes are handled by fixed_arithmetic. */
1346 switch (code)
1347 {
1348 case PLUS_EXPR:
1349 case MINUS_EXPR:
1350 case MULT_EXPR:
1351 case TRUNC_DIV_EXPR:
1352 if (TREE_CODE (arg2) != FIXED_CST)
1353 return NULL_TREE;
1354 f2 = TREE_FIXED_CST (arg2);
1355 break;
1356
1357 case LSHIFT_EXPR:
1358 case RSHIFT_EXPR:
1359 {
1360 if (TREE_CODE (arg2) != INTEGER_CST)
1361 return NULL_TREE;
1362 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1363 f2.data.high = w2.elt (1);
1364 f2.data.low = w2.ulow ();
1365 f2.mode = SImode;
1366 }
1367 break;
1368
1369 default:
1370 return NULL_TREE;
1371 }
1372
1373 f1 = TREE_FIXED_CST (arg1);
1374 type = TREE_TYPE (arg1);
1375 sat_p = TYPE_SATURATING (type);
1376 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1377 t = build_fixed (type, result);
1378 /* Propagate overflow flags. */
1379 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1380 TREE_OVERFLOW (t) = 1;
1381 return t;
1382 }
1383
1384 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1385 {
1386 tree type = TREE_TYPE (arg1);
1387 tree r1 = TREE_REALPART (arg1);
1388 tree i1 = TREE_IMAGPART (arg1);
1389 tree r2 = TREE_REALPART (arg2);
1390 tree i2 = TREE_IMAGPART (arg2);
1391 tree real, imag;
1392
1393 switch (code)
1394 {
1395 case PLUS_EXPR:
1396 case MINUS_EXPR:
1397 real = const_binop (code, r1, r2);
1398 imag = const_binop (code, i1, i2);
1399 break;
1400
1401 case MULT_EXPR:
1402 if (COMPLEX_FLOAT_TYPE_P (type))
1403 return do_mpc_arg2 (arg1, arg2, type,
1404 /* do_nonfinite= */ folding_initializer,
1405 mpc_mul);
1406
1407 real = const_binop (MINUS_EXPR,
1408 const_binop (MULT_EXPR, r1, r2),
1409 const_binop (MULT_EXPR, i1, i2));
1410 imag = const_binop (PLUS_EXPR,
1411 const_binop (MULT_EXPR, r1, i2),
1412 const_binop (MULT_EXPR, i1, r2));
1413 break;
1414
1415 case RDIV_EXPR:
1416 if (COMPLEX_FLOAT_TYPE_P (type))
1417 return do_mpc_arg2 (arg1, arg2, type,
1418 /* do_nonfinite= */ folding_initializer,
1419 mpc_div);
1420 /* Fallthru. */
1421 case TRUNC_DIV_EXPR:
1422 case CEIL_DIV_EXPR:
1423 case FLOOR_DIV_EXPR:
1424 case ROUND_DIV_EXPR:
1425 if (flag_complex_method == 0)
1426 {
1427 /* Keep this algorithm in sync with
1428 tree-complex.c:expand_complex_div_straight().
1429
1430 Expand complex division to scalars, straightforward algorithm.
1431 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1432 t = br*br + bi*bi
1433 */
1434 tree magsquared
1435 = const_binop (PLUS_EXPR,
1436 const_binop (MULT_EXPR, r2, r2),
1437 const_binop (MULT_EXPR, i2, i2));
1438 tree t1
1439 = const_binop (PLUS_EXPR,
1440 const_binop (MULT_EXPR, r1, r2),
1441 const_binop (MULT_EXPR, i1, i2));
1442 tree t2
1443 = const_binop (MINUS_EXPR,
1444 const_binop (MULT_EXPR, i1, r2),
1445 const_binop (MULT_EXPR, r1, i2));
1446
1447 real = const_binop (code, t1, magsquared);
1448 imag = const_binop (code, t2, magsquared);
1449 }
1450 else
1451 {
1452 /* Keep this algorithm in sync with
1453 tree-complex.c:expand_complex_div_wide().
1454
1455 Expand complex division to scalars, modified algorithm to minimize
1456 overflow with wide input ranges. */
1457 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1458 fold_abs_const (r2, TREE_TYPE (type)),
1459 fold_abs_const (i2, TREE_TYPE (type)));
1460
1461 if (integer_nonzerop (compare))
1462 {
1463 /* In the TRUE branch, we compute
1464 ratio = br/bi;
1465 div = (br * ratio) + bi;
1466 tr = (ar * ratio) + ai;
1467 ti = (ai * ratio) - ar;
1468 tr = tr / div;
1469 ti = ti / div; */
1470 tree ratio = const_binop (code, r2, i2);
1471 tree div = const_binop (PLUS_EXPR, i2,
1472 const_binop (MULT_EXPR, r2, ratio));
1473 real = const_binop (MULT_EXPR, r1, ratio);
1474 real = const_binop (PLUS_EXPR, real, i1);
1475 real = const_binop (code, real, div);
1476
1477 imag = const_binop (MULT_EXPR, i1, ratio);
1478 imag = const_binop (MINUS_EXPR, imag, r1);
1479 imag = const_binop (code, imag, div);
1480 }
1481 else
1482 {
1483 /* In the FALSE branch, we compute
1484 ratio = bi/br;
1485 div = (bi * ratio) + br;
1486 tr = (ai * ratio) + ar;
1487 ti = ai - (ar * ratio);
1488 tr = tr / div;
1489 ti = ti / div; */
1490 tree ratio = const_binop (code, i2, r2);
1491 tree div = const_binop (PLUS_EXPR, r2,
1492 const_binop (MULT_EXPR, i2, ratio));
1493
1494 real = const_binop (MULT_EXPR, i1, ratio);
1495 real = const_binop (PLUS_EXPR, real, r1);
1496 real = const_binop (code, real, div);
1497
1498 imag = const_binop (MULT_EXPR, r1, ratio);
1499 imag = const_binop (MINUS_EXPR, i1, imag);
1500 imag = const_binop (code, imag, div);
1501 }
1502 }
1503 break;
1504
1505 default:
1506 return NULL_TREE;
1507 }
1508
1509 if (real && imag)
1510 return build_complex (type, real, imag);
1511 }
1512
1513 if (TREE_CODE (arg1) == VECTOR_CST
1514 && TREE_CODE (arg2) == VECTOR_CST
1515 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1516 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1517 {
1518 tree type = TREE_TYPE (arg1);
1519 bool step_ok_p;
1520 if (VECTOR_CST_STEPPED_P (arg1)
1521 && VECTOR_CST_STEPPED_P (arg2))
1522 /* We can operate directly on the encoding if:
1523
1524 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1525 implies
1526 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1527
1528 Addition and subtraction are the supported operators
1529 for which this is true. */
1530 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1531 else if (VECTOR_CST_STEPPED_P (arg1))
1532 /* We can operate directly on stepped encodings if:
1533
1534 a3 - a2 == a2 - a1
1535 implies:
1536 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1537
1538 which is true if (x -> x op c) distributes over addition. */
1539 step_ok_p = distributes_over_addition_p (code, 1);
1540 else
1541 /* Similarly in reverse. */
1542 step_ok_p = distributes_over_addition_p (code, 2);
1543 tree_vector_builder elts;
1544 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1545 return NULL_TREE;
1546 unsigned int count = elts.encoded_nelts ();
1547 for (unsigned int i = 0; i < count; ++i)
1548 {
1549 tree elem1 = VECTOR_CST_ELT (arg1, i);
1550 tree elem2 = VECTOR_CST_ELT (arg2, i);
1551
1552 tree elt = const_binop (code, elem1, elem2);
1553
1554 /* It is possible that const_binop cannot handle the given
1555 code and returns NULL_TREE. */
1556 if (elt == NULL_TREE)
1557 return NULL_TREE;
1558 elts.quick_push (elt);
1559 }
1560
1561 return elts.build ();
1562 }
1563
1564 /* Shifts allow a scalar offset for a vector. */
1565 if (TREE_CODE (arg1) == VECTOR_CST
1566 && TREE_CODE (arg2) == INTEGER_CST)
1567 {
1568 tree type = TREE_TYPE (arg1);
1569 bool step_ok_p = distributes_over_addition_p (code, 1);
1570 tree_vector_builder elts;
1571 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1572 return NULL_TREE;
1573 unsigned int count = elts.encoded_nelts ();
1574 for (unsigned int i = 0; i < count; ++i)
1575 {
1576 tree elem1 = VECTOR_CST_ELT (arg1, i);
1577
1578 tree elt = const_binop (code, elem1, arg2);
1579
1580 /* It is possible that const_binop cannot handle the given
1581 code and returns NULL_TREE. */
1582 if (elt == NULL_TREE)
1583 return NULL_TREE;
1584 elts.quick_push (elt);
1585 }
1586
1587 return elts.build ();
1588 }
1589 return NULL_TREE;
1590 }
1591
1592 /* Overload that adds a TYPE parameter to be able to dispatch
1593 to fold_relational_const. */
1594
1595 tree
1596 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1597 {
1598 if (TREE_CODE_CLASS (code) == tcc_comparison)
1599 return fold_relational_const (code, type, arg1, arg2);
1600
1601 /* ??? Until we make the const_binop worker take the type of the
1602 result as an argument, put those cases that need it here. */
1603 switch (code)
1604 {
1605 case VEC_SERIES_EXPR:
1606 if (CONSTANT_CLASS_P (arg1)
1607 && CONSTANT_CLASS_P (arg2))
1608 return build_vec_series (type, arg1, arg2);
1609 return NULL_TREE;
1610
1611 case COMPLEX_EXPR:
1612 if ((TREE_CODE (arg1) == REAL_CST
1613 && TREE_CODE (arg2) == REAL_CST)
1614 || (TREE_CODE (arg1) == INTEGER_CST
1615 && TREE_CODE (arg2) == INTEGER_CST))
1616 return build_complex (type, arg1, arg2);
1617 return NULL_TREE;
1618
1619 case POINTER_DIFF_EXPR:
1620 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1621 {
1622 poly_offset_int res = (wi::to_poly_offset (arg1)
1623 - wi::to_poly_offset (arg2));
1624 return force_fit_type (type, res, 1,
1625 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1626 }
1627 return NULL_TREE;
1628
1629 case VEC_PACK_TRUNC_EXPR:
1630 case VEC_PACK_FIX_TRUNC_EXPR:
1631 case VEC_PACK_FLOAT_EXPR:
1632 {
1633 unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1634
1635 if (TREE_CODE (arg1) != VECTOR_CST
1636 || TREE_CODE (arg2) != VECTOR_CST)
1637 return NULL_TREE;
1638
1639 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1640 return NULL_TREE;
1641
1642 out_nelts = in_nelts * 2;
1643 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1644 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1645
1646 tree_vector_builder elts (type, out_nelts, 1);
1647 for (i = 0; i < out_nelts; i++)
1648 {
1649 tree elt = (i < in_nelts
1650 ? VECTOR_CST_ELT (arg1, i)
1651 : VECTOR_CST_ELT (arg2, i - in_nelts));
1652 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1653 ? NOP_EXPR
1654 : code == VEC_PACK_FLOAT_EXPR
1655 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1656 TREE_TYPE (type), elt);
1657 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1658 return NULL_TREE;
1659 elts.quick_push (elt);
1660 }
1661
1662 return elts.build ();
1663 }
1664
1665 case VEC_WIDEN_MULT_LO_EXPR:
1666 case VEC_WIDEN_MULT_HI_EXPR:
1667 case VEC_WIDEN_MULT_EVEN_EXPR:
1668 case VEC_WIDEN_MULT_ODD_EXPR:
1669 {
1670 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1671
1672 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1673 return NULL_TREE;
1674
1675 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1676 return NULL_TREE;
1677 out_nelts = in_nelts / 2;
1678 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1679 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1680
1681 if (code == VEC_WIDEN_MULT_LO_EXPR)
1682 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1683 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1684 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1685 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1686 scale = 1, ofs = 0;
1687 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1688 scale = 1, ofs = 1;
1689
1690 tree_vector_builder elts (type, out_nelts, 1);
1691 for (out = 0; out < out_nelts; out++)
1692 {
1693 unsigned int in = (out << scale) + ofs;
1694 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1695 VECTOR_CST_ELT (arg1, in));
1696 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1697 VECTOR_CST_ELT (arg2, in));
1698
1699 if (t1 == NULL_TREE || t2 == NULL_TREE)
1700 return NULL_TREE;
1701 tree elt = const_binop (MULT_EXPR, t1, t2);
1702 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1703 return NULL_TREE;
1704 elts.quick_push (elt);
1705 }
1706
1707 return elts.build ();
1708 }
1709
1710 default:;
1711 }
1712
1713 if (TREE_CODE_CLASS (code) != tcc_binary)
1714 return NULL_TREE;
1715
1716 /* Make sure type and arg1 have the same saturating flag. */
1717 gcc_checking_assert (TYPE_SATURATING (type)
1718 == TYPE_SATURATING (TREE_TYPE (arg1)));
1719
1720 return const_binop (code, arg1, arg2);
1721 }
1722
1723 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1724 Return zero if computing the constants is not possible. */
1725
1726 tree
1727 const_unop (enum tree_code code, tree type, tree arg0)
1728 {
1729 /* Don't perform the operation, other than NEGATE and ABS, if
1730 flag_signaling_nans is on and the operand is a signaling NaN. */
1731 if (TREE_CODE (arg0) == REAL_CST
1732 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1733 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1734 && code != NEGATE_EXPR
1735 && code != ABS_EXPR
1736 && code != ABSU_EXPR)
1737 return NULL_TREE;
1738
1739 switch (code)
1740 {
1741 CASE_CONVERT:
1742 case FLOAT_EXPR:
1743 case FIX_TRUNC_EXPR:
1744 case FIXED_CONVERT_EXPR:
1745 return fold_convert_const (code, type, arg0);
1746
1747 case ADDR_SPACE_CONVERT_EXPR:
1748 /* If the source address is 0, and the source address space
1749 cannot have a valid object at 0, fold to dest type null. */
1750 if (integer_zerop (arg0)
1751 && !(targetm.addr_space.zero_address_valid
1752 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1753 return fold_convert_const (code, type, arg0);
1754 break;
1755
1756 case VIEW_CONVERT_EXPR:
1757 return fold_view_convert_expr (type, arg0);
1758
1759 case NEGATE_EXPR:
1760 {
1761 /* Can't call fold_negate_const directly here as that doesn't
1762 handle all cases and we might not be able to negate some
1763 constants. */
1764 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1765 if (tem && CONSTANT_CLASS_P (tem))
1766 return tem;
1767 break;
1768 }
1769
1770 case ABS_EXPR:
1771 case ABSU_EXPR:
1772 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1773 return fold_abs_const (arg0, type);
1774 break;
1775
1776 case CONJ_EXPR:
1777 if (TREE_CODE (arg0) == COMPLEX_CST)
1778 {
1779 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1780 TREE_TYPE (type));
1781 return build_complex (type, TREE_REALPART (arg0), ipart);
1782 }
1783 break;
1784
1785 case BIT_NOT_EXPR:
1786 if (TREE_CODE (arg0) == INTEGER_CST)
1787 return fold_not_const (arg0, type);
1788 else if (POLY_INT_CST_P (arg0))
1789 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1790 /* Perform BIT_NOT_EXPR on each element individually. */
1791 else if (TREE_CODE (arg0) == VECTOR_CST)
1792 {
1793 tree elem;
1794
1795 /* This can cope with stepped encodings because ~x == -1 - x. */
1796 tree_vector_builder elements;
1797 elements.new_unary_operation (type, arg0, true);
1798 unsigned int i, count = elements.encoded_nelts ();
1799 for (i = 0; i < count; ++i)
1800 {
1801 elem = VECTOR_CST_ELT (arg0, i);
1802 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1803 if (elem == NULL_TREE)
1804 break;
1805 elements.quick_push (elem);
1806 }
1807 if (i == count)
1808 return elements.build ();
1809 }
1810 break;
1811
1812 case TRUTH_NOT_EXPR:
1813 if (TREE_CODE (arg0) == INTEGER_CST)
1814 return constant_boolean_node (integer_zerop (arg0), type);
1815 break;
1816
1817 case REALPART_EXPR:
1818 if (TREE_CODE (arg0) == COMPLEX_CST)
1819 return fold_convert (type, TREE_REALPART (arg0));
1820 break;
1821
1822 case IMAGPART_EXPR:
1823 if (TREE_CODE (arg0) == COMPLEX_CST)
1824 return fold_convert (type, TREE_IMAGPART (arg0));
1825 break;
1826
1827 case VEC_UNPACK_LO_EXPR:
1828 case VEC_UNPACK_HI_EXPR:
1829 case VEC_UNPACK_FLOAT_LO_EXPR:
1830 case VEC_UNPACK_FLOAT_HI_EXPR:
1831 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1832 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1833 {
1834 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1835 enum tree_code subcode;
1836
1837 if (TREE_CODE (arg0) != VECTOR_CST)
1838 return NULL_TREE;
1839
1840 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1841 return NULL_TREE;
1842 out_nelts = in_nelts / 2;
1843 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1844
1845 unsigned int offset = 0;
1846 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1847 || code == VEC_UNPACK_FLOAT_LO_EXPR
1848 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1849 offset = out_nelts;
1850
1851 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1852 subcode = NOP_EXPR;
1853 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1854 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1855 subcode = FLOAT_EXPR;
1856 else
1857 subcode = FIX_TRUNC_EXPR;
1858
1859 tree_vector_builder elts (type, out_nelts, 1);
1860 for (i = 0; i < out_nelts; i++)
1861 {
1862 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1863 VECTOR_CST_ELT (arg0, i + offset));
1864 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1865 return NULL_TREE;
1866 elts.quick_push (elt);
1867 }
1868
1869 return elts.build ();
1870 }
1871
1872 case VEC_DUPLICATE_EXPR:
1873 if (CONSTANT_CLASS_P (arg0))
1874 return build_vector_from_val (type, arg0);
1875 return NULL_TREE;
1876
1877 default:
1878 break;
1879 }
1880
1881 return NULL_TREE;
1882 }
1883
1884 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1885 indicates which particular sizetype to create. */
1886
1887 tree
1888 size_int_kind (poly_int64 number, enum size_type_kind kind)
1889 {
1890 return build_int_cst (sizetype_tab[(int) kind], number);
1891 }
1892 \f
1893 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1894 is a tree code. The type of the result is taken from the operands.
1895 Both must be equivalent integer types, ala int_binop_types_match_p.
1896 If the operands are constant, so is the result. */
1897
1898 tree
1899 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1900 {
1901 tree type = TREE_TYPE (arg0);
1902
1903 if (arg0 == error_mark_node || arg1 == error_mark_node)
1904 return error_mark_node;
1905
1906 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1907 TREE_TYPE (arg1)));
1908
1909 /* Handle the special case of two poly_int constants faster. */
1910 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1911 {
1912 /* And some specific cases even faster than that. */
1913 if (code == PLUS_EXPR)
1914 {
1915 if (integer_zerop (arg0)
1916 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1917 return arg1;
1918 if (integer_zerop (arg1)
1919 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1920 return arg0;
1921 }
1922 else if (code == MINUS_EXPR)
1923 {
1924 if (integer_zerop (arg1)
1925 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1926 return arg0;
1927 }
1928 else if (code == MULT_EXPR)
1929 {
1930 if (integer_onep (arg0)
1931 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1932 return arg1;
1933 }
1934
1935 /* Handle general case of two integer constants. For sizetype
1936 constant calculations we always want to know about overflow,
1937 even in the unsigned case. */
1938 tree res = int_const_binop (code, arg0, arg1, -1);
1939 if (res != NULL_TREE)
1940 return res;
1941 }
1942
1943 return fold_build2_loc (loc, code, type, arg0, arg1);
1944 }
1945
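/* Example (illustrative sketch): a typical use of the size_binop
   convenience macro, assumed here to wrap size_binop_loc with
   UNKNOWN_LOCATION as declared in fold-const.h.  When both operands are
   sizetype INTEGER_CSTs, the int_const_binop fast path above folds the
   operation directly.  */
#if 0
  tree four = size_int (4);
  tree eight = size_int (8);
  /* Folds immediately to a sizetype INTEGER_CST with value 12.  */
  tree twelve = size_binop (PLUS_EXPR, four, eight);
#endif
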
1946 /* Given two values, either both of sizetype or both of bitsizetype,
1947 compute the difference between the two values. Return the value
1948 in signed type corresponding to the type of the operands. */
1949
1950 tree
1951 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1952 {
1953 tree type = TREE_TYPE (arg0);
1954 tree ctype;
1955
1956 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1957 TREE_TYPE (arg1)));
1958
1959 /* If the type is already signed, just do the simple thing. */
1960 if (!TYPE_UNSIGNED (type))
1961 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1962
1963 if (type == sizetype)
1964 ctype = ssizetype;
1965 else if (type == bitsizetype)
1966 ctype = sbitsizetype;
1967 else
1968 ctype = signed_type_for (type);
1969
1970 /* If either operand is not a constant, do the conversions to the signed
1971 type and subtract. The hardware will do the right thing with any
1972 overflow in the subtraction. */
1973 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1974 return size_binop_loc (loc, MINUS_EXPR,
1975 fold_convert_loc (loc, ctype, arg0),
1976 fold_convert_loc (loc, ctype, arg1));
1977
1978 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1979 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1980 overflow) and negate (which can't either). Special-case a result
1981 of zero while we're here. */
1982 if (tree_int_cst_equal (arg0, arg1))
1983 return build_int_cst (ctype, 0);
1984 else if (tree_int_cst_lt (arg1, arg0))
1985 return fold_convert_loc (loc, ctype,
1986 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1987 else
1988 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1989 fold_convert_loc (loc, ctype,
1990 size_binop_loc (loc,
1991 MINUS_EXPR,
1992 arg1, arg0)));
1993 }
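
/* Example (illustrative sketch): the size_diffop convenience macro
   (assumed to wrap size_diffop_loc with UNKNOWN_LOCATION) returns the
   difference in the signed counterpart type, so subtracting a larger
   sizetype constant from a smaller one does not rely on unsigned
   wrap-around.  */
#if 0
  /* 2 - 5: yields an ssizetype INTEGER_CST of value -3, computed above
     as -(5 - 2).  */
  tree diff = size_diffop (size_int (2), size_int (5));
#endif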
1994 \f
1995 /* A subroutine of fold_convert_const handling conversions of an
1996 INTEGER_CST to another integer type. */
1997
1998 static tree
1999 fold_convert_const_int_from_int (tree type, const_tree arg1)
2000 {
2001   /* Given an integer constant, make a new constant with the new type,
2002      appropriately sign-extended or truncated.  Use widest_int
2003      so that any extension is done according to ARG1's type.  */
2004 return force_fit_type (type, wi::to_widest (arg1),
2005 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2006 TREE_OVERFLOW (arg1));
2007 }
2008
2009 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2010 to an integer type. */
2011
2012 static tree
2013 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2014 {
2015 bool overflow = false;
2016 tree t;
2017
2018 /* The following code implements the floating point to integer
2019 conversion rules required by the Java Language Specification,
2020 that IEEE NaNs are mapped to zero and values that overflow
2021 the target precision saturate, i.e. values greater than
2022 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2023 are mapped to INT_MIN. These semantics are allowed by the
2024 C and C++ standards that simply state that the behavior of
2025 FP-to-integer conversion is unspecified upon overflow. */
2026
2027 wide_int val;
2028 REAL_VALUE_TYPE r;
2029 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2030
2031 switch (code)
2032 {
2033 case FIX_TRUNC_EXPR:
2034 real_trunc (&r, VOIDmode, &x);
2035 break;
2036
2037 default:
2038 gcc_unreachable ();
2039 }
2040
2041 /* If R is NaN, return zero and show we have an overflow. */
2042 if (REAL_VALUE_ISNAN (r))
2043 {
2044 overflow = true;
2045 val = wi::zero (TYPE_PRECISION (type));
2046 }
2047
2048 /* See if R is less than the lower bound or greater than the
2049 upper bound. */
2050
2051 if (! overflow)
2052 {
2053 tree lt = TYPE_MIN_VALUE (type);
2054 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2055 if (real_less (&r, &l))
2056 {
2057 overflow = true;
2058 val = wi::to_wide (lt);
2059 }
2060 }
2061
2062 if (! overflow)
2063 {
2064 tree ut = TYPE_MAX_VALUE (type);
2065 if (ut)
2066 {
2067 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2068 if (real_less (&u, &r))
2069 {
2070 overflow = true;
2071 val = wi::to_wide (ut);
2072 }
2073 }
2074 }
2075
2076 if (! overflow)
2077 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2078
2079 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2080 return t;
2081 }
2082
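/* Worked example (illustrative) of the saturating semantics implemented
   above, assuming conversion to a 32-bit signed integer type:
     (int) NaN     folds to 0        with TREE_OVERFLOW set,
     (int) 1.0e30  folds to INT_MAX  with TREE_OVERFLOW set,
     (int) -1.0e30 folds to INT_MIN  with TREE_OVERFLOW set,
     (int) 2.75    folds to 2        (FIX_TRUNC_EXPR truncates toward zero).  */
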
2083 /* A subroutine of fold_convert_const handling conversions of a
2084 FIXED_CST to an integer type. */
2085
2086 static tree
2087 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2088 {
2089 tree t;
2090 double_int temp, temp_trunc;
2091 scalar_mode mode;
2092
2093 /* Right shift FIXED_CST to temp by fbit. */
2094 temp = TREE_FIXED_CST (arg1).data;
2095 mode = TREE_FIXED_CST (arg1).mode;
2096 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2097 {
2098 temp = temp.rshift (GET_MODE_FBIT (mode),
2099 HOST_BITS_PER_DOUBLE_INT,
2100 SIGNED_FIXED_POINT_MODE_P (mode));
2101
2102 /* Left shift temp to temp_trunc by fbit. */
2103 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2104 HOST_BITS_PER_DOUBLE_INT,
2105 SIGNED_FIXED_POINT_MODE_P (mode));
2106 }
2107 else
2108 {
2109 temp = double_int_zero;
2110 temp_trunc = double_int_zero;
2111 }
2112
2113   /* If FIXED_CST is negative, we need to round the value toward 0:
2114      if the fractional bits are not zero, add 1 to temp.  */
2115 if (SIGNED_FIXED_POINT_MODE_P (mode)
2116 && temp_trunc.is_negative ()
2117 && TREE_FIXED_CST (arg1).data != temp_trunc)
2118 temp += double_int_one;
2119
2120   /* Given a fixed-point constant, make a new constant with the new type,
2121      appropriately sign-extended or truncated.  */
2122 t = force_fit_type (type, temp, -1,
2123 (temp.is_negative ()
2124 && (TYPE_UNSIGNED (type)
2125 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2126 | TREE_OVERFLOW (arg1));
2127
2128 return t;
2129 }
2130
2131 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2132 to another floating point type. */
2133
2134 static tree
2135 fold_convert_const_real_from_real (tree type, const_tree arg1)
2136 {
2137 REAL_VALUE_TYPE value;
2138 tree t;
2139
2140 /* Don't perform the operation if flag_signaling_nans is on
2141 and the operand is a signaling NaN. */
2142 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2143 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2144 return NULL_TREE;
2145
2146 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2147 t = build_real (type, value);
2148
2149 /* If converting an infinity or NAN to a representation that doesn't
2150 have one, set the overflow bit so that we can produce some kind of
2151 error message at the appropriate point if necessary. It's not the
2152 most user-friendly message, but it's better than nothing. */
2153 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2154 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2155 TREE_OVERFLOW (t) = 1;
2156 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2157 && !MODE_HAS_NANS (TYPE_MODE (type)))
2158 TREE_OVERFLOW (t) = 1;
2159   /* Regular overflow: the conversion produced an infinity in a mode that
2160      can't represent infinities.  */
2161 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2162 && REAL_VALUE_ISINF (value)
2163 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2164 TREE_OVERFLOW (t) = 1;
2165 else
2166 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2167 return t;
2168 }
2169
2170 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2171 to a floating point type. */
2172
2173 static tree
2174 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2175 {
2176 REAL_VALUE_TYPE value;
2177 tree t;
2178
2179 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2180 &TREE_FIXED_CST (arg1));
2181 t = build_real (type, value);
2182
2183 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2184 return t;
2185 }
2186
2187 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2188 to another fixed-point type. */
2189
2190 static tree
2191 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2192 {
2193 FIXED_VALUE_TYPE value;
2194 tree t;
2195 bool overflow_p;
2196
2197 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2198 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2199 t = build_fixed (type, value);
2200
2201 /* Propagate overflow flags. */
2202 if (overflow_p | TREE_OVERFLOW (arg1))
2203 TREE_OVERFLOW (t) = 1;
2204 return t;
2205 }
2206
2207 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2208 to a fixed-point type. */
2209
2210 static tree
2211 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2212 {
2213 FIXED_VALUE_TYPE value;
2214 tree t;
2215 bool overflow_p;
2216 double_int di;
2217
2218 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2219
2220 di.low = TREE_INT_CST_ELT (arg1, 0);
2221 if (TREE_INT_CST_NUNITS (arg1) == 1)
2222 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2223 else
2224 di.high = TREE_INT_CST_ELT (arg1, 1);
2225
2226 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2227 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2228 TYPE_SATURATING (type));
2229 t = build_fixed (type, value);
2230
2231 /* Propagate overflow flags. */
2232 if (overflow_p | TREE_OVERFLOW (arg1))
2233 TREE_OVERFLOW (t) = 1;
2234 return t;
2235 }
2236
2237 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2238 to a fixed-point type. */
2239
2240 static tree
2241 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2242 {
2243 FIXED_VALUE_TYPE value;
2244 tree t;
2245 bool overflow_p;
2246
2247 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2248 &TREE_REAL_CST (arg1),
2249 TYPE_SATURATING (type));
2250 t = build_fixed (type, value);
2251
2252 /* Propagate overflow flags. */
2253 if (overflow_p | TREE_OVERFLOW (arg1))
2254 TREE_OVERFLOW (t) = 1;
2255 return t;
2256 }
2257
2258 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2259 type TYPE. If no simplification can be done return NULL_TREE. */
2260
2261 static tree
2262 fold_convert_const (enum tree_code code, tree type, tree arg1)
2263 {
2264 tree arg_type = TREE_TYPE (arg1);
2265 if (arg_type == type)
2266 return arg1;
2267
2268 /* We can't widen types, since the runtime value could overflow the
2269 original type before being extended to the new type. */
2270 if (POLY_INT_CST_P (arg1)
2271 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2272 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2273 return build_poly_int_cst (type,
2274 poly_wide_int::from (poly_int_cst_value (arg1),
2275 TYPE_PRECISION (type),
2276 TYPE_SIGN (arg_type)));
2277
2278 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2279 || TREE_CODE (type) == OFFSET_TYPE)
2280 {
2281 if (TREE_CODE (arg1) == INTEGER_CST)
2282 return fold_convert_const_int_from_int (type, arg1);
2283 else if (TREE_CODE (arg1) == REAL_CST)
2284 return fold_convert_const_int_from_real (code, type, arg1);
2285 else if (TREE_CODE (arg1) == FIXED_CST)
2286 return fold_convert_const_int_from_fixed (type, arg1);
2287 }
2288 else if (TREE_CODE (type) == REAL_TYPE)
2289 {
2290 if (TREE_CODE (arg1) == INTEGER_CST)
2291 return build_real_from_int_cst (type, arg1);
2292 else if (TREE_CODE (arg1) == REAL_CST)
2293 return fold_convert_const_real_from_real (type, arg1);
2294 else if (TREE_CODE (arg1) == FIXED_CST)
2295 return fold_convert_const_real_from_fixed (type, arg1);
2296 }
2297 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2298 {
2299 if (TREE_CODE (arg1) == FIXED_CST)
2300 return fold_convert_const_fixed_from_fixed (type, arg1);
2301 else if (TREE_CODE (arg1) == INTEGER_CST)
2302 return fold_convert_const_fixed_from_int (type, arg1);
2303 else if (TREE_CODE (arg1) == REAL_CST)
2304 return fold_convert_const_fixed_from_real (type, arg1);
2305 }
2306 else if (TREE_CODE (type) == VECTOR_TYPE)
2307 {
2308 if (TREE_CODE (arg1) == VECTOR_CST
2309 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2310 {
2311 tree elttype = TREE_TYPE (type);
2312 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2313 /* We can't handle steps directly when extending, since the
2314 values need to wrap at the original precision first. */
2315 bool step_ok_p
2316 = (INTEGRAL_TYPE_P (elttype)
2317 && INTEGRAL_TYPE_P (arg1_elttype)
2318 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2319 tree_vector_builder v;
2320 if (!v.new_unary_operation (type, arg1, step_ok_p))
2321 return NULL_TREE;
2322 unsigned int len = v.encoded_nelts ();
2323 for (unsigned int i = 0; i < len; ++i)
2324 {
2325 tree elt = VECTOR_CST_ELT (arg1, i);
2326 tree cvt = fold_convert_const (code, elttype, elt);
2327 if (cvt == NULL_TREE)
2328 return NULL_TREE;
2329 v.quick_push (cvt);
2330 }
2331 return v.build ();
2332 }
2333 }
2334 return NULL_TREE;
2335 }
2336
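/* Example (illustrative sketch, using hypothetical operands): converting
   an INTEGER_CST to a narrower unsigned type truncates the value to the
   target precision.  */
#if 0
  tree narrowed
    = fold_convert_const (NOP_EXPR, unsigned_char_type_node,
			  build_int_cst (integer_type_node, 300));
  /* narrowed is an unsigned char INTEGER_CST with value 44 (300 mod 256),
     assuming the usual 8-bit unsigned_char_type_node.  */
#endif
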
2337 /* Construct a vector of zero elements of vector type TYPE. */
2338
2339 static tree
2340 build_zero_vector (tree type)
2341 {
2342 tree t;
2343
2344 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2345 return build_vector_from_val (type, t);
2346 }
2347
2348 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2349
2350 bool
2351 fold_convertible_p (const_tree type, const_tree arg)
2352 {
2353 tree orig = TREE_TYPE (arg);
2354
2355 if (type == orig)
2356 return true;
2357
2358 if (TREE_CODE (arg) == ERROR_MARK
2359 || TREE_CODE (type) == ERROR_MARK
2360 || TREE_CODE (orig) == ERROR_MARK)
2361 return false;
2362
2363 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2364 return true;
2365
2366 switch (TREE_CODE (type))
2367 {
2368 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2369 case POINTER_TYPE: case REFERENCE_TYPE:
2370 case OFFSET_TYPE:
2371 return (INTEGRAL_TYPE_P (orig)
2372 || (POINTER_TYPE_P (orig)
2373 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2374 || TREE_CODE (orig) == OFFSET_TYPE);
2375
2376 case REAL_TYPE:
2377 case FIXED_POINT_TYPE:
2378 case VOID_TYPE:
2379 return TREE_CODE (type) == TREE_CODE (orig);
2380
2381 case VECTOR_TYPE:
2382 return (VECTOR_TYPE_P (orig)
2383 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2384 TYPE_VECTOR_SUBPARTS (orig))
2385 && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2386
2387 default:
2388 return false;
2389 }
2390 }
2391
2392 /* Convert expression ARG to type TYPE. Used by the middle-end for
2393 simple conversions in preference to calling the front-end's convert. */
2394
2395 tree
2396 fold_convert_loc (location_t loc, tree type, tree arg)
2397 {
2398 tree orig = TREE_TYPE (arg);
2399 tree tem;
2400
2401 if (type == orig)
2402 return arg;
2403
2404 if (TREE_CODE (arg) == ERROR_MARK
2405 || TREE_CODE (type) == ERROR_MARK
2406 || TREE_CODE (orig) == ERROR_MARK)
2407 return error_mark_node;
2408
2409 switch (TREE_CODE (type))
2410 {
2411 case POINTER_TYPE:
2412 case REFERENCE_TYPE:
2413 /* Handle conversions between pointers to different address spaces. */
2414 if (POINTER_TYPE_P (orig)
2415 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2416 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2417 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2418 /* fall through */
2419
2420 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2421 case OFFSET_TYPE:
2422 if (TREE_CODE (arg) == INTEGER_CST)
2423 {
2424 tem = fold_convert_const (NOP_EXPR, type, arg);
2425 if (tem != NULL_TREE)
2426 return tem;
2427 }
2428 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2429 || TREE_CODE (orig) == OFFSET_TYPE)
2430 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2431 if (TREE_CODE (orig) == COMPLEX_TYPE)
2432 return fold_convert_loc (loc, type,
2433 fold_build1_loc (loc, REALPART_EXPR,
2434 TREE_TYPE (orig), arg));
2435 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2436 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2437 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2438
2439 case REAL_TYPE:
2440 if (TREE_CODE (arg) == INTEGER_CST)
2441 {
2442 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2443 if (tem != NULL_TREE)
2444 return tem;
2445 }
2446 else if (TREE_CODE (arg) == REAL_CST)
2447 {
2448 tem = fold_convert_const (NOP_EXPR, type, arg);
2449 if (tem != NULL_TREE)
2450 return tem;
2451 }
2452 else if (TREE_CODE (arg) == FIXED_CST)
2453 {
2454 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2455 if (tem != NULL_TREE)
2456 return tem;
2457 }
2458
2459 switch (TREE_CODE (orig))
2460 {
2461 case INTEGER_TYPE:
2462 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2463 case POINTER_TYPE: case REFERENCE_TYPE:
2464 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2465
2466 case REAL_TYPE:
2467 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2468
2469 case FIXED_POINT_TYPE:
2470 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2471
2472 case COMPLEX_TYPE:
2473 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2474 return fold_convert_loc (loc, type, tem);
2475
2476 default:
2477 gcc_unreachable ();
2478 }
2479
2480 case FIXED_POINT_TYPE:
2481 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2482 || TREE_CODE (arg) == REAL_CST)
2483 {
2484 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2485 if (tem != NULL_TREE)
2486 goto fold_convert_exit;
2487 }
2488
2489 switch (TREE_CODE (orig))
2490 {
2491 case FIXED_POINT_TYPE:
2492 case INTEGER_TYPE:
2493 case ENUMERAL_TYPE:
2494 case BOOLEAN_TYPE:
2495 case REAL_TYPE:
2496 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2497
2498 case COMPLEX_TYPE:
2499 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2500 return fold_convert_loc (loc, type, tem);
2501
2502 default:
2503 gcc_unreachable ();
2504 }
2505
2506 case COMPLEX_TYPE:
2507 switch (TREE_CODE (orig))
2508 {
2509 case INTEGER_TYPE:
2510 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2511 case POINTER_TYPE: case REFERENCE_TYPE:
2512 case REAL_TYPE:
2513 case FIXED_POINT_TYPE:
2514 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2515 fold_convert_loc (loc, TREE_TYPE (type), arg),
2516 fold_convert_loc (loc, TREE_TYPE (type),
2517 integer_zero_node));
2518 case COMPLEX_TYPE:
2519 {
2520 tree rpart, ipart;
2521
2522 if (TREE_CODE (arg) == COMPLEX_EXPR)
2523 {
2524 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2525 TREE_OPERAND (arg, 0));
2526 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2527 TREE_OPERAND (arg, 1));
2528 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2529 }
2530
2531 arg = save_expr (arg);
2532 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2533 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2534 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2535 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2536 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2537 }
2538
2539 default:
2540 gcc_unreachable ();
2541 }
2542
2543 case VECTOR_TYPE:
2544 if (integer_zerop (arg))
2545 return build_zero_vector (type);
2546 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2547 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2548 || TREE_CODE (orig) == VECTOR_TYPE);
2549 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2550
2551 case VOID_TYPE:
2552 tem = fold_ignored_result (arg);
2553 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2554
2555 default:
2556 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2557 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2558 gcc_unreachable ();
2559 }
2560 fold_convert_exit:
2561 protected_set_expr_location_unshare (tem, loc);
2562 return tem;
2563 }
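
/* Example (illustrative sketch): fold_convert (assumed to wrap
   fold_convert_loc with UNKNOWN_LOCATION) folds a constant integer to
   floating point immediately via fold_convert_const.  */
#if 0
  tree three = build_int_cst (integer_type_node, 3);
  /* Yields a REAL_CST of double_type_node with value 3.0.  */
  tree f = fold_convert (double_type_node, three);
#endif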
2564 \f
2565 /* Return false if expr can be assumed not to be an lvalue, true
2566 otherwise. */
2567
2568 static bool
2569 maybe_lvalue_p (const_tree x)
2570 {
2571 /* We only need to wrap lvalue tree codes. */
2572 switch (TREE_CODE (x))
2573 {
2574 case VAR_DECL:
2575 case PARM_DECL:
2576 case RESULT_DECL:
2577 case LABEL_DECL:
2578 case FUNCTION_DECL:
2579 case SSA_NAME:
2580
2581 case COMPONENT_REF:
2582 case MEM_REF:
2583 case INDIRECT_REF:
2584 case ARRAY_REF:
2585 case ARRAY_RANGE_REF:
2586 case BIT_FIELD_REF:
2587 case OBJ_TYPE_REF:
2588
2589 case REALPART_EXPR:
2590 case IMAGPART_EXPR:
2591 case PREINCREMENT_EXPR:
2592 case PREDECREMENT_EXPR:
2593 case SAVE_EXPR:
2594 case TRY_CATCH_EXPR:
2595 case WITH_CLEANUP_EXPR:
2596 case COMPOUND_EXPR:
2597 case MODIFY_EXPR:
2598 case TARGET_EXPR:
2599 case COND_EXPR:
2600 case BIND_EXPR:
2601 case VIEW_CONVERT_EXPR:
2602 break;
2603
2604 default:
2605 /* Assume the worst for front-end tree codes. */
2606 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2607 break;
2608 return false;
2609 }
2610
2611 return true;
2612 }
2613
2614 /* Return an expr equal to X but certainly not valid as an lvalue. */
2615
2616 tree
2617 non_lvalue_loc (location_t loc, tree x)
2618 {
2619 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2620 us. */
2621 if (in_gimple_form)
2622 return x;
2623
2624 if (! maybe_lvalue_p (x))
2625 return x;
2626 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2627 }
2628
2629 /* When pedantic, return an expr equal to X but certainly not valid as a
2630 pedantic lvalue. Otherwise, return X. */
2631
2632 static tree
2633 pedantic_non_lvalue_loc (location_t loc, tree x)
2634 {
2635 return protected_set_expr_location_unshare (x, loc);
2636 }
2637 \f
2638 /* Given a tree comparison code, return the code that is the logical inverse.
2639 It is generally not safe to do this for floating-point comparisons, except
2640 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2641 ERROR_MARK in this case. */
2642
2643 enum tree_code
2644 invert_tree_comparison (enum tree_code code, bool honor_nans)
2645 {
2646 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2647 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2648 return ERROR_MARK;
2649
2650 switch (code)
2651 {
2652 case EQ_EXPR:
2653 return NE_EXPR;
2654 case NE_EXPR:
2655 return EQ_EXPR;
2656 case GT_EXPR:
2657 return honor_nans ? UNLE_EXPR : LE_EXPR;
2658 case GE_EXPR:
2659 return honor_nans ? UNLT_EXPR : LT_EXPR;
2660 case LT_EXPR:
2661 return honor_nans ? UNGE_EXPR : GE_EXPR;
2662 case LE_EXPR:
2663 return honor_nans ? UNGT_EXPR : GT_EXPR;
2664 case LTGT_EXPR:
2665 return UNEQ_EXPR;
2666 case UNEQ_EXPR:
2667 return LTGT_EXPR;
2668 case UNGT_EXPR:
2669 return LE_EXPR;
2670 case UNGE_EXPR:
2671 return LT_EXPR;
2672 case UNLT_EXPR:
2673 return GE_EXPR;
2674 case UNLE_EXPR:
2675 return GT_EXPR;
2676 case ORDERED_EXPR:
2677 return UNORDERED_EXPR;
2678 case UNORDERED_EXPR:
2679 return ORDERED_EXPR;
2680 default:
2681 gcc_unreachable ();
2682 }
2683 }
2684
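/* Example (illustrative sketch): without NaNs, inverting x < y gives
   x >= y; when NaNs are honored the inverse is x UNGE y instead, and
   with trapping math such an inversion is refused (ERROR_MARK) because
   it could change which operands trap.  */
#if 0
  enum tree_code inv = invert_tree_comparison (LT_EXPR, false);  /* GE_EXPR */
#endif
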
2685 /* Similar, but return the comparison that results if the operands are
2686 swapped. This is safe for floating-point. */
2687
2688 enum tree_code
2689 swap_tree_comparison (enum tree_code code)
2690 {
2691 switch (code)
2692 {
2693 case EQ_EXPR:
2694 case NE_EXPR:
2695 case ORDERED_EXPR:
2696 case UNORDERED_EXPR:
2697 case LTGT_EXPR:
2698 case UNEQ_EXPR:
2699 return code;
2700 case GT_EXPR:
2701 return LT_EXPR;
2702 case GE_EXPR:
2703 return LE_EXPR;
2704 case LT_EXPR:
2705 return GT_EXPR;
2706 case LE_EXPR:
2707 return GE_EXPR;
2708 case UNGT_EXPR:
2709 return UNLT_EXPR;
2710 case UNGE_EXPR:
2711 return UNLE_EXPR;
2712 case UNLT_EXPR:
2713 return UNGT_EXPR;
2714 case UNLE_EXPR:
2715 return UNGE_EXPR;
2716 default:
2717 gcc_unreachable ();
2718 }
2719 }
2720
2721
2722 /* Convert a comparison tree code from an enum tree_code representation
2723 into a compcode bit-based encoding. This function is the inverse of
2724 compcode_to_comparison. */
2725
2726 static enum comparison_code
2727 comparison_to_compcode (enum tree_code code)
2728 {
2729 switch (code)
2730 {
2731 case LT_EXPR:
2732 return COMPCODE_LT;
2733 case EQ_EXPR:
2734 return COMPCODE_EQ;
2735 case LE_EXPR:
2736 return COMPCODE_LE;
2737 case GT_EXPR:
2738 return COMPCODE_GT;
2739 case NE_EXPR:
2740 return COMPCODE_NE;
2741 case GE_EXPR:
2742 return COMPCODE_GE;
2743 case ORDERED_EXPR:
2744 return COMPCODE_ORD;
2745 case UNORDERED_EXPR:
2746 return COMPCODE_UNORD;
2747 case UNLT_EXPR:
2748 return COMPCODE_UNLT;
2749 case UNEQ_EXPR:
2750 return COMPCODE_UNEQ;
2751 case UNLE_EXPR:
2752 return COMPCODE_UNLE;
2753 case UNGT_EXPR:
2754 return COMPCODE_UNGT;
2755 case LTGT_EXPR:
2756 return COMPCODE_LTGT;
2757 case UNGE_EXPR:
2758 return COMPCODE_UNGE;
2759 default:
2760 gcc_unreachable ();
2761 }
2762 }
2763
2764 /* Convert a compcode bit-based encoding of a comparison operator back
2765 to GCC's enum tree_code representation. This function is the
2766 inverse of comparison_to_compcode. */
2767
2768 static enum tree_code
2769 compcode_to_comparison (enum comparison_code code)
2770 {
2771 switch (code)
2772 {
2773 case COMPCODE_LT:
2774 return LT_EXPR;
2775 case COMPCODE_EQ:
2776 return EQ_EXPR;
2777 case COMPCODE_LE:
2778 return LE_EXPR;
2779 case COMPCODE_GT:
2780 return GT_EXPR;
2781 case COMPCODE_NE:
2782 return NE_EXPR;
2783 case COMPCODE_GE:
2784 return GE_EXPR;
2785 case COMPCODE_ORD:
2786 return ORDERED_EXPR;
2787 case COMPCODE_UNORD:
2788 return UNORDERED_EXPR;
2789 case COMPCODE_UNLT:
2790 return UNLT_EXPR;
2791 case COMPCODE_UNEQ:
2792 return UNEQ_EXPR;
2793 case COMPCODE_UNLE:
2794 return UNLE_EXPR;
2795 case COMPCODE_UNGT:
2796 return UNGT_EXPR;
2797 case COMPCODE_LTGT:
2798 return LTGT_EXPR;
2799 case COMPCODE_UNGE:
2800 return UNGE_EXPR;
2801 default:
2802 gcc_unreachable ();
2803 }
2804 }
2805
2806 /* Return true if COND1 tests the opposite condition of COND2. */
2807
2808 bool
2809 inverse_conditions_p (const_tree cond1, const_tree cond2)
2810 {
2811 return (COMPARISON_CLASS_P (cond1)
2812 && COMPARISON_CLASS_P (cond2)
2813 && (invert_tree_comparison
2814 (TREE_CODE (cond1),
2815 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2816 && operand_equal_p (TREE_OPERAND (cond1, 0),
2817 TREE_OPERAND (cond2, 0), 0)
2818 && operand_equal_p (TREE_OPERAND (cond1, 1),
2819 TREE_OPERAND (cond2, 1), 0));
2820 }
2821
2822 /* Return a tree for the comparison which is the combination of
2823 doing the AND or OR (depending on CODE) of the two operations LCODE
2824 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2825 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2826 if this makes the transformation invalid. */
2827
2828 tree
2829 combine_comparisons (location_t loc,
2830 enum tree_code code, enum tree_code lcode,
2831 enum tree_code rcode, tree truth_type,
2832 tree ll_arg, tree lr_arg)
2833 {
2834 bool honor_nans = HONOR_NANS (ll_arg);
2835 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2836 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2837 int compcode;
2838
2839 switch (code)
2840 {
2841 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2842 compcode = lcompcode & rcompcode;
2843 break;
2844
2845 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2846 compcode = lcompcode | rcompcode;
2847 break;
2848
2849 default:
2850 return NULL_TREE;
2851 }
2852
2853 if (!honor_nans)
2854 {
2855 /* Eliminate unordered comparisons, as well as LTGT and ORD
2856 which are not used unless the mode has NaNs. */
2857 compcode &= ~COMPCODE_UNORD;
2858 if (compcode == COMPCODE_LTGT)
2859 compcode = COMPCODE_NE;
2860 else if (compcode == COMPCODE_ORD)
2861 compcode = COMPCODE_TRUE;
2862 }
2863 else if (flag_trapping_math)
2864 {
2865 /* Check that the original operation and the optimized ones will trap
2866 under the same condition. */
2867 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2868 && (lcompcode != COMPCODE_EQ)
2869 && (lcompcode != COMPCODE_ORD);
2870 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2871 && (rcompcode != COMPCODE_EQ)
2872 && (rcompcode != COMPCODE_ORD);
2873 bool trap = (compcode & COMPCODE_UNORD) == 0
2874 && (compcode != COMPCODE_EQ)
2875 && (compcode != COMPCODE_ORD);
2876
2877 /* In a short-circuited boolean expression the LHS might be
2878 such that the RHS, if evaluated, will never trap. For
2879 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2880 if neither x nor y is NaN. (This is a mixed blessing: for
2881 example, the expression above will never trap, hence
2882 optimizing it to x < y would be invalid). */
2883 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2884 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2885 rtrap = false;
2886
2887 /* If the comparison was short-circuited, and only the RHS
2888 trapped, we may now generate a spurious trap. */
2889 if (rtrap && !ltrap
2890 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2891 return NULL_TREE;
2892
2893 /* If we changed the conditions that cause a trap, we lose. */
2894 if ((ltrap || rtrap) != trap)
2895 return NULL_TREE;
2896 }
2897
2898 if (compcode == COMPCODE_TRUE)
2899 return constant_boolean_node (true, truth_type);
2900 else if (compcode == COMPCODE_FALSE)
2901 return constant_boolean_node (false, truth_type);
2902 else
2903 {
2904 enum tree_code tcode;
2905
2906 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2907 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2908 }
2909 }
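
/* Example (illustrative sketch, with x and y standing for arbitrary
   integer-typed operand trees): combining (x < y) || (x == y) ORs
   COMPCODE_LT (1) with COMPCODE_EQ (2) into COMPCODE_LE (3), so the
   result folds to the single comparison x <= y.  */
#if 0
  tree cmp = combine_comparisons (UNKNOWN_LOCATION, TRUTH_ORIF_EXPR,
				  LT_EXPR, EQ_EXPR, boolean_type_node,
				  x, y);
#endif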
2910 \f
2911 /* Return nonzero if two operands (typically of the same tree node)
2912 are necessarily equal. FLAGS modifies behavior as follows:
2913
2914 If OEP_ONLY_CONST is set, only return nonzero for constants.
2915 This function tests whether the operands are indistinguishable;
2916 it does not test whether they are equal using C's == operation.
2917 The distinction is important for IEEE floating point, because
2918 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2919 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2920
2921 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2922 even though it may hold multiple values during a function.
2923 This is because a GCC tree node guarantees that nothing else is
2924 executed between the evaluation of its "operands" (which may often
2925 be evaluated in arbitrary order). Hence if the operands themselves
2926 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2927 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2928 unset means assuming isochronic (or instantaneous) tree equivalence.
2929 Unless comparing arbitrary expression trees, such as from different
2930 statements, this flag can usually be left unset.
2931
2932 If OEP_PURE_SAME is set, then pure functions with identical arguments
2933 are considered the same. It is used when the caller has other ways
2934 to ensure that global memory is unchanged in between.
2935
2936 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2937 not values of expressions.
2938
2939 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2940 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2941
2942 If OEP_BITWISE is set, then require the values to be bitwise identical
2943 rather than simply numerically equal. Do not take advantage of things
2944 like math-related flags or undefined behavior; only return true for
2945 values that are provably bitwise identical in all circumstances.
2946
2947 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2948    any operand with side effects.  This is unnecessarily conservative in the
2949 case we know that arg0 and arg1 are in disjoint code paths (such as in
2950 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2951 addresses with TREE_CONSTANT flag set so we know that &var == &var
2952 even if var is volatile. */
2953
2954 bool
2955 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2956 unsigned int flags)
2957 {
2958 bool r;
2959 if (verify_hash_value (arg0, arg1, flags, &r))
2960 return r;
2961
2962 STRIP_ANY_LOCATION_WRAPPER (arg0);
2963 STRIP_ANY_LOCATION_WRAPPER (arg1);
2964
2965 /* If either is ERROR_MARK, they aren't equal. */
2966 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2967 || TREE_TYPE (arg0) == error_mark_node
2968 || TREE_TYPE (arg1) == error_mark_node)
2969 return false;
2970
2971   /* Similarly, if either does not have a type (like a template id),
2972      they aren't equal.  */
2973 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2974 return false;
2975
2976 /* Bitwise identity makes no sense if the values have different layouts. */
2977 if ((flags & OEP_BITWISE)
2978 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2979 return false;
2980
2981 /* We cannot consider pointers to different address space equal. */
2982 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2983 && POINTER_TYPE_P (TREE_TYPE (arg1))
2984 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2985 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2986 return false;
2987
2988 /* Check equality of integer constants before bailing out due to
2989 precision differences. */
2990 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2991 {
2992 /* Address of INTEGER_CST is not defined; check that we did not forget
2993 to drop the OEP_ADDRESS_OF flags. */
2994 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2995 return tree_int_cst_equal (arg0, arg1);
2996 }
2997
2998 if (!(flags & OEP_ADDRESS_OF))
2999 {
3000 /* If both types don't have the same signedness, then we can't consider
3001 them equal. We must check this before the STRIP_NOPS calls
3002 because they may change the signedness of the arguments. As pointers
3003 strictly don't have a signedness, require either two pointers or
3004 two non-pointers as well. */
3005 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3006 || POINTER_TYPE_P (TREE_TYPE (arg0))
3007 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3008 return false;
3009
3010 /* If both types don't have the same precision, then it is not safe
3011 to strip NOPs. */
3012 if (element_precision (TREE_TYPE (arg0))
3013 != element_precision (TREE_TYPE (arg1)))
3014 return false;
3015
3016 STRIP_NOPS (arg0);
3017 STRIP_NOPS (arg1);
3018 }
3019 #if 0
3020 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3021 sanity check once the issue is solved. */
3022 else
3023 /* Addresses of conversions and SSA_NAMEs (and many other things)
3024 are not defined. Check that we did not forget to drop the
3025 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3026 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3027 && TREE_CODE (arg0) != SSA_NAME);
3028 #endif
3029
3030 /* In case both args are comparisons but with different comparison
3031 code, try to swap the comparison operands of one arg to produce
3032 a match and compare that variant. */
3033 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3034 && COMPARISON_CLASS_P (arg0)
3035 && COMPARISON_CLASS_P (arg1))
3036 {
3037 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3038
3039 if (TREE_CODE (arg0) == swap_code)
3040 return operand_equal_p (TREE_OPERAND (arg0, 0),
3041 TREE_OPERAND (arg1, 1), flags)
3042 && operand_equal_p (TREE_OPERAND (arg0, 1),
3043 TREE_OPERAND (arg1, 0), flags);
3044 }
3045
3046 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3047 {
3048 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3049 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3050 ;
3051 else if (flags & OEP_ADDRESS_OF)
3052 {
3053 /* If we are interested in comparing addresses ignore
3054 MEM_REF wrappings of the base that can appear just for
3055 TBAA reasons. */
3056 if (TREE_CODE (arg0) == MEM_REF
3057 && DECL_P (arg1)
3058 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3059 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3060 && integer_zerop (TREE_OPERAND (arg0, 1)))
3061 return true;
3062 else if (TREE_CODE (arg1) == MEM_REF
3063 && DECL_P (arg0)
3064 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3065 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3066 && integer_zerop (TREE_OPERAND (arg1, 1)))
3067 return true;
3068 return false;
3069 }
3070 else
3071 return false;
3072 }
3073
3074   /* When not checking addresses, this is needed for conversions and for
3075 COMPONENT_REF. Might as well play it safe and always test this. */
3076 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3077 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3078 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3079 && !(flags & OEP_ADDRESS_OF)))
3080 return false;
3081
3082 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3083 We don't care about side effects in that case because the SAVE_EXPR
3084 takes care of that for us. In all other cases, two expressions are
3085 equal if they have no side effects. If we have two identical
3086 expressions with side effects that should be treated the same due
3087 to the only side effects being identical SAVE_EXPR's, that will
3088 be detected in the recursive calls below.
3089 If we are taking an invariant address of two identical objects
3090 they are necessarily equal as well. */
3091 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3092 && (TREE_CODE (arg0) == SAVE_EXPR
3093 || (flags & OEP_MATCH_SIDE_EFFECTS)
3094 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3095 return true;
3096
3097 /* Next handle constant cases, those for which we can return 1 even
3098 if ONLY_CONST is set. */
3099 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3100 switch (TREE_CODE (arg0))
3101 {
3102 case INTEGER_CST:
3103 return tree_int_cst_equal (arg0, arg1);
3104
3105 case FIXED_CST:
3106 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3107 TREE_FIXED_CST (arg1));
3108
3109 case REAL_CST:
3110 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3111 return true;
3112
3113 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3114 {
3115 /* If we do not distinguish between signed and unsigned zero,
3116 consider them equal. */
3117 if (real_zerop (arg0) && real_zerop (arg1))
3118 return true;
3119 }
3120 return false;
3121
3122 case VECTOR_CST:
3123 {
3124 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3125 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3126 return false;
3127
3128 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3129 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3130 return false;
3131
3132 unsigned int count = vector_cst_encoded_nelts (arg0);
3133 for (unsigned int i = 0; i < count; ++i)
3134 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3135 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3136 return false;
3137 return true;
3138 }
3139
3140 case COMPLEX_CST:
3141 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3142 flags)
3143 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3144 flags));
3145
3146 case STRING_CST:
3147 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3148 && ! memcmp (TREE_STRING_POINTER (arg0),
3149 TREE_STRING_POINTER (arg1),
3150 TREE_STRING_LENGTH (arg0)));
3151
3152 case ADDR_EXPR:
3153 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3154 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3155 flags | OEP_ADDRESS_OF
3156 | OEP_MATCH_SIDE_EFFECTS);
3157 case CONSTRUCTOR:
3158 /* In GIMPLE empty constructors are allowed in initializers of
3159 aggregates. */
3160 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3161 default:
3162 break;
3163 }
3164
3165 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3166 two instances of undefined behavior will give identical results. */
3167 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3168 return false;
3169
3170 /* Define macros to test an operand from arg0 and arg1 for equality and a
3171 variant that allows null and views null as being different from any
3172    non-null value.  In the latter case, if either is null, then both
3173 must be; otherwise, do the normal comparison. */
3174 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3175 TREE_OPERAND (arg1, N), flags)
3176
3177 #define OP_SAME_WITH_NULL(N) \
3178 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3179 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3180
3181 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3182 {
3183 case tcc_unary:
3184 /* Two conversions are equal only if signedness and modes match. */
3185 switch (TREE_CODE (arg0))
3186 {
3187 CASE_CONVERT:
3188 case FIX_TRUNC_EXPR:
3189 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3190 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3191 return false;
3192 break;
3193 default:
3194 break;
3195 }
3196
3197 return OP_SAME (0);
3198
3199
3200 case tcc_comparison:
3201 case tcc_binary:
3202 if (OP_SAME (0) && OP_SAME (1))
3203 return true;
3204
3205 /* For commutative ops, allow the other order. */
3206 return (commutative_tree_code (TREE_CODE (arg0))
3207 && operand_equal_p (TREE_OPERAND (arg0, 0),
3208 TREE_OPERAND (arg1, 1), flags)
3209 && operand_equal_p (TREE_OPERAND (arg0, 1),
3210 TREE_OPERAND (arg1, 0), flags));
3211
3212 case tcc_reference:
3213 /* If either of the pointer (or reference) expressions we are
3214 dereferencing contain a side effect, these cannot be equal,
3215 but their addresses can be. */
3216 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3217 && (TREE_SIDE_EFFECTS (arg0)
3218 || TREE_SIDE_EFFECTS (arg1)))
3219 return false;
3220
3221 switch (TREE_CODE (arg0))
3222 {
3223 case INDIRECT_REF:
3224 if (!(flags & OEP_ADDRESS_OF))
3225 {
3226 if (TYPE_ALIGN (TREE_TYPE (arg0))
3227 != TYPE_ALIGN (TREE_TYPE (arg1)))
3228 return false;
3229 /* Verify that the access types are compatible. */
3230 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3231 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3232 return false;
3233 }
3234 flags &= ~OEP_ADDRESS_OF;
3235 return OP_SAME (0);
3236
3237 case IMAGPART_EXPR:
3238 /* Require the same offset. */
3239 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3240 TYPE_SIZE (TREE_TYPE (arg1)),
3241 flags & ~OEP_ADDRESS_OF))
3242 return false;
3243
3244 /* Fallthru. */
3245 case REALPART_EXPR:
3246 case VIEW_CONVERT_EXPR:
3247 return OP_SAME (0);
3248
3249 case TARGET_MEM_REF:
3250 case MEM_REF:
3251 if (!(flags & OEP_ADDRESS_OF))
3252 {
3253 /* Require equal access sizes */
3254 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3255 && (!TYPE_SIZE (TREE_TYPE (arg0))
3256 || !TYPE_SIZE (TREE_TYPE (arg1))
3257 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3258 TYPE_SIZE (TREE_TYPE (arg1)),
3259 flags)))
3260 return false;
3261 /* Verify that access happens in similar types. */
3262 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3263 return false;
3264 /* Verify that accesses are TBAA compatible. */
3265 if (!alias_ptr_types_compatible_p
3266 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3267 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3268 || (MR_DEPENDENCE_CLIQUE (arg0)
3269 != MR_DEPENDENCE_CLIQUE (arg1))
3270 || (MR_DEPENDENCE_BASE (arg0)
3271 != MR_DEPENDENCE_BASE (arg1)))
3272 return false;
3273 /* Verify that alignment is compatible. */
3274 if (TYPE_ALIGN (TREE_TYPE (arg0))
3275 != TYPE_ALIGN (TREE_TYPE (arg1)))
3276 return false;
3277 }
3278 flags &= ~OEP_ADDRESS_OF;
3279 return (OP_SAME (0) && OP_SAME (1)
3280 		  /* TARGET_MEM_REFs require equal extra operands.  */
3281 && (TREE_CODE (arg0) != TARGET_MEM_REF
3282 || (OP_SAME_WITH_NULL (2)
3283 && OP_SAME_WITH_NULL (3)
3284 && OP_SAME_WITH_NULL (4))));
3285
3286 case ARRAY_REF:
3287 case ARRAY_RANGE_REF:
3288 if (!OP_SAME (0))
3289 return false;
3290 flags &= ~OEP_ADDRESS_OF;
3291 	  /* Compare the array index by value first if it is constant, as we
3292 	     may have different types but the same value here.  */
3293 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3294 TREE_OPERAND (arg1, 1))
3295 || OP_SAME (1))
3296 && OP_SAME_WITH_NULL (2)
3297 && OP_SAME_WITH_NULL (3)
3298 /* Compare low bound and element size as with OEP_ADDRESS_OF
3299 we have to account for the offset of the ref. */
3300 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3301 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3302 || (operand_equal_p (array_ref_low_bound
3303 (CONST_CAST_TREE (arg0)),
3304 array_ref_low_bound
3305 (CONST_CAST_TREE (arg1)), flags)
3306 && operand_equal_p (array_ref_element_size
3307 (CONST_CAST_TREE (arg0)),
3308 array_ref_element_size
3309 (CONST_CAST_TREE (arg1)),
3310 flags))));
3311
3312 case COMPONENT_REF:
3313 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3314 may be NULL when we're called to compare MEM_EXPRs. */
3315 if (!OP_SAME_WITH_NULL (0)
3316 || !OP_SAME (1))
3317 return false;
3318 flags &= ~OEP_ADDRESS_OF;
3319 return OP_SAME_WITH_NULL (2);
3320
3321 case BIT_FIELD_REF:
3322 if (!OP_SAME (0))
3323 return false;
3324 flags &= ~OEP_ADDRESS_OF;
3325 return OP_SAME (1) && OP_SAME (2);
3326
3327 /* Virtual table call. */
3328 case OBJ_TYPE_REF:
3329 {
3330 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3331 OBJ_TYPE_REF_EXPR (arg1), flags))
3332 return false;
3333 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3334 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3335 return false;
3336 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3337 OBJ_TYPE_REF_OBJECT (arg1), flags))
3338 return false;
3339 if (!types_same_for_odr (obj_type_ref_class (arg0),
3340 obj_type_ref_class (arg1)))
3341 return false;
3342 return true;
3343 }
3344
3345 default:
3346 return false;
3347 }
3348
3349 case tcc_expression:
3350 switch (TREE_CODE (arg0))
3351 {
3352 case ADDR_EXPR:
3353 /* Be sure we pass right ADDRESS_OF flag. */
3354 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3355 return operand_equal_p (TREE_OPERAND (arg0, 0),
3356 TREE_OPERAND (arg1, 0),
3357 flags | OEP_ADDRESS_OF);
3358
3359 case TRUTH_NOT_EXPR:
3360 return OP_SAME (0);
3361
3362 case TRUTH_ANDIF_EXPR:
3363 case TRUTH_ORIF_EXPR:
3364 return OP_SAME (0) && OP_SAME (1);
3365
3366 case WIDEN_MULT_PLUS_EXPR:
3367 case WIDEN_MULT_MINUS_EXPR:
3368 if (!OP_SAME (2))
3369 return false;
3370 	  /* The multiplication operands are commutative.  */
3371 /* FALLTHRU */
3372
3373 case TRUTH_AND_EXPR:
3374 case TRUTH_OR_EXPR:
3375 case TRUTH_XOR_EXPR:
3376 if (OP_SAME (0) && OP_SAME (1))
3377 return true;
3378
3379 /* Otherwise take into account this is a commutative operation. */
3380 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3381 TREE_OPERAND (arg1, 1), flags)
3382 && operand_equal_p (TREE_OPERAND (arg0, 1),
3383 TREE_OPERAND (arg1, 0), flags));
3384
3385 case COND_EXPR:
3386 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3387 return false;
3388 flags &= ~OEP_ADDRESS_OF;
3389 return OP_SAME (0);
3390
3391 case BIT_INSERT_EXPR:
3392 	  /* BIT_INSERT_EXPR has an implicit operand as the type precision
3393 	     of op1.  Need to check to make sure they are the same.  */
3394 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3395 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3396 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3397 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3398 return false;
3399 /* FALLTHRU */
3400
3401 case VEC_COND_EXPR:
3402 case DOT_PROD_EXPR:
3403 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3404
3405 case MODIFY_EXPR:
3406 case INIT_EXPR:
3407 case COMPOUND_EXPR:
3408 case PREDECREMENT_EXPR:
3409 case PREINCREMENT_EXPR:
3410 case POSTDECREMENT_EXPR:
3411 case POSTINCREMENT_EXPR:
3412 if (flags & OEP_LEXICOGRAPHIC)
3413 return OP_SAME (0) && OP_SAME (1);
3414 return false;
3415
3416 case CLEANUP_POINT_EXPR:
3417 case EXPR_STMT:
3418 case SAVE_EXPR:
3419 if (flags & OEP_LEXICOGRAPHIC)
3420 return OP_SAME (0);
3421 return false;
3422
3423 default:
3424 return false;
3425 }
3426
3427 case tcc_vl_exp:
3428 switch (TREE_CODE (arg0))
3429 {
3430 case CALL_EXPR:
3431 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3432 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3433 	    /* If one CALL_EXPR is an internal function call and the other is
3434 	       a normal function call, then they are not equal.  */
3435 return false;
3436 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3437 {
3438 /* If the CALL_EXPRs call different internal functions, then they
3439 are not equal. */
3440 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3441 return false;
3442 }
3443 else
3444 {
3445 /* If the CALL_EXPRs call different functions, then they are not
3446 equal. */
3447 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3448 flags))
3449 return false;
3450 }
3451
3452 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3453 {
3454 unsigned int cef = call_expr_flags (arg0);
3455 if (flags & OEP_PURE_SAME)
3456 cef &= ECF_CONST | ECF_PURE;
3457 else
3458 cef &= ECF_CONST;
3459 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3460 return false;
3461 }
3462
3463 /* Now see if all the arguments are the same. */
3464 {
3465 const_call_expr_arg_iterator iter0, iter1;
3466 const_tree a0, a1;
3467 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3468 a1 = first_const_call_expr_arg (arg1, &iter1);
3469 a0 && a1;
3470 a0 = next_const_call_expr_arg (&iter0),
3471 a1 = next_const_call_expr_arg (&iter1))
3472 if (! operand_equal_p (a0, a1, flags))
3473 return false;
3474
3475 /* If we get here and both argument lists are exhausted
3476 then the CALL_EXPRs are equal. */
3477 return ! (a0 || a1);
3478 }
3479 default:
3480 return false;
3481 }
3482
3483 case tcc_declaration:
3484 /* Consider __builtin_sqrt equal to sqrt. */
3485 return (TREE_CODE (arg0) == FUNCTION_DECL
3486 && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3487 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3488 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3489 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3490
3491 case tcc_exceptional:
3492 if (TREE_CODE (arg0) == CONSTRUCTOR)
3493 {
3494 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3495 return false;
3496
3497 /* In GIMPLE constructors are used only to build vectors from
3498 elements. Individual elements in the constructor must be
3499 indexed in increasing order and form an initial sequence.
3500
3501 	     We make no effort to compare constructors in GENERIC.
3502 (see sem_variable::equals in ipa-icf which can do so for
3503 constants). */
3504 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3505 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3506 return false;
3507
3508 	  /* Be sure that the constructed vectors have the same representation.
3509 	     So far we have only tested that the element precisions and modes
3510 	     match; vectors may be BLKmode, so also check that the number of
3511 	     parts matches.  */
3512 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3513 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3514 return false;
3515
3516 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3517 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3518 unsigned int len = vec_safe_length (v0);
3519
3520 if (len != vec_safe_length (v1))
3521 return false;
3522
3523 for (unsigned int i = 0; i < len; i++)
3524 {
3525 constructor_elt *c0 = &(*v0)[i];
3526 constructor_elt *c1 = &(*v1)[i];
3527
3528 if (!operand_equal_p (c0->value, c1->value, flags)
3529 /* In GIMPLE the indexes can be either NULL or matching i.
3530 Double check this so we won't get false
3531 positives for GENERIC. */
3532 || (c0->index
3533 && (TREE_CODE (c0->index) != INTEGER_CST
3534 || compare_tree_int (c0->index, i)))
3535 || (c1->index
3536 && (TREE_CODE (c1->index) != INTEGER_CST
3537 || compare_tree_int (c1->index, i))))
3538 return false;
3539 }
3540 return true;
3541 }
3542 else if (TREE_CODE (arg0) == STATEMENT_LIST
3543 && (flags & OEP_LEXICOGRAPHIC))
3544 {
3545 /* Compare the STATEMENT_LISTs. */
3546 tree_stmt_iterator tsi1, tsi2;
3547 tree body1 = CONST_CAST_TREE (arg0);
3548 tree body2 = CONST_CAST_TREE (arg1);
3549 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3550 tsi_next (&tsi1), tsi_next (&tsi2))
3551 {
3552 /* The lists don't have the same number of statements. */
3553 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3554 return false;
3555 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3556 return true;
3557 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3558 flags & (OEP_LEXICOGRAPHIC
3559 | OEP_NO_HASH_CHECK)))
3560 return false;
3561 }
3562 }
3563 return false;
3564
3565 case tcc_statement:
3566 switch (TREE_CODE (arg0))
3567 {
3568 case RETURN_EXPR:
3569 if (flags & OEP_LEXICOGRAPHIC)
3570 return OP_SAME_WITH_NULL (0);
3571 return false;
3572 case DEBUG_BEGIN_STMT:
3573 if (flags & OEP_LEXICOGRAPHIC)
3574 return true;
3575 return false;
3576 default:
3577 return false;
3578 }
3579
3580 default:
3581 return false;
3582 }
3583
3584 #undef OP_SAME
3585 #undef OP_SAME_WITH_NULL
3586 }
3587
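/* Example (illustrative sketch): operand_equal_p compares structurally,
   so two distinct INTEGER_CST nodes with the same value are equal, while
   OEP_ONLY_CONST restricts equality to constants and OEP_BITWISE
   additionally requires bit-identical representations.  */
#if 0
  tree a = build_int_cst (integer_type_node, 42);
  tree b = build_int_cst (integer_type_node, 42);
  bool same = operand_equal_p (a, b, 0);  /* true */
#endif
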
3588 /* Generate a hash value for an expression. This can be used iteratively
3589 by passing a previous result as the HSTATE argument. */
3590
3591 void
3592 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3593 unsigned int flags)
3594 {
3595 int i;
3596 enum tree_code code;
3597 enum tree_code_class tclass;
3598
3599 if (t == NULL_TREE || t == error_mark_node)
3600 {
3601 hstate.merge_hash (0);
3602 return;
3603 }
3604
3605 STRIP_ANY_LOCATION_WRAPPER (t);
3606
3607 if (!(flags & OEP_ADDRESS_OF))
3608 STRIP_NOPS (t);
3609
3610 code = TREE_CODE (t);
3611
3612 switch (code)
3613 {
3614 /* Alas, constants aren't shared, so we can't rely on pointer
3615 identity. */
3616 case VOID_CST:
3617 hstate.merge_hash (0);
3618 return;
3619 case INTEGER_CST:
3620 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3621 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3622 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3623 return;
3624 case REAL_CST:
3625 {
3626 unsigned int val2;
3627 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3628 val2 = rvc_zero;
3629 else
3630 val2 = real_hash (TREE_REAL_CST_PTR (t));
3631 hstate.merge_hash (val2);
3632 return;
3633 }
3634 case FIXED_CST:
3635 {
3636 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3637 hstate.merge_hash (val2);
3638 return;
3639 }
3640 case STRING_CST:
3641 hstate.add ((const void *) TREE_STRING_POINTER (t),
3642 TREE_STRING_LENGTH (t));
3643 return;
3644 case COMPLEX_CST:
3645 hash_operand (TREE_REALPART (t), hstate, flags);
3646 hash_operand (TREE_IMAGPART (t), hstate, flags);
3647 return;
3648 case VECTOR_CST:
3649 {
3650 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3651 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3652 unsigned int count = vector_cst_encoded_nelts (t);
3653 for (unsigned int i = 0; i < count; ++i)
3654 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3655 return;
3656 }
3657 case SSA_NAME:
3658 /* We can just compare by pointer. */
3659 hstate.add_hwi (SSA_NAME_VERSION (t));
3660 return;
3661 case PLACEHOLDER_EXPR:
3662 /* The node itself doesn't matter. */
3663 return;
3664 case BLOCK:
3665 case OMP_CLAUSE:
3666 /* Ignore. */
3667 return;
3668 case TREE_LIST:
3669 /* A list of expressions, for a CALL_EXPR or as the elements of a
3670 VECTOR_CST. */
3671 for (; t; t = TREE_CHAIN (t))
3672 hash_operand (TREE_VALUE (t), hstate, flags);
3673 return;
3674 case CONSTRUCTOR:
3675 {
3676 unsigned HOST_WIDE_INT idx;
3677 tree field, value;
3678 flags &= ~OEP_ADDRESS_OF;
3679 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3680 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3681 {
3682 /* In GIMPLE the indexes can be either NULL or matching i. */
3683 if (field == NULL_TREE)
3684 field = bitsize_int (idx);
3685 hash_operand (field, hstate, flags);
3686 hash_operand (value, hstate, flags);
3687 }
3688 return;
3689 }
3690 case STATEMENT_LIST:
3691 {
3692 tree_stmt_iterator i;
3693 for (i = tsi_start (CONST_CAST_TREE (t));
3694 !tsi_end_p (i); tsi_next (&i))
3695 hash_operand (tsi_stmt (i), hstate, flags);
3696 return;
3697 }
3698 case TREE_VEC:
3699 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3700 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3701 return;
3702 case IDENTIFIER_NODE:
3703 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3704 return;
3705 case FUNCTION_DECL:
3706 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3707 Otherwise nodes that compare equal according to operand_equal_p might
3708 get different hash codes. However, don't do this for machine specific
3709 or front end builtins, since the function code is overloaded in those
3710 cases. */
3711 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3712 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3713 {
3714 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3715 code = TREE_CODE (t);
3716 }
3717 /* FALL THROUGH */
3718 default:
3719 if (POLY_INT_CST_P (t))
3720 {
3721 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3722 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3723 return;
3724 }
3725 tclass = TREE_CODE_CLASS (code);
3726
3727 if (tclass == tcc_declaration)
3728 {
3729 /* DECLs have a unique ID. */
3730 hstate.add_hwi (DECL_UID (t));
3731 }
3732 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3733 {
3734 /* For comparisons that can be swapped, use the lower
3735 tree code. */
3736 enum tree_code ccode = swap_tree_comparison (code);
3737 if (code < ccode)
3738 ccode = code;
3739 hstate.add_object (ccode);
3740 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3741 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3742 }
3743 else if (CONVERT_EXPR_CODE_P (code))
3744 {
3745 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3746 operand_equal_p. */
3747 enum tree_code ccode = NOP_EXPR;
3748 hstate.add_object (ccode);
3749
3750 /* Don't hash the type, as that can lead to having nodes which
3751 compare equal according to operand_equal_p but which
3752 have different hash codes. Make sure to include signedness
3753 in the hash computation. */
3754 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3755 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3756 }
3757 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3758 else if (code == MEM_REF
3759 && (flags & OEP_ADDRESS_OF) != 0
3760 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3761 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3762 && integer_zerop (TREE_OPERAND (t, 1)))
3763 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3764 hstate, flags);
3765 /* Don't ICE on FE specific trees, or their arguments etc.
3766 during operand_equal_p hash verification. */
3767 else if (!IS_EXPR_CODE_CLASS (tclass))
3768 gcc_assert (flags & OEP_HASH_CHECK);
3769 else
3770 {
3771 unsigned int sflags = flags;
3772
3773 hstate.add_object (code);
3774
3775 switch (code)
3776 {
3777 case ADDR_EXPR:
3778 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3779 flags |= OEP_ADDRESS_OF;
3780 sflags = flags;
3781 break;
3782
3783 case INDIRECT_REF:
3784 case MEM_REF:
3785 case TARGET_MEM_REF:
3786 flags &= ~OEP_ADDRESS_OF;
3787 sflags = flags;
3788 break;
3789
3790 case ARRAY_REF:
3791 case ARRAY_RANGE_REF:
3792 case COMPONENT_REF:
3793 case BIT_FIELD_REF:
3794 sflags &= ~OEP_ADDRESS_OF;
3795 break;
3796
3797 case COND_EXPR:
3798 flags &= ~OEP_ADDRESS_OF;
3799 break;
3800
3801 case WIDEN_MULT_PLUS_EXPR:
3802 case WIDEN_MULT_MINUS_EXPR:
3803 {
3804 /* The multiplication operands are commutative. */
3805 inchash::hash one, two;
3806 hash_operand (TREE_OPERAND (t, 0), one, flags);
3807 hash_operand (TREE_OPERAND (t, 1), two, flags);
3808 hstate.add_commutative (one, two);
3809 hash_operand (TREE_OPERAND (t, 2), two, flags);
3810 return;
3811 }
3812
3813 case CALL_EXPR:
3814 if (CALL_EXPR_FN (t) == NULL_TREE)
3815 hstate.add_int (CALL_EXPR_IFN (t));
3816 break;
3817
3818 case TARGET_EXPR:
3819 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3820 Usually different TARGET_EXPRs should just use
3821 different temporaries in their slots. */
3822 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3823 return;
3824
3825 /* Virtual table call. */
3826 case OBJ_TYPE_REF:
3827 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3828 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3829 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3830 return;
3831 default:
3832 break;
3833 }
3834
3835 /* Don't hash the type, as that can lead to having nodes which
3836 compare equal according to operand_equal_p but which
3837 have different hash codes. */
3838 if (code == NON_LVALUE_EXPR)
3839 {
3840 /* Make sure to include signedness in the hash computation. */
3841 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3842 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3843 }
3844
3845 else if (commutative_tree_code (code))
3846 {
3847 /* It's a commutative expression. We want to hash it the same
3848 however it appears. We do this by first hashing both operands
3849 and then rehashing based on the order of their independent
3850 hashes. */
3851 inchash::hash one, two;
3852 hash_operand (TREE_OPERAND (t, 0), one, flags);
3853 hash_operand (TREE_OPERAND (t, 1), two, flags);
3854 hstate.add_commutative (one, two);
3855 }
3856 else
3857 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3858 hash_operand (TREE_OPERAND (t, i), hstate,
3859 i == 0 ? flags : sflags);
3860 }
3861 return;
3862 }
3863 }
3864
3865 bool
3866 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3867 unsigned int flags, bool *ret)
3868 {
3869 /* When checking, verify at the outermost operand_equal_p call that
3870 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
3871 hash value. */
3872 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3873 {
3874 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3875 {
3876 if (arg0 != arg1)
3877 {
3878 inchash::hash hstate0 (0), hstate1 (0);
3879 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3880 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3881 hashval_t h0 = hstate0.end ();
3882 hashval_t h1 = hstate1.end ();
3883 gcc_assert (h0 == h1);
3884 }
3885 *ret = true;
3886 }
3887 else
3888 *ret = false;
3889
3890 return true;
3891 }
3892
3893 return false;
3894 }
3895
3896
3897 static operand_compare default_compare_instance;
3898
3899 /* Convenience wrapper around the operand_compare class, because usually we
3900 do not need to play with the valueizer. */
3901
3902 bool
3903 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3904 {
3905 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3906 }
3907
3908 namespace inchash
3909 {
3910
3911 /* Generate a hash value for an expression. This can be used iteratively
3912 by passing a previous result as the HSTATE argument.
3913
3914 This function is intended to produce the same hash for expressions which
3915 would compare equal using operand_equal_p. */
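/* As an illustrative sketch only (EXPR1 and EXPR2 are placeholders and the
   FLAGS argument depends on the caller), two trees can be hashed together
   like so:

     inchash::hash hstate (0);
     inchash::add_expr (expr1, hstate, 0);
     inchash::add_expr (expr2, hstate, 0);
     hashval_t h = hstate.end ();

   Trees that operand_equal_p considers equal contribute the same bits
   to H.  */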
3916 void
3917 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3918 {
3919 default_compare_instance.hash_operand (t, hstate, flags);
3920 }
3921
3922 }
3923 \f
3924 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3925 with a different signedness or a narrower precision. */
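/* For example (purely illustrative): if S has type unsigned short, then
   ARG0 == S and ARG1 == (int) S compare equal here through the
   single-widening-conversion case below; a mere change of signedness,
   say between int and unsigned int, is handled by stripping the
   mode-preserving conversions first.  */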
3926
3927 static bool
3928 operand_equal_for_comparison_p (tree arg0, tree arg1)
3929 {
3930 if (operand_equal_p (arg0, arg1, 0))
3931 return true;
3932
3933 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3934 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3935 return false;
3936
3937 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3938 and see if the inner values are the same. This removes any
3939 signedness comparison, which doesn't matter here. */
3940 tree op0 = arg0;
3941 tree op1 = arg1;
3942 STRIP_NOPS (op0);
3943 STRIP_NOPS (op1);
3944 if (operand_equal_p (op0, op1, 0))
3945 return true;
3946
3947 /* Discard a single widening conversion from ARG1 and see if the inner
3948 value is the same as ARG0. */
3949 if (CONVERT_EXPR_P (arg1)
3950 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3951 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3952 < TYPE_PRECISION (TREE_TYPE (arg1))
3953 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3954 return true;
3955
3956 return false;
3957 }
3958 \f
3959 /* See if ARG is an expression that is either a comparison or is performing
3960 arithmetic on comparisons. The comparisons must only be comparing
3961 two different values, which will be stored in *CVAL1 and *CVAL2; if
3962 they are nonzero it means that some operands have already been found.
3963 No variables may be used anywhere else in the expression except in the
3964 comparisons.
3965
3966 If so, return true. Otherwise, return false. */
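/* For instance (illustrative), for ARG of the form

     (a < b) || (b <= a)

   this returns true and sets *CVAL1 to A and *CVAL2 to B, assuming both
   were initially NULL; the same call fails for (a < b) || (c <= a)
   because three distinct values appear in the comparisons.  */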
3967
3968 static bool
3969 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3970 {
3971 enum tree_code code = TREE_CODE (arg);
3972 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3973
3974 /* We can handle some of the tcc_expression cases here. */
3975 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3976 tclass = tcc_unary;
3977 else if (tclass == tcc_expression
3978 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3979 || code == COMPOUND_EXPR))
3980 tclass = tcc_binary;
3981
3982 switch (tclass)
3983 {
3984 case tcc_unary:
3985 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3986
3987 case tcc_binary:
3988 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3989 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3990
3991 case tcc_constant:
3992 return true;
3993
3994 case tcc_expression:
3995 if (code == COND_EXPR)
3996 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3997 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3998 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3999 return false;
4000
4001 case tcc_comparison:
4002 /* First see if we can handle the first operand, then the second. For
4003 the second operand, we know *CVAL1 can't be zero. It must be that
4004 one side of the comparison is each of the values; test for the
4005 case where this isn't true by failing if the two operands
4006 are the same. */
4007
4008 if (operand_equal_p (TREE_OPERAND (arg, 0),
4009 TREE_OPERAND (arg, 1), 0))
4010 return false;
4011
4012 if (*cval1 == 0)
4013 *cval1 = TREE_OPERAND (arg, 0);
4014 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4015 ;
4016 else if (*cval2 == 0)
4017 *cval2 = TREE_OPERAND (arg, 0);
4018 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4019 ;
4020 else
4021 return false;
4022
4023 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4024 ;
4025 else if (*cval2 == 0)
4026 *cval2 = TREE_OPERAND (arg, 1);
4027 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4028 ;
4029 else
4030 return false;
4031
4032 return true;
4033
4034 default:
4035 return false;
4036 }
4037 }
4038 \f
4039 /* ARG is a tree that is known to contain just arithmetic operations and
4040 comparisons. Evaluate the operations in the tree substituting NEW0 for
4041 any occurrence of OLD0 as an operand of a comparison and likewise for
4042 NEW1 and OLD1. */
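/* Continuing the illustration above: eval_subst on (a < b) || (b <= a)
   with OLD0 == A, NEW0 == 0, OLD1 == B, NEW1 == 1 rebuilds the tree as
   (0 < 1) || (1 <= 0), which can then fold to a constant.  */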
4043
4044 static tree
4045 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4046 tree old1, tree new1)
4047 {
4048 tree type = TREE_TYPE (arg);
4049 enum tree_code code = TREE_CODE (arg);
4050 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4051
4052 /* We can handle some of the tcc_expression cases here. */
4053 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4054 tclass = tcc_unary;
4055 else if (tclass == tcc_expression
4056 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4057 tclass = tcc_binary;
4058
4059 switch (tclass)
4060 {
4061 case tcc_unary:
4062 return fold_build1_loc (loc, code, type,
4063 eval_subst (loc, TREE_OPERAND (arg, 0),
4064 old0, new0, old1, new1));
4065
4066 case tcc_binary:
4067 return fold_build2_loc (loc, code, type,
4068 eval_subst (loc, TREE_OPERAND (arg, 0),
4069 old0, new0, old1, new1),
4070 eval_subst (loc, TREE_OPERAND (arg, 1),
4071 old0, new0, old1, new1));
4072
4073 case tcc_expression:
4074 switch (code)
4075 {
4076 case SAVE_EXPR:
4077 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4078 old1, new1);
4079
4080 case COMPOUND_EXPR:
4081 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4082 old1, new1);
4083
4084 case COND_EXPR:
4085 return fold_build3_loc (loc, code, type,
4086 eval_subst (loc, TREE_OPERAND (arg, 0),
4087 old0, new0, old1, new1),
4088 eval_subst (loc, TREE_OPERAND (arg, 1),
4089 old0, new0, old1, new1),
4090 eval_subst (loc, TREE_OPERAND (arg, 2),
4091 old0, new0, old1, new1));
4092 default:
4093 break;
4094 }
4095 /* Fall through - ??? */
4096
4097 case tcc_comparison:
4098 {
4099 tree arg0 = TREE_OPERAND (arg, 0);
4100 tree arg1 = TREE_OPERAND (arg, 1);
4101
4102 /* We need to check both for exact equality and tree equality. The
4103 former will be true if the operand has a side-effect. In that
4104 case, we know the operand occurred exactly once. */
4105
4106 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4107 arg0 = new0;
4108 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4109 arg0 = new1;
4110
4111 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4112 arg1 = new0;
4113 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4114 arg1 = new1;
4115
4116 return fold_build2_loc (loc, code, type, arg0, arg1);
4117 }
4118
4119 default:
4120 return arg;
4121 }
4122 }
4123 \f
4124 /* Return a tree for the case when the result of an expression is RESULT
4125 converted to TYPE and OMITTED was previously an operand of the expression
4126 but is now not needed (e.g., we folded OMITTED * 0).
4127
4128 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4129 the conversion of RESULT to TYPE. */
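/* For example (illustrative): when f () * 0 is folded, RESULT is the
   constant 0 and OMITTED is the call f (); because the call has side
   effects, the returned tree is essentially the COMPOUND_EXPR (f (), 0),
   so the call is still evaluated.  */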
4130
4131 tree
4132 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4133 {
4134 tree t = fold_convert_loc (loc, type, result);
4135
4136 /* If the resulting operand is an empty statement, just return the omitted
4137 statement cast to void. */
4138 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4139 return build1_loc (loc, NOP_EXPR, void_type_node,
4140 fold_ignored_result (omitted));
4141
4142 if (TREE_SIDE_EFFECTS (omitted))
4143 return build2_loc (loc, COMPOUND_EXPR, type,
4144 fold_ignored_result (omitted), t);
4145
4146 return non_lvalue_loc (loc, t);
4147 }
4148
4149 /* Return a tree for the case when the result of an expression is RESULT
4150 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4151 of the expression but are now not needed.
4152
4153 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4154 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4155 evaluated before OMITTED2. Otherwise, if neither has side effects,
4156 just do the conversion of RESULT to TYPE. */
4157
4158 tree
4159 omit_two_operands_loc (location_t loc, tree type, tree result,
4160 tree omitted1, tree omitted2)
4161 {
4162 tree t = fold_convert_loc (loc, type, result);
4163
4164 if (TREE_SIDE_EFFECTS (omitted2))
4165 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4166 if (TREE_SIDE_EFFECTS (omitted1))
4167 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4168
4169 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4170 }
4171
4172 \f
4173 /* Return a simplified tree node for the truth-negation of ARG. This
4174 never alters ARG itself. We assume that ARG is an operation that
4175 returns a truth value (0 or 1).
4176
4177 FIXME: one would think we would fold the result, but it causes
4178 problems with the dominator optimizer. */
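/* A few illustrative cases: !(a < b) becomes a >= b when the comparison
   may safely be inverted, !(a && b) becomes !a || !b, and
   !(a ? b : c) becomes a ? !b : !c.  */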
4179
4180 static tree
4181 fold_truth_not_expr (location_t loc, tree arg)
4182 {
4183 tree type = TREE_TYPE (arg);
4184 enum tree_code code = TREE_CODE (arg);
4185 location_t loc1, loc2;
4186
4187 /* If this is a comparison, we can simply invert it, except for
4188 floating-point non-equality comparisons, in which case we just
4189 enclose a TRUTH_NOT_EXPR around what we have. */
4190
4191 if (TREE_CODE_CLASS (code) == tcc_comparison)
4192 {
4193 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4194 if (FLOAT_TYPE_P (op_type)
4195 && flag_trapping_math
4196 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4197 && code != NE_EXPR && code != EQ_EXPR)
4198 return NULL_TREE;
4199
4200 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4201 if (code == ERROR_MARK)
4202 return NULL_TREE;
4203
4204 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4205 TREE_OPERAND (arg, 1));
4206 if (TREE_NO_WARNING (arg))
4207 TREE_NO_WARNING (ret) = 1;
4208 return ret;
4209 }
4210
4211 switch (code)
4212 {
4213 case INTEGER_CST:
4214 return constant_boolean_node (integer_zerop (arg), type);
4215
4216 case TRUTH_AND_EXPR:
4217 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4218 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4219 return build2_loc (loc, TRUTH_OR_EXPR, type,
4220 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4221 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4222
4223 case TRUTH_OR_EXPR:
4224 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4225 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4226 return build2_loc (loc, TRUTH_AND_EXPR, type,
4227 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4228 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4229
4230 case TRUTH_XOR_EXPR:
4231 /* Here we can invert either operand. We invert the first operand
4232 unless the second operand is a TRUTH_NOT_EXPR in which case our
4233 result is the XOR of the first operand with the inside of the
4234 negation of the second operand. */
4235
4236 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4237 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4238 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4239 else
4240 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4241 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4242 TREE_OPERAND (arg, 1));
4243
4244 case TRUTH_ANDIF_EXPR:
4245 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4246 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4247 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4248 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4249 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4250
4251 case TRUTH_ORIF_EXPR:
4252 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4253 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4254 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4255 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4256 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4257
4258 case TRUTH_NOT_EXPR:
4259 return TREE_OPERAND (arg, 0);
4260
4261 case COND_EXPR:
4262 {
4263 tree arg1 = TREE_OPERAND (arg, 1);
4264 tree arg2 = TREE_OPERAND (arg, 2);
4265
4266 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4267 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4268
4269 /* A COND_EXPR may have a throw as one operand, which
4270 then has void type. Just leave void operands
4271 as they are. */
4272 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4273 VOID_TYPE_P (TREE_TYPE (arg1))
4274 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4275 VOID_TYPE_P (TREE_TYPE (arg2))
4276 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4277 }
4278
4279 case COMPOUND_EXPR:
4280 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4281 return build2_loc (loc, COMPOUND_EXPR, type,
4282 TREE_OPERAND (arg, 0),
4283 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4284
4285 case NON_LVALUE_EXPR:
4286 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4287 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4288
4289 CASE_CONVERT:
4290 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4291 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4292
4293 /* fall through */
4294
4295 case FLOAT_EXPR:
4296 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4297 return build1_loc (loc, TREE_CODE (arg), type,
4298 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4299
4300 case BIT_AND_EXPR:
4301 if (!integer_onep (TREE_OPERAND (arg, 1)))
4302 return NULL_TREE;
4303 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4304
4305 case SAVE_EXPR:
4306 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4307
4308 case CLEANUP_POINT_EXPR:
4309 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4310 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4311 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4312
4313 default:
4314 return NULL_TREE;
4315 }
4316 }
4317
4318 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4319 assume that ARG is an operation that returns a truth value (0 or 1
4320 for scalars, 0 or -1 for vectors). Return the folded expression if
4321 folding is successful. Otherwise, return NULL_TREE. */
4322
4323 static tree
4324 fold_invert_truthvalue (location_t loc, tree arg)
4325 {
4326 tree type = TREE_TYPE (arg);
4327 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4328 ? BIT_NOT_EXPR
4329 : TRUTH_NOT_EXPR,
4330 type, arg);
4331 }
4332
4333 /* Return a simplified tree node for the truth-negation of ARG. This
4334 never alters ARG itself. We assume that ARG is an operation that
4335 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4336
4337 tree
4338 invert_truthvalue_loc (location_t loc, tree arg)
4339 {
4340 if (TREE_CODE (arg) == ERROR_MARK)
4341 return arg;
4342
4343 tree type = TREE_TYPE (arg);
4344 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4345 ? BIT_NOT_EXPR
4346 : TRUTH_NOT_EXPR,
4347 type, arg);
4348 }
4349 \f
4350 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4351 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4352 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4353 is the original memory reference used to preserve the alias set of
4354 the access. */
4355
4356 static tree
4357 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4358 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4359 int unsignedp, int reversep)
4360 {
4361 tree result, bftype;
4362
4363 /* Attempt not to lose the access path if possible. */
4364 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4365 {
4366 tree ninner = TREE_OPERAND (orig_inner, 0);
4367 machine_mode nmode;
4368 poly_int64 nbitsize, nbitpos;
4369 tree noffset;
4370 int nunsignedp, nreversep, nvolatilep = 0;
4371 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4372 &noffset, &nmode, &nunsignedp,
4373 &nreversep, &nvolatilep);
4374 if (base == inner
4375 && noffset == NULL_TREE
4376 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4377 && !reversep
4378 && !nreversep
4379 && !nvolatilep)
4380 {
4381 inner = ninner;
4382 bitpos -= nbitpos;
4383 }
4384 }
4385
4386 alias_set_type iset = get_alias_set (orig_inner);
4387 if (iset == 0 && get_alias_set (inner) != iset)
4388 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4389 build_fold_addr_expr (inner),
4390 build_int_cst (ptr_type_node, 0));
4391
4392 if (known_eq (bitpos, 0) && !reversep)
4393 {
4394 tree size = TYPE_SIZE (TREE_TYPE (inner));
4395 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4396 || POINTER_TYPE_P (TREE_TYPE (inner)))
4397 && tree_fits_shwi_p (size)
4398 && tree_to_shwi (size) == bitsize)
4399 return fold_convert_loc (loc, type, inner);
4400 }
4401
4402 bftype = type;
4403 if (TYPE_PRECISION (bftype) != bitsize
4404 || TYPE_UNSIGNED (bftype) == !unsignedp)
4405 bftype = build_nonstandard_integer_type (bitsize, 0);
4406
4407 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4408 bitsize_int (bitsize), bitsize_int (bitpos));
4409 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4410
4411 if (bftype != type)
4412 result = fold_convert_loc (loc, type, result);
4413
4414 return result;
4415 }
4416
4417 /* Optimize a bit-field compare.
4418
4419 There are two cases: First is a compare against a constant and the
4420 second is a comparison of two items where the fields are at the same
4421 bit position relative to the start of a chunk (byte, halfword, word)
4422 large enough to contain it. In these cases we can avoid the shift
4423 implicit in bitfield extractions.
4424
4425 For constants, we emit a compare of the shifted constant with the
4426 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4427 compared. For two fields at the same position, we do the ANDs with the
4428 similar mask and compare the result of the ANDs.
4429
4430 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4431 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4432 are the left and right operands of the comparison, respectively.
4433
4434 If the optimization described above can be done, we return the resulting
4435 tree. Otherwise we return zero. */
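/* As a rough illustration (not the exact trees built): for

     struct S { unsigned int f : 3; } s;
     ... s.f == 5 ...

   the bit-field extraction can be replaced by something like

     (WORD & MASK) == ((5 << SHIFT) & MASK)

   where WORD is a mode-sized load covering the field and MASK and SHIFT
   are derived from the field's size and position.  */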
4436
4437 static tree
4438 optimize_bit_field_compare (location_t loc, enum tree_code code,
4439 tree compare_type, tree lhs, tree rhs)
4440 {
4441 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4442 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4443 tree type = TREE_TYPE (lhs);
4444 tree unsigned_type;
4445 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4446 machine_mode lmode, rmode;
4447 scalar_int_mode nmode;
4448 int lunsignedp, runsignedp;
4449 int lreversep, rreversep;
4450 int lvolatilep = 0, rvolatilep = 0;
4451 tree linner, rinner = NULL_TREE;
4452 tree mask;
4453 tree offset;
4454
4455 /* Get all the information about the extractions being done. If the bit size
4456 is the same as the size of the underlying object, we aren't doing an
4457 extraction at all and so can do nothing. We also don't want to
4458 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4459 then will no longer be able to replace it. */
4460 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4461 &lunsignedp, &lreversep, &lvolatilep);
4462 if (linner == lhs
4463 || !known_size_p (plbitsize)
4464 || !plbitsize.is_constant (&lbitsize)
4465 || !plbitpos.is_constant (&lbitpos)
4466 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4467 || offset != 0
4468 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4469 || lvolatilep)
4470 return 0;
4471
4472 if (const_p)
4473 rreversep = lreversep;
4474 else
4475 {
4476 /* If this is not a constant, we can only do something if bit positions,
4477 sizes, signedness and storage order are the same. */
4478 rinner
4479 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4480 &runsignedp, &rreversep, &rvolatilep);
4481
4482 if (rinner == rhs
4483 || maybe_ne (lbitpos, rbitpos)
4484 || maybe_ne (lbitsize, rbitsize)
4485 || lunsignedp != runsignedp
4486 || lreversep != rreversep
4487 || offset != 0
4488 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4489 || rvolatilep)
4490 return 0;
4491 }
4492
4493 /* Honor the C++ memory model and mimic what RTL expansion does. */
4494 poly_uint64 bitstart = 0;
4495 poly_uint64 bitend = 0;
4496 if (TREE_CODE (lhs) == COMPONENT_REF)
4497 {
4498 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4499 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4500 return 0;
4501 }
4502
4503 /* See if we can find a mode to refer to this field. We should be able to,
4504 but fail if we can't. */
4505 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4506 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4507 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4508 TYPE_ALIGN (TREE_TYPE (rinner))),
4509 BITS_PER_WORD, false, &nmode))
4510 return 0;
4511
4512 /* Get an unsigned type of the precision of this mode for the
4513 shifts below. */
4514 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4515
4516 /* Compute the bit position and size for the new reference and our offset
4517 within it. If the new reference is the same size as the original, we
4518 won't optimize anything, so return zero. */
4519 nbitsize = GET_MODE_BITSIZE (nmode);
4520 nbitpos = lbitpos & ~ (nbitsize - 1);
4521 lbitpos -= nbitpos;
4522 if (nbitsize == lbitsize)
4523 return 0;
4524
4525 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4526 lbitpos = nbitsize - lbitsize - lbitpos;
4527
4528 /* Make the mask to be used against the extracted field. */
4529 mask = build_int_cst_type (unsigned_type, -1);
4530 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4531 mask = const_binop (RSHIFT_EXPR, mask,
4532 size_int (nbitsize - lbitsize - lbitpos));
4533
4534 if (! const_p)
4535 {
4536 if (nbitpos < 0)
4537 return 0;
4538
4539 /* If not comparing with constant, just rework the comparison
4540 and return. */
4541 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4542 nbitsize, nbitpos, 1, lreversep);
4543 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4544 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4545 nbitsize, nbitpos, 1, rreversep);
4546 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4547 return fold_build2_loc (loc, code, compare_type, t1, t2);
4548 }
4549
4550 /* Otherwise, we are handling the constant case. See if the constant is too
4551 big for the field. Warn and return the known result of the comparison if so. We do
4552 this not only for its own sake, but to avoid having to test for this
4553 error case below. If we didn't, we might generate wrong code.
4554
4555 For unsigned fields, the constant shifted right by the field length should
4556 be all zero. For signed fields, the high-order bits should agree with
4557 the sign bit. */
4558
4559 if (lunsignedp)
4560 {
4561 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4562 {
4563 warning (0, "comparison is always %d due to width of bit-field",
4564 code == NE_EXPR);
4565 return constant_boolean_node (code == NE_EXPR, compare_type);
4566 }
4567 }
4568 else
4569 {
4570 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4571 if (tem != 0 && tem != -1)
4572 {
4573 warning (0, "comparison is always %d due to width of bit-field",
4574 code == NE_EXPR);
4575 return constant_boolean_node (code == NE_EXPR, compare_type);
4576 }
4577 }
4578
4579 if (nbitpos < 0)
4580 return 0;
4581
4582 /* Single-bit compares should always be against zero. */
4583 if (lbitsize == 1 && ! integer_zerop (rhs))
4584 {
4585 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4586 rhs = build_int_cst (type, 0);
4587 }
4588
4589 /* Make a new bitfield reference, shift the constant over the
4590 appropriate number of bits and mask it with the computed mask
4591 (in case this was a signed field). If we changed it, make a new one. */
4592 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4593 nbitsize, nbitpos, 1, lreversep);
4594
4595 rhs = const_binop (BIT_AND_EXPR,
4596 const_binop (LSHIFT_EXPR,
4597 fold_convert_loc (loc, unsigned_type, rhs),
4598 size_int (lbitpos)),
4599 mask);
4600
4601 lhs = build2_loc (loc, code, compare_type,
4602 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4603 return lhs;
4604 }
4605 \f
4606 /* Subroutine for fold_truth_andor_1: decode a field reference.
4607
4608 If EXP is a comparison reference, we return the innermost reference.
4609
4610 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4611 set to the starting bit number.
4612
4613 If the innermost field can be completely contained in a mode-sized
4614 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4615
4616 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4617 otherwise it is not changed.
4618
4619 *PUNSIGNEDP is set to the signedness of the field.
4620
4621 *PREVERSEP is set to the storage order of the field.
4622
4623 *PMASK is set to the mask used. This is either contained in a
4624 BIT_AND_EXPR or derived from the width of the field.
4625
4626 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4627
4628 Return 0 if this is not a component reference or is one that we can't
4629 do anything with. */
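/* Illustrative example: if *EXP_ is the tree for x.b & 3, the BIT_AND_EXPR
   is stripped, 3 is returned in *PAND_MASK and merged into *PMASK, *EXP_
   is updated to the reference x.b, and the value returned is the innermost
   object containing the field (here, x).  */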
4630
4631 static tree
4632 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4633 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4634 int *punsignedp, int *preversep, int *pvolatilep,
4635 tree *pmask, tree *pand_mask)
4636 {
4637 tree exp = *exp_;
4638 tree outer_type = 0;
4639 tree and_mask = 0;
4640 tree mask, inner, offset;
4641 tree unsigned_type;
4642 unsigned int precision;
4643
4644 /* All the optimizations using this function assume integer fields.
4645 There are problems with FP fields since the type_for_size call
4646 below can fail for, e.g., XFmode. */
4647 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4648 return NULL_TREE;
4649
4650 /* We are interested in the bare arrangement of bits, so strip everything
4651 that doesn't affect the machine mode. However, record the type of the
4652 outermost expression if it may matter below. */
4653 if (CONVERT_EXPR_P (exp)
4654 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4655 outer_type = TREE_TYPE (exp);
4656 STRIP_NOPS (exp);
4657
4658 if (TREE_CODE (exp) == BIT_AND_EXPR)
4659 {
4660 and_mask = TREE_OPERAND (exp, 1);
4661 exp = TREE_OPERAND (exp, 0);
4662 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4663 if (TREE_CODE (and_mask) != INTEGER_CST)
4664 return NULL_TREE;
4665 }
4666
4667 poly_int64 poly_bitsize, poly_bitpos;
4668 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4669 pmode, punsignedp, preversep, pvolatilep);
4670 if ((inner == exp && and_mask == 0)
4671 || !poly_bitsize.is_constant (pbitsize)
4672 || !poly_bitpos.is_constant (pbitpos)
4673 || *pbitsize < 0
4674 || offset != 0
4675 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4676 /* Reject out-of-bound accesses (PR79731). */
4677 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4678 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4679 *pbitpos + *pbitsize) < 0))
4680 return NULL_TREE;
4681
4682 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4683 if (unsigned_type == NULL_TREE)
4684 return NULL_TREE;
4685
4686 *exp_ = exp;
4687
4688 /* If the number of bits in the reference is the same as the bitsize of
4689 the outer type, then the outer type gives the signedness. Otherwise
4690 (in case of a small bitfield) the signedness is unchanged. */
4691 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4692 *punsignedp = TYPE_UNSIGNED (outer_type);
4693
4694 /* Compute the mask to access the bitfield. */
4695 precision = TYPE_PRECISION (unsigned_type);
4696
4697 mask = build_int_cst_type (unsigned_type, -1);
4698
4699 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4700 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4701
4702 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4703 if (and_mask != 0)
4704 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4705 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4706
4707 *pmask = mask;
4708 *pand_mask = and_mask;
4709 return inner;
4710 }
4711
4712 /* Return true if MASK represents a mask of SIZE ones in the low-order
4713 bit positions and the type of MASK is signed. */
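/* For instance (illustrative), with SIZE == 4 this accepts a MASK whose
   value is 15 (0b1111), provided the type of MASK is signed and at least
   4 bits wide.  */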
4714
4715 static bool
4716 all_ones_mask_p (const_tree mask, unsigned int size)
4717 {
4718 tree type = TREE_TYPE (mask);
4719 unsigned int precision = TYPE_PRECISION (type);
4720
4721 /* If this function returns true when the type of the mask is
4722 UNSIGNED, then there will be errors. In particular see
4723 gcc.c-torture/execute/990326-1.c. There does not appear to be
4724 any documentation paper trail as to why this is so. But the pre
4725 wide-int worked with that restriction and it has been preserved
4726 here. */
4727 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4728 return false;
4729
4730 return wi::mask (size, false, precision) == wi::to_wide (mask);
4731 }
4732
4733 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4734 represents the sign bit of EXP's type. If EXP represents a sign
4735 or zero extension, also test VAL against the unextended type.
4736 The return value is the (sub)expression whose sign bit is VAL,
4737 or NULL_TREE otherwise. */
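/* For instance (illustrative): if EXP has type int, VAL == INT_MIN (only
   the sign bit set) makes this return EXP; if EXP is (int) C with C of
   type signed char, VAL is additionally tested against the sign bit of
   the narrower type.  */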
4738
4739 tree
4740 sign_bit_p (tree exp, const_tree val)
4741 {
4742 int width;
4743 tree t;
4744
4745 /* Tree EXP must have an integral type. */
4746 t = TREE_TYPE (exp);
4747 if (! INTEGRAL_TYPE_P (t))
4748 return NULL_TREE;
4749
4750 /* Tree VAL must be an integer constant. */
4751 if (TREE_CODE (val) != INTEGER_CST
4752 || TREE_OVERFLOW (val))
4753 return NULL_TREE;
4754
4755 width = TYPE_PRECISION (t);
4756 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4757 return exp;
4758
4759 /* Handle extension from a narrower type. */
4760 if (TREE_CODE (exp) == NOP_EXPR
4761 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4762 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4763
4764 return NULL_TREE;
4765 }
4766
4767 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4768 to be evaluated unconditionally. */
4769
4770 static bool
4771 simple_operand_p (const_tree exp)
4772 {
4773 /* Strip any conversions that don't change the machine mode. */
4774 STRIP_NOPS (exp);
4775
4776 return (CONSTANT_CLASS_P (exp)
4777 || TREE_CODE (exp) == SSA_NAME
4778 || (DECL_P (exp)
4779 && ! TREE_ADDRESSABLE (exp)
4780 && ! TREE_THIS_VOLATILE (exp)
4781 && ! DECL_NONLOCAL (exp)
4782 /* Don't regard global variables as simple. They may be
4783 allocated in ways unknown to the compiler (shared memory,
4784 #pragma weak, etc). */
4785 && ! TREE_PUBLIC (exp)
4786 && ! DECL_EXTERNAL (exp)
4787 /* Weakrefs are not safe to be read, since they can be NULL.
4788 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4789 have DECL_WEAK flag set. */
4790 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4791 /* Loading a static variable is unduly expensive, but global
4792 registers aren't expensive. */
4793 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4794 }
4795
4796 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4797 to be evaluated unconditionally.
4798 In addition to simple_operand_p, we assume that comparisons, conversions,
4799 and logic-not operations are simple if their operands are simple too. */
4800
4801 static bool
4802 simple_operand_p_2 (tree exp)
4803 {
4804 enum tree_code code;
4805
4806 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4807 return false;
4808
4809 while (CONVERT_EXPR_P (exp))
4810 exp = TREE_OPERAND (exp, 0);
4811
4812 code = TREE_CODE (exp);
4813
4814 if (TREE_CODE_CLASS (code) == tcc_comparison)
4815 return (simple_operand_p (TREE_OPERAND (exp, 0))
4816 && simple_operand_p (TREE_OPERAND (exp, 1)));
4817
4818 if (code == TRUTH_NOT_EXPR)
4819 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4820
4821 return simple_operand_p (exp);
4822 }
4823
4824 \f
4825 /* The following functions are subroutines to fold_range_test and allow it to
4826 try to change a logical combination of comparisons into a range test.
4827
4828 For example, both
4829 X == 2 || X == 3 || X == 4 || X == 5
4830 and
4831 X >= 2 && X <= 5
4832 are converted to
4833 (unsigned) (X - 2) <= 3
4834
4835 We describe each set of comparisons as being either inside or outside
4836 a range, using a variable named like IN_P, and then describe the
4837 range with a lower and upper bound. If one of the bounds is omitted,
4838 it represents either the highest or lowest value of the type.
4839
4840 In the comments below, we represent a range by two numbers in brackets
4841 preceded by a "+" to designate being inside that range, or a "-" to
4842 designate being outside that range, so the condition can be inverted by
4843 flipping the prefix. An omitted bound is represented by a "-". For
4844 example, "- [-, 10]" means being outside the range starting at the lowest
4845 possible value and ending at 10, in other words, being greater than 10.
4846 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4847 always false.
4848
4849 We set up things so that the missing bounds are handled in a consistent
4850 manner so that neither a missing bound nor "true" or "false" needs to be
4851 handled as a special case. */
4852
4853 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4854 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4855 and UPPER1_P are nonzero if the respective argument is an upper bound
4856 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4857 must be specified for a comparison. ARG1 will be converted to ARG0's
4858 type if both are specified. */
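/* For example (illustrative): range_binop (LE_EXPR, type, NULL_TREE, 0,
   VAL, 1) compares a missing lower bound against the finite upper bound
   VAL and yields boolean true, because the missing bound behaves like
   minus infinity.  */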
4859
4860 static tree
4861 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4862 tree arg1, int upper1_p)
4863 {
4864 tree tem;
4865 int result;
4866 int sgn0, sgn1;
4867
4868 /* If neither arg represents infinity, do the normal operation.
4869 Else, if not a comparison, return infinity. Else handle the special
4870 comparison rules. Note that most of the cases below won't occur, but
4871 are handled for consistency. */
4872
4873 if (arg0 != 0 && arg1 != 0)
4874 {
4875 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4876 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4877 STRIP_NOPS (tem);
4878 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4879 }
4880
4881 if (TREE_CODE_CLASS (code) != tcc_comparison)
4882 return 0;
4883
4884 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4885 for neither. In real mathematics, we cannot assume open-ended ranges are
4886 the same. But this is computer arithmetic, where numbers are finite.
4887 We can therefore represent an unbounded bound by a value Z, with
4888 Z greater than any representable number. This permits
4889 us to treat unbounded ranges as equal.
4890 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4891 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4892 switch (code)
4893 {
4894 case EQ_EXPR:
4895 result = sgn0 == sgn1;
4896 break;
4897 case NE_EXPR:
4898 result = sgn0 != sgn1;
4899 break;
4900 case LT_EXPR:
4901 result = sgn0 < sgn1;
4902 break;
4903 case LE_EXPR:
4904 result = sgn0 <= sgn1;
4905 break;
4906 case GT_EXPR:
4907 result = sgn0 > sgn1;
4908 break;
4909 case GE_EXPR:
4910 result = sgn0 >= sgn1;
4911 break;
4912 default:
4913 gcc_unreachable ();
4914 }
4915
4916 return constant_boolean_node (result, type);
4917 }
4918 \f
4919 /* Helper routine for make_range. Perform one step for it, return
4920 new expression if the loop should continue or NULL_TREE if it should
4921 stop. */
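/* A single illustrative step: for CODE == PLUS_EXPR, ARG0 == X with X of
   an unsigned type, ARG1 == 5 and an incoming range of + [10, 20], the
   bounds are rewritten to + [5, 15] and X is returned, so the caller
   keeps iterating on X.  */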
4922
4923 tree
4924 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4925 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4926 bool *strict_overflow_p)
4927 {
4928 tree arg0_type = TREE_TYPE (arg0);
4929 tree n_low, n_high, low = *p_low, high = *p_high;
4930 int in_p = *p_in_p, n_in_p;
4931
4932 switch (code)
4933 {
4934 case TRUTH_NOT_EXPR:
4935 /* We can only do something if the range is testing for zero. */
4936 if (low == NULL_TREE || high == NULL_TREE
4937 || ! integer_zerop (low) || ! integer_zerop (high))
4938 return NULL_TREE;
4939 *p_in_p = ! in_p;
4940 return arg0;
4941
4942 case EQ_EXPR: case NE_EXPR:
4943 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4944 /* We can only do something if the range is testing for zero
4945 and if the second operand is an integer constant. Note that
4946 saying something is "in" the range we make is done by
4947 complementing IN_P since it will set in the initial case of
4948 being not equal to zero; "out" is leaving it alone. */
4949 if (low == NULL_TREE || high == NULL_TREE
4950 || ! integer_zerop (low) || ! integer_zerop (high)
4951 || TREE_CODE (arg1) != INTEGER_CST)
4952 return NULL_TREE;
4953
4954 switch (code)
4955 {
4956 case NE_EXPR: /* - [c, c] */
4957 low = high = arg1;
4958 break;
4959 case EQ_EXPR: /* + [c, c] */
4960 in_p = ! in_p, low = high = arg1;
4961 break;
4962 case GT_EXPR: /* - [-, c] */
4963 low = 0, high = arg1;
4964 break;
4965 case GE_EXPR: /* + [c, -] */
4966 in_p = ! in_p, low = arg1, high = 0;
4967 break;
4968 case LT_EXPR: /* - [c, -] */
4969 low = arg1, high = 0;
4970 break;
4971 case LE_EXPR: /* + [-, c] */
4972 in_p = ! in_p, low = 0, high = arg1;
4973 break;
4974 default:
4975 gcc_unreachable ();
4976 }
4977
4978 /* If this is an unsigned comparison, we also know that EXP is
4979 greater than or equal to zero. We base the range tests we make
4980 on that fact, so we record it here so we can parse existing
4981 range tests. We test arg0_type since often the return type
4982 of, e.g. EQ_EXPR, is boolean. */
4983 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4984 {
4985 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4986 in_p, low, high, 1,
4987 build_int_cst (arg0_type, 0),
4988 NULL_TREE))
4989 return NULL_TREE;
4990
4991 in_p = n_in_p, low = n_low, high = n_high;
4992
4993 /* If the high bound is missing, but we have a nonzero low
4994 bound, reverse the range so it goes from zero to the low bound
4995 minus 1. */
4996 if (high == 0 && low && ! integer_zerop (low))
4997 {
4998 in_p = ! in_p;
4999 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5000 build_int_cst (TREE_TYPE (low), 1), 0);
5001 low = build_int_cst (arg0_type, 0);
5002 }
5003 }
5004
5005 *p_low = low;
5006 *p_high = high;
5007 *p_in_p = in_p;
5008 return arg0;
5009
5010 case NEGATE_EXPR:
5011 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5012 low and high are non-NULL, then normalize will DTRT. */
5013 if (!TYPE_UNSIGNED (arg0_type)
5014 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5015 {
5016 if (low == NULL_TREE)
5017 low = TYPE_MIN_VALUE (arg0_type);
5018 if (high == NULL_TREE)
5019 high = TYPE_MAX_VALUE (arg0_type);
5020 }
5021
5022 /* (-x) IN [a,b] -> x in [-b, -a] */
5023 n_low = range_binop (MINUS_EXPR, exp_type,
5024 build_int_cst (exp_type, 0),
5025 0, high, 1);
5026 n_high = range_binop (MINUS_EXPR, exp_type,
5027 build_int_cst (exp_type, 0),
5028 0, low, 0);
5029 if (n_high != 0 && TREE_OVERFLOW (n_high))
5030 return NULL_TREE;
5031 goto normalize;
5032
5033 case BIT_NOT_EXPR:
5034 /* ~ X -> -X - 1 */
5035 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5036 build_int_cst (exp_type, 1));
5037
5038 case PLUS_EXPR:
5039 case MINUS_EXPR:
5040 if (TREE_CODE (arg1) != INTEGER_CST)
5041 return NULL_TREE;
5042
5043 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5044 move a constant to the other side. */
5045 if (!TYPE_UNSIGNED (arg0_type)
5046 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5047 return NULL_TREE;
5048
5049 /* If EXP is signed, any overflow in the computation is undefined,
5050 so we don't worry about it so long as our computations on
5051 the bounds don't overflow. For unsigned, overflow is defined
5052 and this is exactly the right thing. */
5053 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5054 arg0_type, low, 0, arg1, 0);
5055 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5056 arg0_type, high, 1, arg1, 0);
5057 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5058 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5059 return NULL_TREE;
5060
5061 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5062 *strict_overflow_p = true;
5063
5064 normalize:
5065 /* Check for an unsigned range which has wrapped around the maximum
5066 value thus making n_high < n_low, and normalize it. */
5067 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5068 {
5069 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5070 build_int_cst (TREE_TYPE (n_high), 1), 0);
5071 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5072 build_int_cst (TREE_TYPE (n_low), 1), 0);
5073
5074 /* If the range is of the form +/- [ x+1, x ], we won't
5075 be able to normalize it. But then, it represents the
5076 whole range or the empty set, so make it
5077 +/- [ -, - ]. */
5078 if (tree_int_cst_equal (n_low, low)
5079 && tree_int_cst_equal (n_high, high))
5080 low = high = 0;
5081 else
5082 in_p = ! in_p;
5083 }
5084 else
5085 low = n_low, high = n_high;
5086
5087 *p_low = low;
5088 *p_high = high;
5089 *p_in_p = in_p;
5090 return arg0;
5091
5092 CASE_CONVERT:
5093 case NON_LVALUE_EXPR:
5094 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5095 return NULL_TREE;
5096
5097 if (! INTEGRAL_TYPE_P (arg0_type)
5098 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5099 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5100 return NULL_TREE;
5101
5102 n_low = low, n_high = high;
5103
5104 if (n_low != 0)
5105 n_low = fold_convert_loc (loc, arg0_type, n_low);
5106
5107 if (n_high != 0)
5108 n_high = fold_convert_loc (loc, arg0_type, n_high);
5109
5110 /* If we're converting arg0, which has an unsigned type, to the
5111 signed type of exp, we will be doing the comparison as unsigned.
5112 The tests above have already verified that LOW and HIGH
5113 are both positive.
5114
5115 So we have to ensure that we will handle large unsigned
5116 values the same way that the current signed bounds treat
5117 negative values. */
5118
5119 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5120 {
5121 tree high_positive;
5122 tree equiv_type;
5123 /* For fixed-point modes, we need to pass the saturating flag
5124 as the 2nd parameter. */
5125 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5126 equiv_type
5127 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5128 TYPE_SATURATING (arg0_type));
5129 else
5130 equiv_type
5131 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5132
5133 /* A range without an upper bound is, naturally, unbounded.
5134 Since convert would have cropped a very large value, use
5135 the max value for the destination type. */
5136 high_positive
5137 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5138 : TYPE_MAX_VALUE (arg0_type);
5139
5140 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5141 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5142 fold_convert_loc (loc, arg0_type,
5143 high_positive),
5144 build_int_cst (arg0_type, 1));
5145
5146 /* If the low bound is specified, "and" the range with the
5147 range for which the original unsigned value will be
5148 positive. */
5149 if (low != 0)
5150 {
5151 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5152 1, fold_convert_loc (loc, arg0_type,
5153 integer_zero_node),
5154 high_positive))
5155 return NULL_TREE;
5156
5157 in_p = (n_in_p == in_p);
5158 }
5159 else
5160 {
5161 /* Otherwise, "or" the range with the range of the input
5162 that will be interpreted as negative. */
5163 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5164 1, fold_convert_loc (loc, arg0_type,
5165 integer_zero_node),
5166 high_positive))
5167 return NULL_TREE;
5168
5169 in_p = (in_p != n_in_p);
5170 }
5171 }
5172
5173 *p_low = n_low;
5174 *p_high = n_high;
5175 *p_in_p = in_p;
5176 return arg0;
5177
5178 default:
5179 return NULL_TREE;
5180 }
5181 }
5182
5183 /* Given EXP, a logical expression, set the range it is testing into
5184 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5185 actually being tested. *PLOW and *PHIGH will be made of the same
5186 type as the returned expression. If EXP is not a comparison, we
5187 will most likely not be returning a useful value and range. Set
5188 *STRICT_OVERFLOW_P to true if the return value is only valid
5189 because signed overflow is undefined; otherwise, do not change
5190 *STRICT_OVERFLOW_P. */
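/* For example (illustrative): for EXP == (x == 4) this returns X with
   *PIN_P == 1 and *PLOW == *PHIGH == 4, i.e. "X is in [4, 4]"; for
   EXP == (x != 0) it returns X with *PIN_P == 0 and a [0, 0] range.  */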
5191
5192 tree
5193 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5194 bool *strict_overflow_p)
5195 {
5196 enum tree_code code;
5197 tree arg0, arg1 = NULL_TREE;
5198 tree exp_type, nexp;
5199 int in_p;
5200 tree low, high;
5201 location_t loc = EXPR_LOCATION (exp);
5202
5203 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5204 and see if we can refine the range. Some of the cases below may not
5205 happen, but it doesn't seem worth worrying about this. We "continue"
5206 the outer loop when we've changed something; otherwise we "break"
5207 the switch, which will "break" the while. */
5208
5209 in_p = 0;
5210 low = high = build_int_cst (TREE_TYPE (exp), 0);
5211
5212 while (1)
5213 {
5214 code = TREE_CODE (exp);
5215 exp_type = TREE_TYPE (exp);
5216 arg0 = NULL_TREE;
5217
5218 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5219 {
5220 if (TREE_OPERAND_LENGTH (exp) > 0)
5221 arg0 = TREE_OPERAND (exp, 0);
5222 if (TREE_CODE_CLASS (code) == tcc_binary
5223 || TREE_CODE_CLASS (code) == tcc_comparison
5224 || (TREE_CODE_CLASS (code) == tcc_expression
5225 && TREE_OPERAND_LENGTH (exp) > 1))
5226 arg1 = TREE_OPERAND (exp, 1);
5227 }
5228 if (arg0 == NULL_TREE)
5229 break;
5230
5231 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5232 &high, &in_p, strict_overflow_p);
5233 if (nexp == NULL_TREE)
5234 break;
5235 exp = nexp;
5236 }
5237
5238 /* If EXP is a constant, we can evaluate whether this is true or false. */
5239 if (TREE_CODE (exp) == INTEGER_CST)
5240 {
5241 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5242 exp, 0, low, 0))
5243 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5244 exp, 1, high, 1)));
5245 low = high = 0;
5246 exp = 0;
5247 }
5248
5249 *pin_p = in_p, *plow = low, *phigh = high;
5250 return exp;
5251 }
5252
5253 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5254 a bitwise check i.e. when
5255 LOW == 0xXX...X00...0
5256 HIGH == 0xXX...X11...1
5257 Return corresponding mask in MASK and stem in VALUE. */
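/* For example (illustrative): LOW == 32 and HIGH == 47, i.e. 0b100000 and
   0b101111, give *MASK == ~15 and *VALUE == 32, so "x in [32, 47]" can be
   tested as (x & ~15) == 32.  */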
5258
5259 static bool
5260 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5261 tree *value)
5262 {
5263 if (TREE_CODE (low) != INTEGER_CST
5264 || TREE_CODE (high) != INTEGER_CST)
5265 return false;
5266
5267 unsigned prec = TYPE_PRECISION (type);
5268 wide_int lo = wi::to_wide (low, prec);
5269 wide_int hi = wi::to_wide (high, prec);
5270
5271 wide_int end_mask = lo ^ hi;
5272 if ((end_mask & (end_mask + 1)) != 0
5273 || (lo & end_mask) != 0)
5274 return false;
5275
5276 wide_int stem_mask = ~end_mask;
5277 wide_int stem = lo & stem_mask;
5278 if (stem != (hi & stem_mask))
5279 return false;
5280
5281 *mask = wide_int_to_tree (type, stem_mask);
5282 *value = wide_int_to_tree (type, stem);
5283
5284 return true;
5285 }
5286 \f
5287 /* Helper routine for build_range_check and match.pd. Return the type to
5288 perform the check or NULL if it shouldn't be optimized. */
5289
5290 tree
5291 range_check_type (tree etype)
5292 {
5293 /* First make sure that arithmetic in this type is valid, then make sure
5294 that it wraps around. */
5295 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5296 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5297
5298 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5299 {
5300 tree utype, minv, maxv;
5301
5302 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5303 for the type in question, as we rely on this here. */
5304 utype = unsigned_type_for (etype);
5305 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5306 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5307 build_int_cst (TREE_TYPE (maxv), 1), 1);
5308 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5309
5310 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5311 minv, 1, maxv, 1)))
5312 etype = utype;
5313 else
5314 return NULL_TREE;
5315 }
5316 else if (POINTER_TYPE_P (etype))
5317 etype = unsigned_type_for (etype);
5318 return etype;
5319 }
5320
5321 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5322 type, TYPE, return an expression to test if EXP is in (or out of, depending
5323 on IN_P) the range. Return 0 if the test couldn't be created. */
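/* For example (illustrative): a check that EXP is inside [3, 10] can be
   built as roughly (unsigned type) (EXP - 3) <= 7, relying on the
   wrap-around arithmetic of the unsigned type chosen by
   range_check_type.  */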
5324
5325 tree
5326 build_range_check (location_t loc, tree type, tree exp, int in_p,
5327 tree low, tree high)
5328 {
5329 tree etype = TREE_TYPE (exp), mask, value;
5330
5331 /* Disable this optimization for function pointer expressions
5332 on targets that require function pointer canonicalization. */
5333 if (targetm.have_canonicalize_funcptr_for_compare ()
5334 && POINTER_TYPE_P (etype)
5335 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5336 return NULL_TREE;
5337
5338 if (! in_p)
5339 {
5340 value = build_range_check (loc, type, exp, 1, low, high);
5341 if (value != 0)
5342 return invert_truthvalue_loc (loc, value);
5343
5344 return 0;
5345 }
5346
5347 if (low == 0 && high == 0)
5348 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5349
5350 if (low == 0)
5351 return fold_build2_loc (loc, LE_EXPR, type, exp,
5352 fold_convert_loc (loc, etype, high));
5353
5354 if (high == 0)
5355 return fold_build2_loc (loc, GE_EXPR, type, exp,
5356 fold_convert_loc (loc, etype, low));
5357
5358 if (operand_equal_p (low, high, 0))
5359 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5360 fold_convert_loc (loc, etype, low));
5361
5362 if (TREE_CODE (exp) == BIT_AND_EXPR
5363 && maskable_range_p (low, high, etype, &mask, &value))
5364 return fold_build2_loc (loc, EQ_EXPR, type,
5365 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5366 exp, mask),
5367 value);
5368
5369 if (integer_zerop (low))
5370 {
5371 if (! TYPE_UNSIGNED (etype))
5372 {
5373 etype = unsigned_type_for (etype);
5374 high = fold_convert_loc (loc, etype, high);
5375 exp = fold_convert_loc (loc, etype, exp);
5376 }
5377 return build_range_check (loc, type, exp, 1, 0, high);
5378 }
5379
5380 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5381 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5382 {
5383 int prec = TYPE_PRECISION (etype);
5384
5385 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5386 {
5387 if (TYPE_UNSIGNED (etype))
5388 {
5389 tree signed_etype = signed_type_for (etype);
5390 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5391 etype
5392 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5393 else
5394 etype = signed_etype;
5395 exp = fold_convert_loc (loc, etype, exp);
5396 }
5397 return fold_build2_loc (loc, GT_EXPR, type, exp,
5398 build_int_cst (etype, 0));
5399 }
5400 }
5401
5402 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5403 This requires wrap-around arithmetic for the type of the expression. */
5404 etype = range_check_type (etype);
5405 if (etype == NULL_TREE)
5406 return NULL_TREE;
5407
5408 high = fold_convert_loc (loc, etype, high);
5409 low = fold_convert_loc (loc, etype, low);
5410 exp = fold_convert_loc (loc, etype, exp);
5411
5412 value = const_binop (MINUS_EXPR, high, low);
5413
5414 if (value != 0 && !TREE_OVERFLOW (value))
5415 return build_range_check (loc, type,
5416 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5417 1, build_int_cst (etype, 0), value);
5418
5419 return 0;
5420 }
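/* An illustrative example of the transformation above: checking EXP in
   [10, 20] for a 32-bit signed EXP is rewritten, via the recursive call
   with a zero lower bound, into the single unsigned comparison
   (unsigned) (EXP - 10) <= 10.  The earlier special cases handle one-sided
   ranges (a single LE_EXPR or GE_EXPR), singleton ranges (EQ_EXPR), and
   ranges such as [1, 127] for a char, which become a sign test.  */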
5421 \f
5422 /* Return the predecessor of VAL in its type, handling the infinite case. */
5423
5424 static tree
5425 range_predecessor (tree val)
5426 {
5427 tree type = TREE_TYPE (val);
5428
5429 if (INTEGRAL_TYPE_P (type)
5430 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5431 return 0;
5432 else
5433 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5434 build_int_cst (TREE_TYPE (val), 1), 0);
5435 }
5436
5437 /* Return the successor of VAL in its type, handling the infinite case. */
5438
5439 static tree
5440 range_successor (tree val)
5441 {
5442 tree type = TREE_TYPE (val);
5443
5444 if (INTEGRAL_TYPE_P (type)
5445 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5446 return 0;
5447 else
5448 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5449 build_int_cst (TREE_TYPE (val), 1), 0);
5450 }
5451
5452 /* Given two ranges, see if we can merge them into one. Return 1 if we
5453 can, 0 if we can't. Set the output range into the specified parameters. */
5454
5455 bool
5456 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5457 tree high0, int in1_p, tree low1, tree high1)
5458 {
5459 int no_overlap;
5460 int subset;
5461 int temp;
5462 tree tem;
5463 int in_p;
5464 tree low, high;
5465 int lowequal = ((low0 == 0 && low1 == 0)
5466 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5467 low0, 0, low1, 0)));
5468 int highequal = ((high0 == 0 && high1 == 0)
5469 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5470 high0, 1, high1, 1)));
5471
5472 /* Make range 0 be the range that starts first, or ends last if they
5473 start at the same value. Swap them if that is not already the case. */
5474 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5475 low0, 0, low1, 0))
5476 || (lowequal
5477 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5478 high1, 1, high0, 1))))
5479 {
5480 temp = in0_p, in0_p = in1_p, in1_p = temp;
5481 tem = low0, low0 = low1, low1 = tem;
5482 tem = high0, high0 = high1, high1 = tem;
5483 }
5484
5485 /* If the second range is != high1, where high1 is the maximum value of
5486 its type, try first merging with the < high1 range. */
5487 if (low1
5488 && high1
5489 && TREE_CODE (low1) == INTEGER_CST
5490 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5491 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5492 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5493 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5494 && operand_equal_p (low1, high1, 0))
5495 {
5496 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5497 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5498 !in1_p, NULL_TREE, range_predecessor (low1)))
5499 return true;
5500 /* Similarly, for the second range != low1, where low1 is the minimum value
5501 of its type, try first merging with the > low1 range. */
5502 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5503 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5504 !in1_p, range_successor (low1), NULL_TREE))
5505 return true;
5506 }
5507
5508 /* Now flag two cases, whether the ranges are disjoint or whether the
5509 second range is totally subsumed in the first. Note that the tests
5510 below are simplified by the ones above. */
5511 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5512 high0, 1, low1, 0));
5513 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5514 high1, 1, high0, 1));
5515
5516 /* We now have four cases, depending on whether we are including or
5517 excluding the two ranges. */
5518 if (in0_p && in1_p)
5519 {
5520 /* If they don't overlap, the result is false. If the second range
5521 is a subset it is the result. Otherwise, the range is from the start
5522 of the second to the end of the first. */
5523 if (no_overlap)
5524 in_p = 0, low = high = 0;
5525 else if (subset)
5526 in_p = 1, low = low1, high = high1;
5527 else
5528 in_p = 1, low = low1, high = high0;
5529 }
5530
5531 else if (in0_p && ! in1_p)
5532 {
5533 /* If they don't overlap, the result is the first range. If they are
5534 equal, the result is false. If the second range is a subset of the
5535 first, and the ranges begin at the same place, we go from just after
5536 the end of the second range to the end of the first. If the second
5537 range is not a subset of the first, or if it is a subset and both
5538 ranges end at the same place, the range starts at the start of the
5539 first range and ends just before the second range.
5540 Otherwise, we can't describe this as a single range. */
5541 if (no_overlap)
5542 in_p = 1, low = low0, high = high0;
5543 else if (lowequal && highequal)
5544 in_p = 0, low = high = 0;
5545 else if (subset && lowequal)
5546 {
5547 low = range_successor (high1);
5548 high = high0;
5549 in_p = 1;
5550 if (low == 0)
5551 {
5552 /* We are in the weird situation where high0 > high1 but
5553 high1 has no successor. Punt. */
5554 return 0;
5555 }
5556 }
5557 else if (! subset || highequal)
5558 {
5559 low = low0;
5560 high = range_predecessor (low1);
5561 in_p = 1;
5562 if (high == 0)
5563 {
5564 /* low0 < low1 but low1 has no predecessor. Punt. */
5565 return 0;
5566 }
5567 }
5568 else
5569 return 0;
5570 }
5571
5572 else if (! in0_p && in1_p)
5573 {
5574 /* If they don't overlap, the result is the second range. If the second
5575 is a subset of the first, the result is false. Otherwise,
5576 the range starts just after the first range and ends at the
5577 end of the second. */
5578 if (no_overlap)
5579 in_p = 1, low = low1, high = high1;
5580 else if (subset || highequal)
5581 in_p = 0, low = high = 0;
5582 else
5583 {
5584 low = range_successor (high0);
5585 high = high1;
5586 in_p = 1;
5587 if (low == 0)
5588 {
5589 /* high1 > high0 but high0 has no successor. Punt. */
5590 return 0;
5591 }
5592 }
5593 }
5594
5595 else
5596 {
5597 /* The case where we are excluding both ranges. Here the complex case
5598 is if they don't overlap. In that case, the only time we have a
5599 range is if they are adjacent. If the second is a subset of the
5600 first, the result is the first. Otherwise, the range to exclude
5601 starts at the beginning of the first range and ends at the end of the
5602 second. */
5603 if (no_overlap)
5604 {
5605 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5606 range_successor (high0),
5607 1, low1, 0)))
5608 in_p = 0, low = low0, high = high1;
5609 else
5610 {
5611 /* Canonicalize - [min, x] into - [-, x]. */
5612 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5613 switch (TREE_CODE (TREE_TYPE (low0)))
5614 {
5615 case ENUMERAL_TYPE:
5616 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5617 GET_MODE_BITSIZE
5618 (TYPE_MODE (TREE_TYPE (low0)))))
5619 break;
5620 /* FALLTHROUGH */
5621 case INTEGER_TYPE:
5622 if (tree_int_cst_equal (low0,
5623 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5624 low0 = 0;
5625 break;
5626 case POINTER_TYPE:
5627 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5628 && integer_zerop (low0))
5629 low0 = 0;
5630 break;
5631 default:
5632 break;
5633 }
5634
5635 /* Canonicalize - [x, max] into - [x, -]. */
5636 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5637 switch (TREE_CODE (TREE_TYPE (high1)))
5638 {
5639 case ENUMERAL_TYPE:
5640 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5641 GET_MODE_BITSIZE
5642 (TYPE_MODE (TREE_TYPE (high1)))))
5643 break;
5644 /* FALLTHROUGH */
5645 case INTEGER_TYPE:
5646 if (tree_int_cst_equal (high1,
5647 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5648 high1 = 0;
5649 break;
5650 case POINTER_TYPE:
5651 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5652 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5653 high1, 1,
5654 build_int_cst (TREE_TYPE (high1), 1),
5655 1)))
5656 high1 = 0;
5657 break;
5658 default:
5659 break;
5660 }
5661
5662 /* The ranges might also be adjacent between the maximum and
5663 minimum values of the given type. For
5664 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5665 return + [x + 1, y - 1]. */
5666 if (low0 == 0 && high1 == 0)
5667 {
5668 low = range_successor (high0);
5669 high = range_predecessor (low1);
5670 if (low == 0 || high == 0)
5671 return 0;
5672
5673 in_p = 1;
5674 }
5675 else
5676 return 0;
5677 }
5678 }
5679 else if (subset)
5680 in_p = 0, low = low0, high = high0;
5681 else
5682 in_p = 0, low = low0, high = high1;
5683 }
5684
5685 *pin_p = in_p, *plow = low, *phigh = high;
5686 return 1;
5687 }
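/* A worked example: merging the two "in" ranges +[2, 10] and +[5, 20]
   (both IN_P set, as produced for X >= 2 && X <= 10 and X >= 5 && X <= 20)
   falls into the "in0_p && in1_p" case above: the ranges overlap and the
   second is not a subset of the first, so the result is +[5, 10], i.e.
   the intersection.  Two "in" ranges that do not overlap, such as
   +[2, 4] and +[10, 20], merge to the always-false range.  */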
5688 \f
5689
5690 /* Subroutine of fold, looking inside expressions of the form
5691 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5692 of the COND_EXPR. This function is being used also to optimize
5693 A op B ? C : A, by reversing the comparison first.
5694
5695 Return a folded expression whose code is not a COND_EXPR
5696 anymore, or NULL_TREE if no folding opportunity is found. */
5697
5698 static tree
5699 fold_cond_expr_with_comparison (location_t loc, tree type,
5700 tree arg0, tree arg1, tree arg2)
5701 {
5702 enum tree_code comp_code = TREE_CODE (arg0);
5703 tree arg00 = TREE_OPERAND (arg0, 0);
5704 tree arg01 = TREE_OPERAND (arg0, 1);
5705 tree arg1_type = TREE_TYPE (arg1);
5706 tree tem;
5707
5708 STRIP_NOPS (arg1);
5709 STRIP_NOPS (arg2);
5710
5711 /* If we have A op 0 ? A : -A, consider applying the following
5712 transformations:
5713
5714 A == 0? A : -A same as -A
5715 A != 0? A : -A same as A
5716 A >= 0? A : -A same as abs (A)
5717 A > 0? A : -A same as abs (A)
5718 A <= 0? A : -A same as -abs (A)
5719 A < 0? A : -A same as -abs (A)
5720
5721 None of these transformations work for modes with signed
5722 zeros. If A is +/-0, the first two transformations will
5723 change the sign of the result (from +0 to -0, or vice
5724 versa). The last four will fix the sign of the result,
5725 even though the original expressions could be positive or
5726 negative, depending on the sign of A.
5727
5728 Note that all these transformations are correct if A is
5729 NaN, since the two alternatives (A and -A) are also NaNs. */
5730 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5731 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5732 ? real_zerop (arg01)
5733 : integer_zerop (arg01))
5734 && ((TREE_CODE (arg2) == NEGATE_EXPR
5735 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5736 /* In the case that A is of the form X-Y, '-A' (arg2) may
5737 have already been folded to Y-X, check for that. */
5738 || (TREE_CODE (arg1) == MINUS_EXPR
5739 && TREE_CODE (arg2) == MINUS_EXPR
5740 && operand_equal_p (TREE_OPERAND (arg1, 0),
5741 TREE_OPERAND (arg2, 1), 0)
5742 && operand_equal_p (TREE_OPERAND (arg1, 1),
5743 TREE_OPERAND (arg2, 0), 0))))
5744 switch (comp_code)
5745 {
5746 case EQ_EXPR:
5747 case UNEQ_EXPR:
5748 tem = fold_convert_loc (loc, arg1_type, arg1);
5749 return fold_convert_loc (loc, type, negate_expr (tem));
5750 case NE_EXPR:
5751 case LTGT_EXPR:
5752 return fold_convert_loc (loc, type, arg1);
5753 case UNGE_EXPR:
5754 case UNGT_EXPR:
5755 if (flag_trapping_math)
5756 break;
5757 /* Fall through. */
5758 case GE_EXPR:
5759 case GT_EXPR:
5760 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5761 break;
5762 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5763 return fold_convert_loc (loc, type, tem);
5764 case UNLE_EXPR:
5765 case UNLT_EXPR:
5766 if (flag_trapping_math)
5767 break;
5768 /* FALLTHRU */
5769 case LE_EXPR:
5770 case LT_EXPR:
5771 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5772 break;
5773 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5774 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5775 {
5776 /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
5777 is not: it invokes UB both in abs and in the negation of its result.
5778 So, use ABSU_EXPR instead. */
5779 tree utype = unsigned_type_for (TREE_TYPE (arg1));
5780 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5781 tem = negate_expr (tem);
5782 return fold_convert_loc (loc, type, tem);
5783 }
5784 else
5785 {
5786 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5787 return negate_expr (fold_convert_loc (loc, type, tem));
5788 }
5789 default:
5790 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5791 break;
5792 }
5793
5794 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5795 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5796 both transformations are correct when A is NaN: A != 0
5797 is then true, and A == 0 is false. */
5798
5799 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5800 && integer_zerop (arg01) && integer_zerop (arg2))
5801 {
5802 if (comp_code == NE_EXPR)
5803 return fold_convert_loc (loc, type, arg1);
5804 else if (comp_code == EQ_EXPR)
5805 return build_zero_cst (type);
5806 }
5807
5808 /* Try some transformations of A op B ? A : B.
5809
5810 A == B? A : B same as B
5811 A != B? A : B same as A
5812 A >= B? A : B same as max (A, B)
5813 A > B? A : B same as max (B, A)
5814 A <= B? A : B same as min (A, B)
5815 A < B? A : B same as min (B, A)
5816
5817 As above, these transformations don't work in the presence
5818 of signed zeros. For example, if A and B are zeros of
5819 opposite sign, the first two transformations will change
5820 the sign of the result. In the last four, the original
5821 expressions give different results for (A=+0, B=-0) and
5822 (A=-0, B=+0), but the transformed expressions do not.
5823
5824 The first two transformations are correct if either A or B
5825 is a NaN. In the first transformation, the condition will
5826 be false, and B will indeed be chosen. In the case of the
5827 second transformation, the condition A != B will be true,
5828 and A will be chosen.
5829
5830 The conversions to max() and min() are not correct if B is
5831 a number and A is not. The conditions in the original
5832 expressions will be false, so all four give B. The min()
5833 and max() versions would give a NaN instead. */
5834 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5835 && operand_equal_for_comparison_p (arg01, arg2)
5836 /* Avoid these transformations if the COND_EXPR may be used
5837 as an lvalue in the C++ front-end. PR c++/19199. */
5838 && (in_gimple_form
5839 || VECTOR_TYPE_P (type)
5840 || (! lang_GNU_CXX ()
5841 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5842 || ! maybe_lvalue_p (arg1)
5843 || ! maybe_lvalue_p (arg2)))
5844 {
5845 tree comp_op0 = arg00;
5846 tree comp_op1 = arg01;
5847 tree comp_type = TREE_TYPE (comp_op0);
5848
5849 switch (comp_code)
5850 {
5851 case EQ_EXPR:
5852 return fold_convert_loc (loc, type, arg2);
5853 case NE_EXPR:
5854 return fold_convert_loc (loc, type, arg1);
5855 case LE_EXPR:
5856 case LT_EXPR:
5857 case UNLE_EXPR:
5858 case UNLT_EXPR:
5859 /* In C++ a ?: expression can be an lvalue, so put the
5860 operand which will be used if they are equal first
5861 so that we can convert this back to the
5862 corresponding COND_EXPR. */
5863 if (!HONOR_NANS (arg1))
5864 {
5865 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5866 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5867 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5868 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5869 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5870 comp_op1, comp_op0);
5871 return fold_convert_loc (loc, type, tem);
5872 }
5873 break;
5874 case GE_EXPR:
5875 case GT_EXPR:
5876 case UNGE_EXPR:
5877 case UNGT_EXPR:
5878 if (!HONOR_NANS (arg1))
5879 {
5880 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5881 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5882 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5883 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5884 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5885 comp_op1, comp_op0);
5886 return fold_convert_loc (loc, type, tem);
5887 }
5888 break;
5889 case UNEQ_EXPR:
5890 if (!HONOR_NANS (arg1))
5891 return fold_convert_loc (loc, type, arg2);
5892 break;
5893 case LTGT_EXPR:
5894 if (!HONOR_NANS (arg1))
5895 return fold_convert_loc (loc, type, arg1);
5896 break;
5897 default:
5898 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5899 break;
5900 }
5901 }
5902
5903 return NULL_TREE;
5904 }
5905
5906
5907 \f
5908 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5909 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5910 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5911 false) >= 2)
5912 #endif
5913
5914 /* EXP is some logical combination of boolean tests. See if we can
5915 merge it into some range test. Return the new tree if so. */
5916
5917 static tree
5918 fold_range_test (location_t loc, enum tree_code code, tree type,
5919 tree op0, tree op1)
5920 {
5921 int or_op = (code == TRUTH_ORIF_EXPR
5922 || code == TRUTH_OR_EXPR);
5923 int in0_p, in1_p, in_p;
5924 tree low0, low1, low, high0, high1, high;
5925 bool strict_overflow_p = false;
5926 tree tem, lhs, rhs;
5927 const char * const warnmsg = G_("assuming signed overflow does not occur "
5928 "when simplifying range test");
5929
5930 if (!INTEGRAL_TYPE_P (type))
5931 return 0;
5932
5933 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5934 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5935
5936 /* If this is an OR operation, invert both sides; we will invert
5937 again at the end. */
5938 if (or_op)
5939 in0_p = ! in0_p, in1_p = ! in1_p;
5940
5941 /* If both expressions are the same, if we can merge the ranges, and we
5942 can build the range test, return it or its inversion. If one of the
5943 ranges is always true or always false, consider it to be the same
5944 expression as the other. */
5945 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5946 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5947 in1_p, low1, high1)
5948 && (tem = (build_range_check (loc, type,
5949 lhs != 0 ? lhs
5950 : rhs != 0 ? rhs : integer_zero_node,
5951 in_p, low, high))) != 0)
5952 {
5953 if (strict_overflow_p)
5954 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5955 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5956 }
5957
5958 /* On machines where the branch cost is expensive, if this is a
5959 short-circuited branch and the underlying object on both sides
5960 is the same, make a non-short-circuit operation. */
5961 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
5962 if (param_logical_op_non_short_circuit != -1)
5963 logical_op_non_short_circuit
5964 = param_logical_op_non_short_circuit;
5965 if (logical_op_non_short_circuit
5966 && !flag_sanitize_coverage
5967 && lhs != 0 && rhs != 0
5968 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5969 && operand_equal_p (lhs, rhs, 0))
5970 {
5971 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5972 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5973 which cases we can't do this. */
5974 if (simple_operand_p (lhs))
5975 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5976 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5977 type, op0, op1);
5978
5979 else if (!lang_hooks.decls.global_bindings_p ()
5980 && !CONTAINS_PLACEHOLDER_P (lhs))
5981 {
5982 tree common = save_expr (lhs);
5983
5984 if ((lhs = build_range_check (loc, type, common,
5985 or_op ? ! in0_p : in0_p,
5986 low0, high0)) != 0
5987 && (rhs = build_range_check (loc, type, common,
5988 or_op ? ! in1_p : in1_p,
5989 low1, high1)) != 0)
5990 {
5991 if (strict_overflow_p)
5992 fold_overflow_warning (warnmsg,
5993 WARN_STRICT_OVERFLOW_COMPARISON);
5994 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5995 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5996 type, lhs, rhs);
5997 }
5998 }
5999 }
6000
6001 return 0;
6002 }
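/* Illustrative example: for "x >= 10 && x <= 20" with a 32-bit signed x,
   make_range gives the ranges +[10, -] and +[-, 20]; merge_ranges combines
   them into +[10, 20], and build_range_check then emits the single test
   (unsigned) (x - 10) <= 10.  For "||" the same machinery is applied to the
   inverted ranges, and the resulting check is inverted again at the end.  */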
6003 \f
6004 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
6005 P-bit value. Arrange things so the extra bits will be set to zero if and
6006 only if C is sign-extended to its full width. If MASK is nonzero,
6007 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6008
6009 static tree
6010 unextend (tree c, int p, int unsignedp, tree mask)
6011 {
6012 tree type = TREE_TYPE (c);
6013 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6014 tree temp;
6015
6016 if (p == modesize || unsignedp)
6017 return c;
6018
6019 /* We work by getting just the sign bit into the low-order bit, then
6020 into the high-order bit, then sign-extend. We then XOR that value
6021 with C. */
6022 temp = build_int_cst (TREE_TYPE (c),
6023 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6024
6025 /* We must use a signed type in order to get an arithmetic right shift.
6026 However, we must also avoid introducing accidental overflows, so that
6027 a subsequent call to integer_zerop will work. Hence we must
6028 do the type conversion here. At this point, the constant is either
6029 zero or one, and the conversion to a signed type can never overflow.
6030 We could get an overflow if this conversion is done anywhere else. */
6031 if (TYPE_UNSIGNED (type))
6032 temp = fold_convert (signed_type_for (type), temp);
6033
6034 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6035 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6036 if (mask != 0)
6037 temp = const_binop (BIT_AND_EXPR, temp,
6038 fold_convert (TREE_TYPE (c), mask));
6039 /* If necessary, convert the type back to match the type of C. */
6040 if (TYPE_UNSIGNED (type))
6041 temp = fold_convert (type, temp);
6042
6043 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6044 }
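/* A worked example (assuming a 32-bit mode, P == 8, UNSIGNEDP == 0 and
   MASK == NULL): for C == 0x000000ff the 8-bit sign bit is set but C is not
   sign-extended, so the XOR above forces the extra bits on and the result
   is 0xffffffff; for C == 0xffffffff (already sign-extended) the result is
   0x000000ff, i.e. the extra bits end up zero exactly when C was
   sign-extended, as the comment before the function requires.
   C == 0x0000007f is returned unchanged because its sign bit is clear.  */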
6045 \f
6046 /* For an expression that has the form
6047 (A && B) || ~B
6048 or
6049 (A || B) && ~B,
6050 we can drop one of the inner expressions and simplify to
6051 A || ~B
6052 or
6053 A && ~B
6054 LOC is the location of the resulting expression. OP is the inner
6055 logical operation; the left-hand side in the examples above, while CMPOP
6056 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6057 removing a condition that guards another, as in
6058 (A != NULL && A->...) || A == NULL
6059 which we must not transform. If RHS_ONLY is true, only eliminate the
6060 right-most operand of the inner logical operation. */
6061
6062 static tree
6063 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6064 bool rhs_only)
6065 {
6066 tree type = TREE_TYPE (cmpop);
6067 enum tree_code code = TREE_CODE (cmpop);
6068 enum tree_code truthop_code = TREE_CODE (op);
6069 tree lhs = TREE_OPERAND (op, 0);
6070 tree rhs = TREE_OPERAND (op, 1);
6071 tree orig_lhs = lhs, orig_rhs = rhs;
6072 enum tree_code rhs_code = TREE_CODE (rhs);
6073 enum tree_code lhs_code = TREE_CODE (lhs);
6074 enum tree_code inv_code;
6075
6076 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6077 return NULL_TREE;
6078
6079 if (TREE_CODE_CLASS (code) != tcc_comparison)
6080 return NULL_TREE;
6081
6082 if (rhs_code == truthop_code)
6083 {
6084 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6085 if (newrhs != NULL_TREE)
6086 {
6087 rhs = newrhs;
6088 rhs_code = TREE_CODE (rhs);
6089 }
6090 }
6091 if (lhs_code == truthop_code && !rhs_only)
6092 {
6093 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6094 if (newlhs != NULL_TREE)
6095 {
6096 lhs = newlhs;
6097 lhs_code = TREE_CODE (lhs);
6098 }
6099 }
6100
6101 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6102 if (inv_code == rhs_code
6103 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6104 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6105 return lhs;
6106 if (!rhs_only && inv_code == lhs_code
6107 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6108 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6109 return rhs;
6110 if (rhs != orig_rhs || lhs != orig_lhs)
6111 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6112 lhs, rhs);
6113 return NULL_TREE;
6114 }
6115
6116 /* Find ways of folding logical expressions of LHS and RHS:
6117 Try to merge two comparisons to the same innermost item.
6118 Look for range tests like "ch >= '0' && ch <= '9'".
6119 Look for combinations of simple terms on machines with expensive branches
6120 and evaluate the RHS unconditionally.
6121
6122 For example, if we have p->a == 2 && p->b == 4 and we can make an
6123 object large enough to span both A and B, we can do this with a comparison
6124 against the object ANDed with a mask.
6125
6126 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6127 operations to do this with one comparison.
6128
6129 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6130 function and the one above.
6131
6132 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6133 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6134
6135 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6136 two operands.
6137
6138 We return the simplified tree or 0 if no optimization is possible. */
6139
6140 static tree
6141 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6142 tree lhs, tree rhs)
6143 {
6144 /* If this is the "or" of two comparisons, we can do something if
6145 the comparisons are NE_EXPR. If this is the "and", we can do something
6146 if the comparisons are EQ_EXPR. I.e.,
6147 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6148
6149 WANTED_CODE is this operation code. For single bit fields, we can
6150 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6151 comparison for one-bit fields. */
6152
6153 enum tree_code wanted_code;
6154 enum tree_code lcode, rcode;
6155 tree ll_arg, lr_arg, rl_arg, rr_arg;
6156 tree ll_inner, lr_inner, rl_inner, rr_inner;
6157 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6158 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6159 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6160 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6161 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6162 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6163 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6164 scalar_int_mode lnmode, rnmode;
6165 tree ll_mask, lr_mask, rl_mask, rr_mask;
6166 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6167 tree l_const, r_const;
6168 tree lntype, rntype, result;
6169 HOST_WIDE_INT first_bit, end_bit;
6170 int volatilep;
6171
6172 /* Start by getting the comparison codes. Fail if anything is volatile.
6173 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6174 it were surrounded with a NE_EXPR. */
6175
6176 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6177 return 0;
6178
6179 lcode = TREE_CODE (lhs);
6180 rcode = TREE_CODE (rhs);
6181
6182 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6183 {
6184 lhs = build2 (NE_EXPR, truth_type, lhs,
6185 build_int_cst (TREE_TYPE (lhs), 0));
6186 lcode = NE_EXPR;
6187 }
6188
6189 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6190 {
6191 rhs = build2 (NE_EXPR, truth_type, rhs,
6192 build_int_cst (TREE_TYPE (rhs), 0));
6193 rcode = NE_EXPR;
6194 }
6195
6196 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6197 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6198 return 0;
6199
6200 ll_arg = TREE_OPERAND (lhs, 0);
6201 lr_arg = TREE_OPERAND (lhs, 1);
6202 rl_arg = TREE_OPERAND (rhs, 0);
6203 rr_arg = TREE_OPERAND (rhs, 1);
6204
6205 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6206 if (simple_operand_p (ll_arg)
6207 && simple_operand_p (lr_arg))
6208 {
6209 if (operand_equal_p (ll_arg, rl_arg, 0)
6210 && operand_equal_p (lr_arg, rr_arg, 0))
6211 {
6212 result = combine_comparisons (loc, code, lcode, rcode,
6213 truth_type, ll_arg, lr_arg);
6214 if (result)
6215 return result;
6216 }
6217 else if (operand_equal_p (ll_arg, rr_arg, 0)
6218 && operand_equal_p (lr_arg, rl_arg, 0))
6219 {
6220 result = combine_comparisons (loc, code, lcode,
6221 swap_tree_comparison (rcode),
6222 truth_type, ll_arg, lr_arg);
6223 if (result)
6224 return result;
6225 }
6226 }
6227
6228 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6229 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6230
6231 /* If the RHS can be evaluated unconditionally and its operands are
6232 simple, it wins to evaluate the RHS unconditionally on machines
6233 with expensive branches. In this case, this isn't a comparison
6234 that can be merged. */
6235
6236 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6237 false) >= 2
6238 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6239 && simple_operand_p (rl_arg)
6240 && simple_operand_p (rr_arg))
6241 {
6242 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6243 if (code == TRUTH_OR_EXPR
6244 && lcode == NE_EXPR && integer_zerop (lr_arg)
6245 && rcode == NE_EXPR && integer_zerop (rr_arg)
6246 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6247 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6248 return build2_loc (loc, NE_EXPR, truth_type,
6249 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6250 ll_arg, rl_arg),
6251 build_int_cst (TREE_TYPE (ll_arg), 0));
6252
6253 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6254 if (code == TRUTH_AND_EXPR
6255 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6256 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6257 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6258 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6259 return build2_loc (loc, EQ_EXPR, truth_type,
6260 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6261 ll_arg, rl_arg),
6262 build_int_cst (TREE_TYPE (ll_arg), 0));
6263 }
6264
6265 /* See if the comparisons can be merged. Then get all the parameters for
6266 each side. */
6267
6268 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6269 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6270 return 0;
6271
6272 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6273 volatilep = 0;
6274 ll_inner = decode_field_reference (loc, &ll_arg,
6275 &ll_bitsize, &ll_bitpos, &ll_mode,
6276 &ll_unsignedp, &ll_reversep, &volatilep,
6277 &ll_mask, &ll_and_mask);
6278 lr_inner = decode_field_reference (loc, &lr_arg,
6279 &lr_bitsize, &lr_bitpos, &lr_mode,
6280 &lr_unsignedp, &lr_reversep, &volatilep,
6281 &lr_mask, &lr_and_mask);
6282 rl_inner = decode_field_reference (loc, &rl_arg,
6283 &rl_bitsize, &rl_bitpos, &rl_mode,
6284 &rl_unsignedp, &rl_reversep, &volatilep,
6285 &rl_mask, &rl_and_mask);
6286 rr_inner = decode_field_reference (loc, &rr_arg,
6287 &rr_bitsize, &rr_bitpos, &rr_mode,
6288 &rr_unsignedp, &rr_reversep, &volatilep,
6289 &rr_mask, &rr_and_mask);
6290
6291 /* The inner operation on the lhs of each comparison must be the same
6292 if we are to be able to do anything.
6293 Then see if we have constants. If not, the same must be true for
6294 the rhs's. */
6295 if (volatilep
6296 || ll_reversep != rl_reversep
6297 || ll_inner == 0 || rl_inner == 0
6298 || ! operand_equal_p (ll_inner, rl_inner, 0))
6299 return 0;
6300
6301 if (TREE_CODE (lr_arg) == INTEGER_CST
6302 && TREE_CODE (rr_arg) == INTEGER_CST)
6303 {
6304 l_const = lr_arg, r_const = rr_arg;
6305 lr_reversep = ll_reversep;
6306 }
6307 else if (lr_reversep != rr_reversep
6308 || lr_inner == 0 || rr_inner == 0
6309 || ! operand_equal_p (lr_inner, rr_inner, 0))
6310 return 0;
6311 else
6312 l_const = r_const = 0;
6313
6314 /* If either comparison code is not correct for our logical operation,
6315 fail. However, we can convert a one-bit comparison against zero into
6316 the opposite comparison against that bit being set in the field. */
6317
6318 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6319 if (lcode != wanted_code)
6320 {
6321 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6322 {
6323 /* Make the left operand unsigned, since we are only interested
6324 in the value of one bit. Otherwise we are doing the wrong
6325 thing below. */
6326 ll_unsignedp = 1;
6327 l_const = ll_mask;
6328 }
6329 else
6330 return 0;
6331 }
6332
6333 /* This is analogous to the code for l_const above. */
6334 if (rcode != wanted_code)
6335 {
6336 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6337 {
6338 rl_unsignedp = 1;
6339 r_const = rl_mask;
6340 }
6341 else
6342 return 0;
6343 }
6344
6345 /* See if we can find a mode that contains both fields being compared on
6346 the left. If we can't, fail. Otherwise, update all constants and masks
6347 to be relative to a field of that size. */
6348 first_bit = MIN (ll_bitpos, rl_bitpos);
6349 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6350 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6351 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6352 volatilep, &lnmode))
6353 return 0;
6354
6355 lnbitsize = GET_MODE_BITSIZE (lnmode);
6356 lnbitpos = first_bit & ~ (lnbitsize - 1);
6357 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6358 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6359
6360 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6361 {
6362 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6363 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6364 }
6365
6366 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6367 size_int (xll_bitpos));
6368 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6369 size_int (xrl_bitpos));
6370
6371 if (l_const)
6372 {
6373 l_const = fold_convert_loc (loc, lntype, l_const);
6374 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6375 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6376 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6377 fold_build1_loc (loc, BIT_NOT_EXPR,
6378 lntype, ll_mask))))
6379 {
6380 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6381
6382 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6383 }
6384 }
6385 if (r_const)
6386 {
6387 r_const = fold_convert_loc (loc, lntype, r_const);
6388 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6389 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6390 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6391 fold_build1_loc (loc, BIT_NOT_EXPR,
6392 lntype, rl_mask))))
6393 {
6394 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6395
6396 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6397 }
6398 }
6399
6400 /* If the right sides are not constant, do the same for them. Also,
6401 disallow this optimization if a size, signedness or storage order
6402 mismatch occurs between the left and right sides. */
6403 if (l_const == 0)
6404 {
6405 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6406 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6407 || ll_reversep != lr_reversep
6408 /* Make sure the two fields on the right
6409 correspond to the left without being swapped. */
6410 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6411 return 0;
6412
6413 first_bit = MIN (lr_bitpos, rr_bitpos);
6414 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6415 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6416 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6417 volatilep, &rnmode))
6418 return 0;
6419
6420 rnbitsize = GET_MODE_BITSIZE (rnmode);
6421 rnbitpos = first_bit & ~ (rnbitsize - 1);
6422 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6423 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6424
6425 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6426 {
6427 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6428 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6429 }
6430
6431 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6432 rntype, lr_mask),
6433 size_int (xlr_bitpos));
6434 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6435 rntype, rr_mask),
6436 size_int (xrr_bitpos));
6437
6438 /* Make a mask that corresponds to both fields being compared.
6439 Do this for both items being compared. If the operands are the
6440 same size and the bits being compared are in the same position
6441 then we can do this by masking both and comparing the masked
6442 results. */
6443 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6444 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6445 if (lnbitsize == rnbitsize
6446 && xll_bitpos == xlr_bitpos
6447 && lnbitpos >= 0
6448 && rnbitpos >= 0)
6449 {
6450 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6451 lntype, lnbitsize, lnbitpos,
6452 ll_unsignedp || rl_unsignedp, ll_reversep);
6453 if (! all_ones_mask_p (ll_mask, lnbitsize))
6454 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6455
6456 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6457 rntype, rnbitsize, rnbitpos,
6458 lr_unsignedp || rr_unsignedp, lr_reversep);
6459 if (! all_ones_mask_p (lr_mask, rnbitsize))
6460 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6461
6462 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6463 }
6464
6465 /* There is still another way we can do something: If both pairs of
6466 fields being compared are adjacent, we may be able to make a wider
6467 field containing them both.
6468
6469 Note that we still must mask the lhs/rhs expressions. Furthermore,
6470 the mask must be shifted to account for the shift done by
6471 make_bit_field_ref. */
6472 if (((ll_bitsize + ll_bitpos == rl_bitpos
6473 && lr_bitsize + lr_bitpos == rr_bitpos)
6474 || (ll_bitpos == rl_bitpos + rl_bitsize
6475 && lr_bitpos == rr_bitpos + rr_bitsize))
6476 && ll_bitpos >= 0
6477 && rl_bitpos >= 0
6478 && lr_bitpos >= 0
6479 && rr_bitpos >= 0)
6480 {
6481 tree type;
6482
6483 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6484 ll_bitsize + rl_bitsize,
6485 MIN (ll_bitpos, rl_bitpos),
6486 ll_unsignedp, ll_reversep);
6487 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6488 lr_bitsize + rr_bitsize,
6489 MIN (lr_bitpos, rr_bitpos),
6490 lr_unsignedp, lr_reversep);
6491
6492 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6493 size_int (MIN (xll_bitpos, xrl_bitpos)));
6494 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6495 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6496
6497 /* Convert to the smaller type before masking out unwanted bits. */
6498 type = lntype;
6499 if (lntype != rntype)
6500 {
6501 if (lnbitsize > rnbitsize)
6502 {
6503 lhs = fold_convert_loc (loc, rntype, lhs);
6504 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6505 type = rntype;
6506 }
6507 else if (lnbitsize < rnbitsize)
6508 {
6509 rhs = fold_convert_loc (loc, lntype, rhs);
6510 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6511 type = lntype;
6512 }
6513 }
6514
6515 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6516 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6517
6518 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6519 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6520
6521 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6522 }
6523
6524 return 0;
6525 }
6526
6527 /* Handle the case of comparisons with constants. If there is something in
6528 common between the masks, those bits of the constants must be the same.
6529 If not, the condition is always false. Test for this to avoid generating
6530 incorrect code below. */
6531 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6532 if (! integer_zerop (result)
6533 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6534 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6535 {
6536 if (wanted_code == NE_EXPR)
6537 {
6538 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6539 return constant_boolean_node (true, truth_type);
6540 }
6541 else
6542 {
6543 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6544 return constant_boolean_node (false, truth_type);
6545 }
6546 }
6547
6548 if (lnbitpos < 0)
6549 return 0;
6550
6551 /* Construct the expression we will return. First get the component
6552 reference we will make. Unless the mask is all ones the width of
6553 that field, perform the mask operation. Then compare with the
6554 merged constant. */
6555 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6556 lntype, lnbitsize, lnbitpos,
6557 ll_unsignedp || rl_unsignedp, ll_reversep);
6558
6559 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6560 if (! all_ones_mask_p (ll_mask, lnbitsize))
6561 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6562
6563 return build2_loc (loc, wanted_code, truth_type, result,
6564 const_binop (BIT_IOR_EXPR, l_const, r_const));
6565 }
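/* A sketch of the kind of result the code above produces (the exact masks
   and bit positions are target and layout dependent): for
   "p->a == 2 && p->b == 4" where A and B are adjacent fields that fit in
   one mode, the two loads are replaced by a single wider bit-field
   reference of *P, masked if necessary, and compared against the merged
   constant built from 2 and 4 shifted to their respective positions.  */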
6566 \f
6567 /* T is an integer expression that is being multiplied by, divided by, or
6568 taken modulo a constant C (CODE says which operation and what kind of
6569 division or modulus). See if we can eliminate that operation by folding it with
6570 other operations already in T. WIDE_TYPE, if non-null, is a type that
6571 should be used for the computation if wider than our type.
6572
6573 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6574 (X * 2) + (Y * 4). We must, however, be assured that either the original
6575 expression would not overflow or that overflow is undefined for the type
6576 in the language in question.
6577
6578 If we return a non-null expression, it is an equivalent form of the
6579 original computation, but need not be in the original type.
6580
6581 We set *STRICT_OVERFLOW_P to true if the return value depends on
6582 signed overflow being undefined. Otherwise we do not change
6583 *STRICT_OVERFLOW_P. */
6584
6585 static tree
6586 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6587 bool *strict_overflow_p)
6588 {
6589 /* To avoid exponential search depth, refuse to allow recursion past
6590 three levels. Beyond that (1) it's highly unlikely that we'll find
6591 something interesting and (2) we've probably processed it before
6592 when we built the inner expression. */
6593
6594 static int depth;
6595 tree ret;
6596
6597 if (depth > 3)
6598 return NULL;
6599
6600 depth++;
6601 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6602 depth--;
6603
6604 return ret;
6605 }
6606
6607 static tree
6608 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6609 bool *strict_overflow_p)
6610 {
6611 tree type = TREE_TYPE (t);
6612 enum tree_code tcode = TREE_CODE (t);
6613 tree ctype = (wide_type != 0
6614 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6615 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6616 ? wide_type : type);
6617 tree t1, t2;
6618 int same_p = tcode == code;
6619 tree op0 = NULL_TREE, op1 = NULL_TREE;
6620 bool sub_strict_overflow_p;
6621
6622 /* Don't deal with constants of zero here; they confuse the code below. */
6623 if (integer_zerop (c))
6624 return NULL_TREE;
6625
6626 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6627 op0 = TREE_OPERAND (t, 0);
6628
6629 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6630 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6631
6632 /* Note that we need not handle conditional operations here since fold
6633 already handles those cases. So just do arithmetic here. */
6634 switch (tcode)
6635 {
6636 case INTEGER_CST:
6637 /* For a constant, we can always simplify if we are a multiply
6638 or (for divide and modulus) if it is a multiple of our constant. */
6639 if (code == MULT_EXPR
6640 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6641 TYPE_SIGN (type)))
6642 {
6643 tree tem = const_binop (code, fold_convert (ctype, t),
6644 fold_convert (ctype, c));
6645 /* If the multiplication overflowed, we lost information on it.
6646 See PR68142 and PR69845. */
6647 if (TREE_OVERFLOW (tem))
6648 return NULL_TREE;
6649 return tem;
6650 }
6651 break;
6652
6653 CASE_CONVERT: case NON_LVALUE_EXPR:
6654 /* If op0 is an expression ... */
6655 if ((COMPARISON_CLASS_P (op0)
6656 || UNARY_CLASS_P (op0)
6657 || BINARY_CLASS_P (op0)
6658 || VL_EXP_CLASS_P (op0)
6659 || EXPRESSION_CLASS_P (op0))
6660 /* ... and has wrapping overflow, and its type is smaller
6661 than ctype, then we cannot pass through as widening. */
6662 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6663 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6664 && (TYPE_PRECISION (ctype)
6665 > TYPE_PRECISION (TREE_TYPE (op0))))
6666 /* ... or this is a truncation (t is narrower than op0),
6667 then we cannot pass through this narrowing. */
6668 || (TYPE_PRECISION (type)
6669 < TYPE_PRECISION (TREE_TYPE (op0)))
6670 /* ... or signedness changes for division or modulus,
6671 then we cannot pass through this conversion. */
6672 || (code != MULT_EXPR
6673 && (TYPE_UNSIGNED (ctype)
6674 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6675 /* ... or has undefined overflow while the converted to
6676 type has not, we cannot do the operation in the inner type
6677 as that would introduce undefined overflow. */
6678 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6679 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6680 && !TYPE_OVERFLOW_UNDEFINED (type))))
6681 break;
6682
6683 /* Pass the constant down and see if we can make a simplification. If
6684 we can, replace this expression with the inner simplification for
6685 possible later conversion to our or some other type. */
6686 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6687 && TREE_CODE (t2) == INTEGER_CST
6688 && !TREE_OVERFLOW (t2)
6689 && (t1 = extract_muldiv (op0, t2, code,
6690 code == MULT_EXPR ? ctype : NULL_TREE,
6691 strict_overflow_p)) != 0)
6692 return t1;
6693 break;
6694
6695 case ABS_EXPR:
6696 /* If widening the type changes it from signed to unsigned, then we
6697 must avoid building ABS_EXPR itself as unsigned. */
6698 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6699 {
6700 tree cstype = (*signed_type_for) (ctype);
6701 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6702 != 0)
6703 {
6704 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6705 return fold_convert (ctype, t1);
6706 }
6707 break;
6708 }
6709 /* If the constant is negative, we cannot simplify this. */
6710 if (tree_int_cst_sgn (c) == -1)
6711 break;
6712 /* FALLTHROUGH */
6713 case NEGATE_EXPR:
6714 /* For division and modulus, type can't be unsigned, as e.g.
6715 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6716 For signed types, even with wrapping overflow, this is fine. */
6717 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6718 break;
6719 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6720 != 0)
6721 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6722 break;
6723
6724 case MIN_EXPR: case MAX_EXPR:
6725 /* If widening the type changes the signedness, then we can't perform
6726 this optimization as that changes the result. */
6727 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6728 break;
6729
6730 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6731 sub_strict_overflow_p = false;
6732 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6733 &sub_strict_overflow_p)) != 0
6734 && (t2 = extract_muldiv (op1, c, code, wide_type,
6735 &sub_strict_overflow_p)) != 0)
6736 {
6737 if (tree_int_cst_sgn (c) < 0)
6738 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6739 if (sub_strict_overflow_p)
6740 *strict_overflow_p = true;
6741 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6742 fold_convert (ctype, t2));
6743 }
6744 break;
6745
6746 case LSHIFT_EXPR: case RSHIFT_EXPR:
6747 /* If the second operand is constant, this is a multiplication
6748 or floor division by a power of two, so we can treat it that
6749 way unless the multiplier or divisor overflows. Signed
6750 left-shift overflow is implementation-defined rather than
6751 undefined in C90, so do not convert signed left shift into
6752 multiplication. */
6753 if (TREE_CODE (op1) == INTEGER_CST
6754 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6755 /* const_binop may not detect overflow correctly,
6756 so check for it explicitly here. */
6757 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6758 wi::to_wide (op1))
6759 && (t1 = fold_convert (ctype,
6760 const_binop (LSHIFT_EXPR, size_one_node,
6761 op1))) != 0
6762 && !TREE_OVERFLOW (t1))
6763 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6764 ? MULT_EXPR : FLOOR_DIV_EXPR,
6765 ctype,
6766 fold_convert (ctype, op0),
6767 t1),
6768 c, code, wide_type, strict_overflow_p);
6769 break;
6770
6771 case PLUS_EXPR: case MINUS_EXPR:
6772 /* See if we can eliminate the operation on both sides. If we can, we
6773 can return a new PLUS or MINUS. If we can't, the only remaining
6774 cases where we can do anything are if the second operand is a
6775 constant. */
6776 sub_strict_overflow_p = false;
6777 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6778 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6779 if (t1 != 0 && t2 != 0
6780 && TYPE_OVERFLOW_WRAPS (ctype)
6781 && (code == MULT_EXPR
6782 /* If not multiplication, we can only do this if both operands
6783 are divisible by c. */
6784 || (multiple_of_p (ctype, op0, c)
6785 && multiple_of_p (ctype, op1, c))))
6786 {
6787 if (sub_strict_overflow_p)
6788 *strict_overflow_p = true;
6789 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6790 fold_convert (ctype, t2));
6791 }
6792
6793 /* If this was a subtraction, negate OP1 and set it to be an addition.
6794 This simplifies the logic below. */
6795 if (tcode == MINUS_EXPR)
6796 {
6797 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6798 /* If OP1 was not easily negatable, the constant may be OP0. */
6799 if (TREE_CODE (op0) == INTEGER_CST)
6800 {
6801 std::swap (op0, op1);
6802 std::swap (t1, t2);
6803 }
6804 }
6805
6806 if (TREE_CODE (op1) != INTEGER_CST)
6807 break;
6808
6809 /* If either OP1 or C is negative, this optimization is not safe for
6810 some of the division and remainder types while for others we need
6811 to change the code. */
6812 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6813 {
6814 if (code == CEIL_DIV_EXPR)
6815 code = FLOOR_DIV_EXPR;
6816 else if (code == FLOOR_DIV_EXPR)
6817 code = CEIL_DIV_EXPR;
6818 else if (code != MULT_EXPR
6819 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6820 break;
6821 }
6822
6823 /* If it's a multiply or a division/modulus operation of a multiple
6824 of our constant, do the operation and verify it doesn't overflow. */
6825 if (code == MULT_EXPR
6826 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6827 TYPE_SIGN (type)))
6828 {
6829 op1 = const_binop (code, fold_convert (ctype, op1),
6830 fold_convert (ctype, c));
6831 /* We allow the constant to overflow with wrapping semantics. */
6832 if (op1 == 0
6833 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6834 break;
6835 }
6836 else
6837 break;
6838
6839 /* If we have an unsigned type, we cannot widen the operation since it
6840 will change the result if the original computation overflowed. */
6841 if (TYPE_UNSIGNED (ctype) && ctype != type)
6842 break;
6843
6844 /* The last case is if we are a multiply. In that case, we can
6845 apply the distributive law to commute the multiply and addition
6846 if the multiplication of the constants doesn't overflow
6847 and overflow is defined. With undefined overflow
6848 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6849 But fold_plusminus_mult_expr would factor back any power-of-two
6850 value so do not distribute in the first place in this case. */
6851 if (code == MULT_EXPR
6852 && TYPE_OVERFLOW_WRAPS (ctype)
6853 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6854 return fold_build2 (tcode, ctype,
6855 fold_build2 (code, ctype,
6856 fold_convert (ctype, op0),
6857 fold_convert (ctype, c)),
6858 op1);
6859
6860 break;
6861
6862 case MULT_EXPR:
6863 /* We have a special case here if we are doing something like
6864 (C * 8) % 4 since we know that's zero. */
6865 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6866 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6867 /* If the multiplication can overflow we cannot optimize this. */
6868 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6869 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6870 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6871 TYPE_SIGN (type)))
6872 {
6873 *strict_overflow_p = true;
6874 return omit_one_operand (type, integer_zero_node, op0);
6875 }
6876
6877 /* ... fall through ... */
6878
6879 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6880 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6881 /* If we can extract our operation from the LHS, do so and return a
6882 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6883 do something only if the second operand is a constant. */
6884 if (same_p
6885 && TYPE_OVERFLOW_WRAPS (ctype)
6886 && (t1 = extract_muldiv (op0, c, code, wide_type,
6887 strict_overflow_p)) != 0)
6888 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6889 fold_convert (ctype, op1));
6890 else if (tcode == MULT_EXPR && code == MULT_EXPR
6891 && TYPE_OVERFLOW_WRAPS (ctype)
6892 && (t1 = extract_muldiv (op1, c, code, wide_type,
6893 strict_overflow_p)) != 0)
6894 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6895 fold_convert (ctype, t1));
6896 else if (TREE_CODE (op1) != INTEGER_CST)
6897 return 0;
6898
6899 /* If these are the same operation types, we can associate them
6900 assuming no overflow. */
6901 if (tcode == code)
6902 {
6903 bool overflow_p = false;
6904 wi::overflow_type overflow_mul;
6905 signop sign = TYPE_SIGN (ctype);
6906 unsigned prec = TYPE_PRECISION (ctype);
6907 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6908 wi::to_wide (c, prec),
6909 sign, &overflow_mul);
6910 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6911 if (overflow_mul
6912 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6913 overflow_p = true;
6914 if (!overflow_p)
6915 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6916 wide_int_to_tree (ctype, mul));
6917 }
6918
6919 /* If these operations "cancel" each other, we have the main
6920 optimizations of this pass, which occur when either constant is a
6921 multiple of the other, in which case we replace this with an
6922 operation of either CODE or TCODE.
6923
6924 If we have an unsigned type, we cannot do this since it will change
6925 the result if the original computation overflowed. */
6926 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6927 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6928 || (tcode == MULT_EXPR
6929 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6930 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6931 && code != MULT_EXPR)))
6932 {
6933 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6934 TYPE_SIGN (type)))
6935 {
6936 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6937 *strict_overflow_p = true;
6938 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6939 fold_convert (ctype,
6940 const_binop (TRUNC_DIV_EXPR,
6941 op1, c)));
6942 }
6943 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6944 TYPE_SIGN (type)))
6945 {
6946 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6947 *strict_overflow_p = true;
6948 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6949 fold_convert (ctype,
6950 const_binop (TRUNC_DIV_EXPR,
6951 c, op1)));
6952 }
6953 }
6954 break;
6955
6956 default:
6957 break;
6958 }
6959
6960 return 0;
6961 }
6962 \f
6963 /* Return a node which has the indicated constant VALUE (either 0 or
6964 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6965 and is of the indicated TYPE. */
6966
6967 tree
6968 constant_boolean_node (bool value, tree type)
6969 {
6970 if (type == integer_type_node)
6971 return value ? integer_one_node : integer_zero_node;
6972 else if (type == boolean_type_node)
6973 return value ? boolean_true_node : boolean_false_node;
6974 else if (TREE_CODE (type) == VECTOR_TYPE)
6975 return build_vector_from_val (type,
6976 build_int_cst (TREE_TYPE (type),
6977 value ? -1 : 0));
6978 else
6979 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6980 }
6981
6982
6983 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6984 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6985 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6986 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6987 COND is the first argument to CODE; otherwise (as in the example
6988 given here), it is the second argument. TYPE is the type of the
6989 original expression. Return NULL_TREE if no simplification is
6990 possible. */
6991
6992 static tree
6993 fold_binary_op_with_conditional_arg (location_t loc,
6994 enum tree_code code,
6995 tree type, tree op0, tree op1,
6996 tree cond, tree arg, int cond_first_p)
6997 {
6998 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6999 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7000 tree test, true_value, false_value;
7001 tree lhs = NULL_TREE;
7002 tree rhs = NULL_TREE;
7003 enum tree_code cond_code = COND_EXPR;
7004
7005 /* Do not move possibly trapping operations into the conditional as this
7006 pessimizes code and causes gimplification issues when applied late. */
7007 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7008 ANY_INTEGRAL_TYPE_P (type)
7009 && TYPE_OVERFLOW_TRAPS (type), op1))
7010 return NULL_TREE;
7011
7012 if (TREE_CODE (cond) == COND_EXPR
7013 || TREE_CODE (cond) == VEC_COND_EXPR)
7014 {
7015 test = TREE_OPERAND (cond, 0);
7016 true_value = TREE_OPERAND (cond, 1);
7017 false_value = TREE_OPERAND (cond, 2);
7018 /* If this operand is an expression with void type (such as a
7019 throw expression), it does not make sense to try to perform
7020 a logical or arithmetic operation involving it. */
7021 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7022 lhs = true_value;
7023 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7024 rhs = false_value;
7025 }
7026 else if (!(TREE_CODE (type) != VECTOR_TYPE
7027 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7028 {
7029 tree testtype = TREE_TYPE (cond);
7030 test = cond;
7031 true_value = constant_boolean_node (true, testtype);
7032 false_value = constant_boolean_node (false, testtype);
7033 }
7034 else
7035 /* Detect the case of mixing vector and scalar types - bail out. */
7036 return NULL_TREE;
7037
7038 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7039 cond_code = VEC_COND_EXPR;
7040
7041 /* This transformation is only worthwhile if we don't have to wrap ARG
7042 in a SAVE_EXPR and the operation can be simplified without recursing
7043 on at least one of the branches once it's pushed inside the COND_EXPR. */
7044 if (!TREE_CONSTANT (arg)
7045 && (TREE_SIDE_EFFECTS (arg)
7046 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7047 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7048 return NULL_TREE;
7049
7050 arg = fold_convert_loc (loc, arg_type, arg);
7051 if (lhs == 0)
7052 {
7053 true_value = fold_convert_loc (loc, cond_type, true_value);
7054 if (cond_first_p)
7055 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7056 else
7057 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7058 }
7059 if (rhs == 0)
7060 {
7061 false_value = fold_convert_loc (loc, cond_type, false_value);
7062 if (cond_first_p)
7063 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7064 else
7065 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7066 }
7067
7068 /* Check that we have simplified at least one of the branches. */
7069 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7070 return NULL_TREE;
7071
7072 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7073 }
7074
7075 \f
7076 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7077
7078 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7079 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
7080 ADDEND is the same as X.
7081
7082 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7083 and finite. The problematic cases are when X is zero, and its mode
7084 has signed zeros. In the case of rounding towards -infinity,
7085 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7086 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7087
7088 bool
7089 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7090 {
7091 if (!real_zerop (addend))
7092 return false;
7093
7094 /* Don't allow the fold with -fsignaling-nans. */
7095 if (HONOR_SNANS (type))
7096 return false;
7097
7098 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7099 if (!HONOR_SIGNED_ZEROS (type))
7100 return true;
7101
7102 /* There is no case that is safe for all rounding modes. */
7103 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7104 return false;
7105
7106 /* In a vector or complex, we would need to check the sign of all zeros. */
7107 if (TREE_CODE (addend) == VECTOR_CST)
7108 addend = uniform_vector_p (addend);
7109 if (!addend || TREE_CODE (addend) != REAL_CST)
7110 return false;
7111
7112 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7113 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7114 negate = !negate;
7115
7116 /* The mode has signed zeros, and we have to honor their sign.
7117 In this situation, there is only one case we can return true for.
7118 X - 0 is the same as X with default rounding. */
7119 return negate;
7120 }
7121
7122 /* Subroutine of match.pd that optimizes comparisons of a division by
7123 a nonzero integer constant against an integer constant, i.e.
7124 X/C1 op C2.
7125
7126 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7127 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
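/* For example, with unsigned C1 == 3 and C2 == 2, X/3 == 2 holds
exactly for X in [6, 8], so *LO is set to 6 and *HI to 8. */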
7128
7129 enum tree_code
7130 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7131 tree *hi, bool *neg_overflow)
7132 {
7133 tree prod, tmp, type = TREE_TYPE (c1);
7134 signop sign = TYPE_SIGN (type);
7135 wi::overflow_type overflow;
7136
7137 /* We have to do this the hard way to detect unsigned overflow.
7138 prod = int_const_binop (MULT_EXPR, c1, c2); */
7139 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7140 prod = force_fit_type (type, val, -1, overflow);
7141 *neg_overflow = false;
7142
7143 if (sign == UNSIGNED)
7144 {
7145 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7146 *lo = prod;
7147
7148 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7149 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7150 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7151 }
7152 else if (tree_int_cst_sgn (c1) >= 0)
7153 {
7154 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7155 switch (tree_int_cst_sgn (c2))
7156 {
7157 case -1:
7158 *neg_overflow = true;
7159 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7160 *hi = prod;
7161 break;
7162
7163 case 0:
7164 *lo = fold_negate_const (tmp, type);
7165 *hi = tmp;
7166 break;
7167
7168 case 1:
7169 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7170 *lo = prod;
7171 break;
7172
7173 default:
7174 gcc_unreachable ();
7175 }
7176 }
7177 else
7178 {
7179 /* A negative divisor reverses the relational operators. */
7180 code = swap_tree_comparison (code);
7181
7182 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7183 switch (tree_int_cst_sgn (c2))
7184 {
7185 case -1:
7186 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7187 *lo = prod;
7188 break;
7189
7190 case 0:
7191 *hi = fold_negate_const (tmp, type);
7192 *lo = tmp;
7193 break;
7194
7195 case 1:
7196 *neg_overflow = true;
7197 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7198 *hi = prod;
7199 break;
7200
7201 default:
7202 gcc_unreachable ();
7203 }
7204 }
7205
7206 if (code != EQ_EXPR && code != NE_EXPR)
7207 return code;
7208
7209 if (TREE_OVERFLOW (*lo)
7210 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7211 *lo = NULL_TREE;
7212 if (TREE_OVERFLOW (*hi)
7213 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7214 *hi = NULL_TREE;
7215
7216 return code;
7217 }
7218
7219
7220 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7221 equality/inequality test, then return a simplified form of the test
7222 using a sign test. Otherwise return NULL. RESULT_TYPE is the
7223 desired result type. */
7224
7225 static tree
7226 fold_single_bit_test_into_sign_test (location_t loc,
7227 enum tree_code code, tree arg0, tree arg1,
7228 tree result_type)
7229 {
7230 /* If this is testing a single bit, we can optimize the test. */
7231 if ((code == NE_EXPR || code == EQ_EXPR)
7232 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7233 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7234 {
7235 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7236 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7237 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7238
7239 if (arg00 != NULL_TREE
7240 /* This is only a win if casting to a signed type is cheap,
7241 i.e. when arg00's type is not a partial mode. */
7242 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7243 {
7244 tree stype = signed_type_for (TREE_TYPE (arg00));
7245 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7246 result_type,
7247 fold_convert_loc (loc, stype, arg00),
7248 build_int_cst (stype, 0));
7249 }
7250 }
7251
7252 return NULL_TREE;
7253 }
7254
7255 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7256 equality/inequality test, then return a simplified form of
7257 the test using shifts and logical operations. Otherwise return
7258 NULL. RESULT_TYPE is the desired result type. */
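/* For example, (A & 4) != 0 becomes ((A >> 2) & 1) and
(A & 4) == 0 becomes (((A >> 2) ^ 1) & 1), modulo the type
conversions performed below. */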
7259
7260 tree
7261 fold_single_bit_test (location_t loc, enum tree_code code,
7262 tree arg0, tree arg1, tree result_type)
7263 {
7264 /* If this is testing a single bit, we can optimize the test. */
7265 if ((code == NE_EXPR || code == EQ_EXPR)
7266 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7267 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7268 {
7269 tree inner = TREE_OPERAND (arg0, 0);
7270 tree type = TREE_TYPE (arg0);
7271 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7272 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7273 int ops_unsigned;
7274 tree signed_type, unsigned_type, intermediate_type;
7275 tree tem, one;
7276
7277 /* First, see if we can fold the single bit test into a sign-bit
7278 test. */
7279 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7280 result_type);
7281 if (tem)
7282 return tem;
7283
7284 /* Otherwise we have (A & C) != 0 where C is a single bit,
7285 convert that into ((A >> C2) & 1), where C2 = log2(C).
7286 Similarly for (A & C) == 0. */
7287
7288 /* If INNER is a right shift of a constant and it plus BITNUM does
7289 not overflow, adjust BITNUM and INNER. */
7290 if (TREE_CODE (inner) == RSHIFT_EXPR
7291 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7292 && bitnum < TYPE_PRECISION (type)
7293 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7294 TYPE_PRECISION (type) - bitnum))
7295 {
7296 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7297 inner = TREE_OPERAND (inner, 0);
7298 }
7299
7300 /* If we are going to be able to omit the AND below, we must do our
7301 operations as unsigned. If we must use the AND, we have a choice.
7302 Normally unsigned is faster, but for some machines signed is. */
7303 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7304 && !flag_syntax_only) ? 0 : 1;
7305
7306 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7307 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7308 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7309 inner = fold_convert_loc (loc, intermediate_type, inner);
7310
7311 if (bitnum != 0)
7312 inner = build2 (RSHIFT_EXPR, intermediate_type,
7313 inner, size_int (bitnum));
7314
7315 one = build_int_cst (intermediate_type, 1);
7316
7317 if (code == EQ_EXPR)
7318 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7319
7320 /* Put the AND last so it can combine with more things. */
7321 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7322
7323 /* Make sure to return the proper type. */
7324 inner = fold_convert_loc (loc, result_type, inner);
7325
7326 return inner;
7327 }
7328 return NULL_TREE;
7329 }
7330
7331 /* Test whether it is preferable to swap two operands, ARG0 and
7332 ARG1, for example because ARG0 is an integer constant and ARG1
7333 isn't. */
7334
7335 bool
7336 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7337 {
7338 if (CONSTANT_CLASS_P (arg1))
7339 return 0;
7340 if (CONSTANT_CLASS_P (arg0))
7341 return 1;
7342
7343 STRIP_NOPS (arg0);
7344 STRIP_NOPS (arg1);
7345
7346 if (TREE_CONSTANT (arg1))
7347 return 0;
7348 if (TREE_CONSTANT (arg0))
7349 return 1;
7350
7351 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7352 for commutative and comparison operators. Ensuring a canonical
7353 form allows the optimizers to find additional redundancies without
7354 having to explicitly check for both orderings. */
7355 if (TREE_CODE (arg0) == SSA_NAME
7356 && TREE_CODE (arg1) == SSA_NAME
7357 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7358 return 1;
7359
7360 /* Put SSA_NAMEs last. */
7361 if (TREE_CODE (arg1) == SSA_NAME)
7362 return 0;
7363 if (TREE_CODE (arg0) == SSA_NAME)
7364 return 1;
7365
7366 /* Put variables last. */
7367 if (DECL_P (arg1))
7368 return 0;
7369 if (DECL_P (arg0))
7370 return 1;
7371
7372 return 0;
7373 }
7374
7375
7376 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7377 means A >= Y && A != MAX, but in this case we know that
7378 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
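/* For instance, for ints I, J and N, "I < N && I + 1 > J" can become
"I < N && I >= J", since I < N rules out I == INT_MAX and so
I + 1 cannot wrap. */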
7379
7380 static tree
7381 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7382 {
7383 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7384
7385 if (TREE_CODE (bound) == LT_EXPR)
7386 a = TREE_OPERAND (bound, 0);
7387 else if (TREE_CODE (bound) == GT_EXPR)
7388 a = TREE_OPERAND (bound, 1);
7389 else
7390 return NULL_TREE;
7391
7392 typea = TREE_TYPE (a);
7393 if (!INTEGRAL_TYPE_P (typea)
7394 && !POINTER_TYPE_P (typea))
7395 return NULL_TREE;
7396
7397 if (TREE_CODE (ineq) == LT_EXPR)
7398 {
7399 a1 = TREE_OPERAND (ineq, 1);
7400 y = TREE_OPERAND (ineq, 0);
7401 }
7402 else if (TREE_CODE (ineq) == GT_EXPR)
7403 {
7404 a1 = TREE_OPERAND (ineq, 0);
7405 y = TREE_OPERAND (ineq, 1);
7406 }
7407 else
7408 return NULL_TREE;
7409
7410 if (TREE_TYPE (a1) != typea)
7411 return NULL_TREE;
7412
7413 if (POINTER_TYPE_P (typea))
7414 {
7415 /* Convert the pointers to integers before taking the difference. */
7416 tree ta = fold_convert_loc (loc, ssizetype, a);
7417 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7418 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7419 }
7420 else
7421 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7422
7423 if (!diff || !integer_onep (diff))
7424 return NULL_TREE;
7425
7426 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7427 }
7428
7429 /* Fold a sum or difference of at least one multiplication.
7430 Returns the folded tree or NULL if no simplification could be made. */
7431
7432 static tree
7433 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7434 tree arg0, tree arg1)
7435 {
7436 tree arg00, arg01, arg10, arg11;
7437 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7438
7439 /* (A * C) +- (B * C) -> (A+-B) * C.
7440 (A * C) +- A -> A * (C+-1).
7441 We are most concerned about the case where C is a constant,
7442 but other combinations show up during loop reduction. Since
7443 it is not difficult, try all four possibilities. */
7444
7445 if (TREE_CODE (arg0) == MULT_EXPR)
7446 {
7447 arg00 = TREE_OPERAND (arg0, 0);
7448 arg01 = TREE_OPERAND (arg0, 1);
7449 }
7450 else if (TREE_CODE (arg0) == INTEGER_CST)
7451 {
7452 arg00 = build_one_cst (type);
7453 arg01 = arg0;
7454 }
7455 else
7456 {
7457 /* We cannot generate constant 1 for fract. */
7458 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7459 return NULL_TREE;
7460 arg00 = arg0;
7461 arg01 = build_one_cst (type);
7462 }
7463 if (TREE_CODE (arg1) == MULT_EXPR)
7464 {
7465 arg10 = TREE_OPERAND (arg1, 0);
7466 arg11 = TREE_OPERAND (arg1, 1);
7467 }
7468 else if (TREE_CODE (arg1) == INTEGER_CST)
7469 {
7470 arg10 = build_one_cst (type);
7471 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7472 the purpose of this canonicalization. */
7473 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7474 && negate_expr_p (arg1)
7475 && code == PLUS_EXPR)
7476 {
7477 arg11 = negate_expr (arg1);
7478 code = MINUS_EXPR;
7479 }
7480 else
7481 arg11 = arg1;
7482 }
7483 else
7484 {
7485 /* We cannot generate constant 1 for fract. */
7486 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7487 return NULL_TREE;
7488 arg10 = arg1;
7489 arg11 = build_one_cst (type);
7490 }
7491 same = NULL_TREE;
7492
7493 /* Prefer factoring a common non-constant. */
7494 if (operand_equal_p (arg00, arg10, 0))
7495 same = arg00, alt0 = arg01, alt1 = arg11;
7496 else if (operand_equal_p (arg01, arg11, 0))
7497 same = arg01, alt0 = arg00, alt1 = arg10;
7498 else if (operand_equal_p (arg00, arg11, 0))
7499 same = arg00, alt0 = arg01, alt1 = arg10;
7500 else if (operand_equal_p (arg01, arg10, 0))
7501 same = arg01, alt0 = arg00, alt1 = arg11;
7502
7503 /* No identical multiplicands; see if we can find a common
7504 power-of-two factor in non-power-of-two multiplies. This
7505 can help in multi-dimensional array access. */
7506 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7507 {
7508 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7509 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7510 HOST_WIDE_INT tmp;
7511 bool swap = false;
7512 tree maybe_same;
7513
7514 /* Move min of absolute values to int11. */
7515 if (absu_hwi (int01) < absu_hwi (int11))
7516 {
7517 tmp = int01, int01 = int11, int11 = tmp;
7518 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7519 maybe_same = arg01;
7520 swap = true;
7521 }
7522 else
7523 maybe_same = arg11;
7524
7525 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7526 if (factor > 1
7527 && pow2p_hwi (factor)
7528 && (int01 & (factor - 1)) == 0
7529 /* The remainder should not be a constant, otherwise we
7530 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7531 increase the number of multiplications needed. */
7532 && TREE_CODE (arg10) != INTEGER_CST)
7533 {
7534 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7535 build_int_cst (TREE_TYPE (arg00),
7536 int01 / int11));
7537 alt1 = arg10;
7538 same = maybe_same;
7539 if (swap)
7540 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7541 }
7542 }
7543
7544 if (!same)
7545 return NULL_TREE;
7546
7547 if (! ANY_INTEGRAL_TYPE_P (type)
7548 || TYPE_OVERFLOW_WRAPS (type)
7549 /* We are neither factoring zero nor minus one. */
7550 || TREE_CODE (same) == INTEGER_CST)
7551 return fold_build2_loc (loc, MULT_EXPR, type,
7552 fold_build2_loc (loc, code, type,
7553 fold_convert_loc (loc, type, alt0),
7554 fold_convert_loc (loc, type, alt1)),
7555 fold_convert_loc (loc, type, same));
7556
7557 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7558 same may be minus one and thus the multiplication may overflow. Perform
7559 the sum operation in an unsigned type. */
7560 tree utype = unsigned_type_for (type);
7561 tree tem = fold_build2_loc (loc, code, utype,
7562 fold_convert_loc (loc, utype, alt0),
7563 fold_convert_loc (loc, utype, alt1));
7564 /* If the sum evaluated to a constant that is not -INF, the multiplication
7565 cannot overflow. */
7566 if (TREE_CODE (tem) == INTEGER_CST
7567 && (wi::to_wide (tem)
7568 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7569 return fold_build2_loc (loc, MULT_EXPR, type,
7570 fold_convert (type, tem), same);
7571
7572 /* Do not resort to unsigned multiplication because
7573 we lose the no-overflow property of the expression. */
7574 return NULL_TREE;
7575 }
7576
7577 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7578 specified by EXPR into the buffer PTR of length LEN bytes.
7579 Return the number of bytes placed in the buffer, or zero
7580 upon failure. */
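/* For example, the 32-bit INTEGER_CST 0x01020304 is encoded as the
bytes { 0x04, 0x03, 0x02, 0x01 } for a little-endian target and
as { 0x01, 0x02, 0x03, 0x04 } for a big-endian one. */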
7581
7582 static int
7583 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7584 {
7585 tree type = TREE_TYPE (expr);
7586 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7587 int byte, offset, word, words;
7588 unsigned char value;
7589
7590 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7591 return 0;
7592 if (off == -1)
7593 off = 0;
7594
7595 if (ptr == NULL)
7596 /* Dry run. */
7597 return MIN (len, total_bytes - off);
7598
7599 words = total_bytes / UNITS_PER_WORD;
7600
7601 for (byte = 0; byte < total_bytes; byte++)
7602 {
7603 int bitpos = byte * BITS_PER_UNIT;
7604 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7605 number of bytes. */
7606 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7607
7608 if (total_bytes > UNITS_PER_WORD)
7609 {
7610 word = byte / UNITS_PER_WORD;
7611 if (WORDS_BIG_ENDIAN)
7612 word = (words - 1) - word;
7613 offset = word * UNITS_PER_WORD;
7614 if (BYTES_BIG_ENDIAN)
7615 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7616 else
7617 offset += byte % UNITS_PER_WORD;
7618 }
7619 else
7620 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7621 if (offset >= off && offset - off < len)
7622 ptr[offset - off] = value;
7623 }
7624 return MIN (len, total_bytes - off);
7625 }
7626
7627
7628 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7629 specified by EXPR into the buffer PTR of length LEN bytes.
7630 Return the number of bytes placed in the buffer, or zero
7631 upon failure. */
7632
7633 static int
7634 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7635 {
7636 tree type = TREE_TYPE (expr);
7637 scalar_mode mode = SCALAR_TYPE_MODE (type);
7638 int total_bytes = GET_MODE_SIZE (mode);
7639 FIXED_VALUE_TYPE value;
7640 tree i_value, i_type;
7641
7642 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7643 return 0;
7644
7645 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7646
7647 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes)
7648 return 0;
7649
7650 value = TREE_FIXED_CST (expr);
7651 i_value = double_int_to_tree (i_type, value.data);
7652
7653 return native_encode_int (i_value, ptr, len, off);
7654 }
7655
7656
7657 /* Subroutine of native_encode_expr. Encode the REAL_CST
7658 specified by EXPR into the buffer PTR of length LEN bytes.
7659 Return the number of bytes placed in the buffer, or zero
7660 upon failure. */
7661
7662 static int
7663 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7664 {
7665 tree type = TREE_TYPE (expr);
7666 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7667 int byte, offset, word, words, bitpos;
7668 unsigned char value;
7669
7670 /* There are always 32 bits in each long, no matter the size of
7671 the host's long. We handle floating point representations with
7672 up to 192 bits. */
7673 long tmp[6];
7674
7675 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7676 return 0;
7677 if (off == -1)
7678 off = 0;
7679
7680 if (ptr == NULL)
7681 /* Dry run. */
7682 return MIN (len, total_bytes - off);
7683
7684 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7685
7686 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7687
7688 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7689 bitpos += BITS_PER_UNIT)
7690 {
7691 byte = (bitpos / BITS_PER_UNIT) & 3;
7692 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7693
7694 if (UNITS_PER_WORD < 4)
7695 {
7696 word = byte / UNITS_PER_WORD;
7697 if (WORDS_BIG_ENDIAN)
7698 word = (words - 1) - word;
7699 offset = word * UNITS_PER_WORD;
7700 if (BYTES_BIG_ENDIAN)
7701 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7702 else
7703 offset += byte % UNITS_PER_WORD;
7704 }
7705 else
7706 {
7707 offset = byte;
7708 if (BYTES_BIG_ENDIAN)
7709 {
7710 /* Reverse bytes within each long, or within the entire float
7711 if it's smaller than a long (for HFmode). */
7712 offset = MIN (3, total_bytes - 1) - offset;
7713 gcc_assert (offset >= 0);
7714 }
7715 }
7716 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7717 if (offset >= off
7718 && offset - off < len)
7719 ptr[offset - off] = value;
7720 }
7721 return MIN (len, total_bytes - off);
7722 }
7723
7724 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7725 specified by EXPR into the buffer PTR of length LEN bytes.
7726 Return the number of bytes placed in the buffer, or zero
7727 upon failure. */
7728
7729 static int
7730 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7731 {
7732 int rsize, isize;
7733 tree part;
7734
7735 part = TREE_REALPART (expr);
7736 rsize = native_encode_expr (part, ptr, len, off);
7737 if (off == -1 && rsize == 0)
7738 return 0;
7739 part = TREE_IMAGPART (expr);
7740 if (off != -1)
7741 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7742 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7743 len - rsize, off);
7744 if (off == -1 && isize != rsize)
7745 return 0;
7746 return rsize + isize;
7747 }
7748
7749 /* Like native_encode_vector, but only encode the first COUNT elements.
7750 The other arguments are as for native_encode_vector. */
7751
7752 static int
7753 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7754 int off, unsigned HOST_WIDE_INT count)
7755 {
7756 tree itype = TREE_TYPE (TREE_TYPE (expr));
7757 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7758 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7759 {
7760 /* This is the only case in which elements can be smaller than a byte.
7761 Element 0 is always in the lsb of the containing byte. */
7762 unsigned int elt_bits = TYPE_PRECISION (itype);
7763 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7764 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7765 return 0;
7766
7767 if (off == -1)
7768 off = 0;
7769
7770 /* Zero the buffer and then set bits later where necessary. */
7771 int extract_bytes = MIN (len, total_bytes - off);
7772 if (ptr)
7773 memset (ptr, 0, extract_bytes);
7774
7775 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7776 unsigned int first_elt = off * elts_per_byte;
7777 unsigned int extract_elts = extract_bytes * elts_per_byte;
7778 for (unsigned int i = 0; i < extract_elts; ++i)
7779 {
7780 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7781 if (TREE_CODE (elt) != INTEGER_CST)
7782 return 0;
7783
7784 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7785 {
7786 unsigned int bit = i * elt_bits;
7787 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7788 }
7789 }
7790 return extract_bytes;
7791 }
7792
7793 int offset = 0;
7794 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7795 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7796 {
7797 if (off >= size)
7798 {
7799 off -= size;
7800 continue;
7801 }
7802 tree elem = VECTOR_CST_ELT (expr, i);
7803 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7804 len - offset, off);
7805 if ((off == -1 && res != size) || res == 0)
7806 return 0;
7807 offset += res;
7808 if (offset >= len)
7809 return (off == -1 && i < count - 1) ? 0 : offset;
7810 if (off != -1)
7811 off = 0;
7812 }
7813 return offset;
7814 }
7815
7816 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7817 specified by EXPR into the buffer PTR of length LEN bytes.
7818 Return the number of bytes placed in the buffer, or zero
7819 upon failure. */
7820
7821 static int
7822 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7823 {
7824 unsigned HOST_WIDE_INT count;
7825 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7826 return 0;
7827 return native_encode_vector_part (expr, ptr, len, off, count);
7828 }
7829
7830
7831 /* Subroutine of native_encode_expr. Encode the STRING_CST
7832 specified by EXPR into the buffer PTR of length LEN bytes.
7833 Return the number of bytes placed in the buffer, or zero
7834 upon failure. */
7835
7836 static int
7837 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7838 {
7839 tree type = TREE_TYPE (expr);
7840
7841 /* Wide-char strings are encoded in target byte-order, so encoding
7842 them natively is trivial. */
7843 if (BITS_PER_UNIT != CHAR_BIT
7844 || TREE_CODE (type) != ARRAY_TYPE
7845 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7846 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7847 return 0;
7848
7849 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7850 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7851 return 0;
7852 if (off == -1)
7853 off = 0;
7854 len = MIN (total_bytes - off, len);
7855 if (ptr == NULL)
7856 /* Dry run. */;
7857 else
7858 {
7859 int written = 0;
7860 if (off < TREE_STRING_LENGTH (expr))
7861 {
7862 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7863 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7864 }
7865 memset (ptr + written, 0, len - written);
7866 }
7867 return len;
7868 }
7869
7870
7871 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7872 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7873 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7874 anything, just do a dry run. If OFF is not -1 then start
7875 the encoding at byte offset OFF and encode at most LEN bytes.
7876 Return the number of bytes placed in the buffer, or zero upon failure. */
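/* For example, fold_view_convert_expr below calls
native_encode_expr (expr, buffer, sizeof (buffer)) to obtain the
target byte image of EXPR starting at offset 0. */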
7877
7878 int
7879 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7880 {
7881 /* We don't support starting at a negative offset, and -1 is special. */
7882 if (off < -1)
7883 return 0;
7884
7885 switch (TREE_CODE (expr))
7886 {
7887 case INTEGER_CST:
7888 return native_encode_int (expr, ptr, len, off);
7889
7890 case REAL_CST:
7891 return native_encode_real (expr, ptr, len, off);
7892
7893 case FIXED_CST:
7894 return native_encode_fixed (expr, ptr, len, off);
7895
7896 case COMPLEX_CST:
7897 return native_encode_complex (expr, ptr, len, off);
7898
7899 case VECTOR_CST:
7900 return native_encode_vector (expr, ptr, len, off);
7901
7902 case STRING_CST:
7903 return native_encode_string (expr, ptr, len, off);
7904
7905 default:
7906 return 0;
7907 }
7908 }
7909
7910 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
7911 NON_LVALUE_EXPRs and nops. */
7912
7913 int
7914 native_encode_initializer (tree init, unsigned char *ptr, int len,
7915 int off)
7916 {
7917 /* We don't support starting at a negative offset, and -1 is special. */
7918 if (off < -1 || init == NULL_TREE)
7919 return 0;
7920
7921 STRIP_NOPS (init);
7922 switch (TREE_CODE (init))
7923 {
7924 case VIEW_CONVERT_EXPR:
7925 case NON_LVALUE_EXPR:
7926 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off);
7927 default:
7928 return native_encode_expr (init, ptr, len, off);
7929 case CONSTRUCTOR:
7930 tree type = TREE_TYPE (init);
7931 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
7932 if (total_bytes < 0)
7933 return 0;
7934 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7935 return 0;
7936 int o = off == -1 ? 0 : off;
7937 if (TREE_CODE (type) == ARRAY_TYPE)
7938 {
7939 HOST_WIDE_INT min_index;
7940 unsigned HOST_WIDE_INT cnt;
7941 HOST_WIDE_INT curpos = 0, fieldsize;
7942 constructor_elt *ce;
7943
7944 if (TYPE_DOMAIN (type) == NULL_TREE
7945 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
7946 return 0;
7947
7948 fieldsize = int_size_in_bytes (TREE_TYPE (type));
7949 if (fieldsize <= 0)
7950 return 0;
7951
7952 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
7953 if (ptr != NULL)
7954 memset (ptr, '\0', MIN (total_bytes - off, len));
7955
7956 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
7957 {
7958 tree val = ce->value;
7959 tree index = ce->index;
7960 HOST_WIDE_INT pos = curpos, count = 0;
7961 bool full = false;
7962 if (index && TREE_CODE (index) == RANGE_EXPR)
7963 {
7964 if (!tree_fits_shwi_p (TREE_OPERAND (index, 0))
7965 || !tree_fits_shwi_p (TREE_OPERAND (index, 1)))
7966 return 0;
7967 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
7968 * fieldsize;
7969 count = (tree_to_shwi (TREE_OPERAND (index, 1))
7970 - tree_to_shwi (TREE_OPERAND (index, 0)));
7971 }
7972 else if (index)
7973 {
7974 if (!tree_fits_shwi_p (index))
7975 return 0;
7976 pos = (tree_to_shwi (index) - min_index) * fieldsize;
7977 }
7978
7979 curpos = pos;
7980 if (val)
7981 do
7982 {
7983 if (off == -1
7984 || (curpos >= off
7985 && (curpos + fieldsize
7986 <= (HOST_WIDE_INT) off + len)))
7987 {
7988 if (full)
7989 {
7990 if (ptr)
7991 memcpy (ptr + (curpos - o), ptr + (pos - o),
7992 fieldsize);
7993 }
7994 else if (!native_encode_initializer (val,
7995 ptr
7996 ? ptr + curpos - o
7997 : NULL,
7998 fieldsize,
7999 off == -1 ? -1
8000 : 0))
8001 return 0;
8002 else
8003 {
8004 full = true;
8005 pos = curpos;
8006 }
8007 }
8008 else if (curpos + fieldsize > off
8009 && curpos < (HOST_WIDE_INT) off + len)
8010 {
8011 /* Partial overlap. */
8012 unsigned char *p = NULL;
8013 int no = 0;
8014 int l;
8015 if (curpos >= off)
8016 {
8017 if (ptr)
8018 p = ptr + curpos - off;
8019 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8020 fieldsize);
8021 }
8022 else
8023 {
8024 p = ptr;
8025 no = off - curpos;
8026 l = len;
8027 }
8028 if (!native_encode_initializer (val, p, l, no))
8029 return 0;
8030 }
8031 curpos += fieldsize;
8032 }
8033 while (count-- != 0);
8034 }
8035 return MIN (total_bytes - off, len);
8036 }
8037 else if (TREE_CODE (type) == RECORD_TYPE
8038 || TREE_CODE (type) == UNION_TYPE)
8039 {
8040 unsigned HOST_WIDE_INT cnt;
8041 constructor_elt *ce;
8042
8043 if (ptr != NULL)
8044 memset (ptr, '\0', MIN (total_bytes - off, len));
8045 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
8046 {
8047 tree field = ce->index;
8048 tree val = ce->value;
8049 HOST_WIDE_INT pos, fieldsize;
8050 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8051
8052 if (field == NULL_TREE)
8053 return 0;
8054
8055 pos = int_byte_position (field);
8056 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8057 continue;
8058
8059 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8060 && TYPE_DOMAIN (TREE_TYPE (field))
8061 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8062 return 0;
8063 if (DECL_SIZE_UNIT (field) == NULL_TREE
8064 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8065 return 0;
8066 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8067 if (fieldsize == 0)
8068 continue;
8069
8070 if (DECL_BIT_FIELD (field))
8071 {
8072 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8073 return 0;
8074 fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8075 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8076 if (bpos % BITS_PER_UNIT)
8077 bpos %= BITS_PER_UNIT;
8078 else
8079 bpos = 0;
8080 fieldsize += bpos;
8081 epos = fieldsize % BITS_PER_UNIT;
8082 fieldsize += BITS_PER_UNIT - 1;
8083 fieldsize /= BITS_PER_UNIT;
8084 }
8085
8086 if (off != -1 && pos + fieldsize <= off)
8087 continue;
8088
8089 if (val == NULL_TREE)
8090 continue;
8091
8092 if (DECL_BIT_FIELD (field))
8093 {
8094 /* FIXME: Handle PDP endian. */
8095 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8096 return 0;
8097
8098 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8099 if (repr == NULL_TREE
8100 || TREE_CODE (val) != INTEGER_CST
8101 || !INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8102 return 0;
8103
8104 HOST_WIDE_INT rpos = int_byte_position (repr);
8105 if (rpos > pos)
8106 return 0;
8107 wide_int w = wi::to_wide (val,
8108 TYPE_PRECISION (TREE_TYPE (repr)));
8109 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8110 - TYPE_PRECISION (TREE_TYPE (field)));
8111 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8112 if (!BYTES_BIG_ENDIAN)
8113 w = wi::lshift (w, bitoff);
8114 else
8115 w = wi::lshift (w, diff - bitoff);
8116 val = wide_int_to_tree (TREE_TYPE (repr), w);
8117
8118 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8119 / BITS_PER_UNIT + 1];
8120 int l = native_encode_int (val, buf, sizeof buf, 0);
8121 if (l * BITS_PER_UNIT != TYPE_PRECISION (TREE_TYPE (repr)))
8122 return 0;
8123
8124 if (ptr == NULL)
8125 continue;
8126
8127 /* If the bitfield does not start at a byte boundary, handle
8128 the partial byte at the start. */
8129 if (bpos
8130 && (off == -1 || (pos >= off && len >= 1)))
8131 {
8132 if (!BYTES_BIG_ENDIAN)
8133 {
8134 int mask = (1 << bpos) - 1;
8135 buf[pos - rpos] &= ~mask;
8136 buf[pos - rpos] |= ptr[pos - o] & mask;
8137 }
8138 else
8139 {
8140 int mask = (1 << (BITS_PER_UNIT - bpos)) - 1;
8141 buf[pos - rpos] &= mask;
8142 buf[pos - rpos] |= ptr[pos - o] & ~mask;
8143 }
8144 }
8145 /* If the bitfield does not end at a byte boundary, handle
8146 the partial byte at the end. */
8147 if (epos
8148 && (off == -1
8149 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8150 {
8151 if (!BYTES_BIG_ENDIAN)
8152 {
8153 int mask = (1 << epos) - 1;
8154 buf[pos - rpos + fieldsize - 1] &= mask;
8155 buf[pos - rpos + fieldsize - 1]
8156 |= ptr[pos + fieldsize - 1 - o] & ~mask;
8157 }
8158 else
8159 {
8160 int mask = (1 << (BITS_PER_UNIT - epos)) - 1;
8161 buf[pos - rpos + fieldsize - 1] &= ~mask;
8162 buf[pos - rpos + fieldsize - 1]
8163 |= ptr[pos + fieldsize - 1 - o] & mask;
8164 }
8165 }
8166 if (off == -1
8167 || (pos >= off
8168 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8169 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8170 else
8171 {
8172 /* Partial overlap. */
8173 HOST_WIDE_INT fsz = fieldsize;
8174 if (pos < off)
8175 {
8176 fsz -= (off - pos);
8177 pos = off;
8178 }
8179 if (pos + fsz > (HOST_WIDE_INT) off + len)
8180 fsz = (HOST_WIDE_INT) off + len - pos;
8181 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8182 }
8183 continue;
8184 }
8185
8186 if (off == -1
8187 || (pos >= off
8188 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8189 {
8190 if (!native_encode_initializer (val, ptr ? ptr + pos - o
8191 : NULL,
8192 fieldsize,
8193 off == -1 ? -1 : 0))
8194 return 0;
8195 }
8196 else
8197 {
8198 /* Partial overlap. */
8199 unsigned char *p = NULL;
8200 int no = 0;
8201 int l;
8202 if (pos >= off)
8203 {
8204 if (ptr)
8205 p = ptr + pos - off;
8206 l = MIN ((HOST_WIDE_INT) off + len - pos,
8207 fieldsize);
8208 }
8209 else
8210 {
8211 p = ptr;
8212 no = off - pos;
8213 l = len;
8214 }
8215 if (!native_encode_initializer (val, p, l, no))
8216 return 0;
8217 }
8218 }
8219 return MIN (total_bytes - off, len);
8220 }
8221 return 0;
8222 }
8223 }
8224
8225
8226 /* Subroutine of native_interpret_expr. Interpret the contents of
8227 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8228 If the buffer cannot be interpreted, return NULL_TREE. */
8229
8230 static tree
8231 native_interpret_int (tree type, const unsigned char *ptr, int len)
8232 {
8233 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8234
8235 if (total_bytes > len
8236 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8237 return NULL_TREE;
8238
8239 wide_int result = wi::from_buffer (ptr, total_bytes);
8240
8241 return wide_int_to_tree (type, result);
8242 }
8243
8244
8245 /* Subroutine of native_interpret_expr. Interpret the contents of
8246 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8247 If the buffer cannot be interpreted, return NULL_TREE. */
8248
8249 static tree
8250 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8251 {
8252 scalar_mode mode = SCALAR_TYPE_MODE (type);
8253 int total_bytes = GET_MODE_SIZE (mode);
8254 double_int result;
8255 FIXED_VALUE_TYPE fixed_value;
8256
8257 if (total_bytes > len
8258 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8259 return NULL_TREE;
8260
8261 result = double_int::from_buffer (ptr, total_bytes);
8262 fixed_value = fixed_from_double_int (result, mode);
8263
8264 return build_fixed (type, fixed_value);
8265 }
8266
8267
8268 /* Subroutine of native_interpret_expr. Interpret the contents of
8269 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8270 If the buffer cannot be interpreted, return NULL_TREE. */
8271
8272 static tree
8273 native_interpret_real (tree type, const unsigned char *ptr, int len)
8274 {
8275 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8276 int total_bytes = GET_MODE_SIZE (mode);
8277 unsigned char value;
8278 /* There are always 32 bits in each long, no matter the size of
8279 the host's long. We handle floating point representations with
8280 up to 192 bits. */
8281 REAL_VALUE_TYPE r;
8282 long tmp[6];
8283
8284 if (total_bytes > len || total_bytes > 24)
8285 return NULL_TREE;
8286 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8287
8288 memset (tmp, 0, sizeof (tmp));
8289 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8290 bitpos += BITS_PER_UNIT)
8291 {
8292 /* Both OFFSET and BYTE index within a long;
8293 bitpos indexes the whole float. */
8294 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8295 if (UNITS_PER_WORD < 4)
8296 {
8297 int word = byte / UNITS_PER_WORD;
8298 if (WORDS_BIG_ENDIAN)
8299 word = (words - 1) - word;
8300 offset = word * UNITS_PER_WORD;
8301 if (BYTES_BIG_ENDIAN)
8302 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8303 else
8304 offset += byte % UNITS_PER_WORD;
8305 }
8306 else
8307 {
8308 offset = byte;
8309 if (BYTES_BIG_ENDIAN)
8310 {
8311 /* Reverse bytes within each long, or within the entire float
8312 if it's smaller than a long (for HFmode). */
8313 offset = MIN (3, total_bytes - 1) - offset;
8314 gcc_assert (offset >= 0);
8315 }
8316 }
8317 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8318
8319 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8320 }
8321
8322 real_from_target (&r, tmp, mode);
8323 return build_real (type, r);
8324 }
8325
8326
8327 /* Subroutine of native_interpret_expr. Interpret the contents of
8328 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8329 If the buffer cannot be interpreted, return NULL_TREE. */
8330
8331 static tree
8332 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8333 {
8334 tree etype, rpart, ipart;
8335 int size;
8336
8337 etype = TREE_TYPE (type);
8338 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8339 if (size * 2 > len)
8340 return NULL_TREE;
8341 rpart = native_interpret_expr (etype, ptr, size);
8342 if (!rpart)
8343 return NULL_TREE;
8344 ipart = native_interpret_expr (etype, ptr+size, size);
8345 if (!ipart)
8346 return NULL_TREE;
8347 return build_complex (type, rpart, ipart);
8348 }
8349
8350 /* Read a vector of type TYPE from the target memory image given by BYTES,
8351 which contains LEN bytes. The vector is known to be encodable using
8352 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8353
8354 Return the vector on success, otherwise return null. */
8355
8356 static tree
8357 native_interpret_vector_part (tree type, const unsigned char *bytes,
8358 unsigned int len, unsigned int npatterns,
8359 unsigned int nelts_per_pattern)
8360 {
8361 tree elt_type = TREE_TYPE (type);
8362 if (VECTOR_BOOLEAN_TYPE_P (type)
8363 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8364 {
8365 /* This is the only case in which elements can be smaller than a byte.
8366 Element 0 is always in the lsb of the containing byte. */
8367 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8368 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8369 return NULL_TREE;
8370
8371 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8372 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8373 {
8374 unsigned int bit_index = i * elt_bits;
8375 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8376 unsigned int lsb = bit_index % BITS_PER_UNIT;
8377 builder.quick_push (bytes[byte_index] & (1 << lsb)
8378 ? build_all_ones_cst (elt_type)
8379 : build_zero_cst (elt_type));
8380 }
8381 return builder.build ();
8382 }
8383
8384 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8385 if (elt_bytes * npatterns * nelts_per_pattern > len)
8386 return NULL_TREE;
8387
8388 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8389 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8390 {
8391 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8392 if (!elt)
8393 return NULL_TREE;
8394 builder.quick_push (elt);
8395 bytes += elt_bytes;
8396 }
8397 return builder.build ();
8398 }
8399
8400 /* Subroutine of native_interpret_expr. Interpret the contents of
8401 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8402 If the buffer cannot be interpreted, return NULL_TREE. */
8403
8404 static tree
8405 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8406 {
8407 tree etype;
8408 unsigned int size;
8409 unsigned HOST_WIDE_INT count;
8410
8411 etype = TREE_TYPE (type);
8412 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8413 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8414 || size * count > len)
8415 return NULL_TREE;
8416
8417 return native_interpret_vector_part (type, ptr, len, count, 1);
8418 }
8419
8420
8421 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8422 the buffer PTR of length LEN as a constant of type TYPE. For
8423 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8424 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8425 return NULL_TREE. */
8426
8427 tree
8428 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8429 {
8430 switch (TREE_CODE (type))
8431 {
8432 case INTEGER_TYPE:
8433 case ENUMERAL_TYPE:
8434 case BOOLEAN_TYPE:
8435 case POINTER_TYPE:
8436 case REFERENCE_TYPE:
8437 return native_interpret_int (type, ptr, len);
8438
8439 case REAL_TYPE:
8440 return native_interpret_real (type, ptr, len);
8441
8442 case FIXED_POINT_TYPE:
8443 return native_interpret_fixed (type, ptr, len);
8444
8445 case COMPLEX_TYPE:
8446 return native_interpret_complex (type, ptr, len);
8447
8448 case VECTOR_TYPE:
8449 return native_interpret_vector (type, ptr, len);
8450
8451 default:
8452 return NULL_TREE;
8453 }
8454 }
8455
8456 /* Returns true if we can interpret the contents of a native encoding
8457 as TYPE. */
8458
8459 bool
8460 can_native_interpret_type_p (tree type)
8461 {
8462 switch (TREE_CODE (type))
8463 {
8464 case INTEGER_TYPE:
8465 case ENUMERAL_TYPE:
8466 case BOOLEAN_TYPE:
8467 case POINTER_TYPE:
8468 case REFERENCE_TYPE:
8469 case FIXED_POINT_TYPE:
8470 case REAL_TYPE:
8471 case COMPLEX_TYPE:
8472 case VECTOR_TYPE:
8473 return true;
8474 default:
8475 return false;
8476 }
8477 }
8478
8479 /* Routines for manipulation of native_encode_expr encoded data if the encoded
8480 or extracted constant positions and/or sizes aren't byte aligned. */
8481
8482 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8483 bits between adjacent elements. AMNT should be within
8484 [0, BITS_PER_UNIT).
8485 Example, AMNT = 2:
8486 00011111|11100000 << 2 = 01111111|10000000
8487 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
8488
8489 void
8490 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8491 unsigned int amnt)
8492 {
8493 if (amnt == 0)
8494 return;
8495
8496 unsigned char carry_over = 0U;
8497 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
8498 unsigned char clear_mask = (~0U) << amnt;
8499
8500 for (unsigned int i = 0; i < sz; i++)
8501 {
8502 unsigned prev_carry_over = carry_over;
8503 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
8504
8505 ptr[i] <<= amnt;
8506 if (i != 0)
8507 {
8508 ptr[i] &= clear_mask;
8509 ptr[i] |= prev_carry_over;
8510 }
8511 }
8512 }
8513
8514 /* Like shift_bytes_in_array_left but for big-endian.
8515 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
8516 bits between adjacent elements. AMNT should be within
8517 [0, BITS_PER_UNIT).
8518 Example, AMNT = 2:
8519 00011111|11100000 >> 2 = 00000111|11111000
8520 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
8521
8522 void
8523 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
8524 unsigned int amnt)
8525 {
8526 if (amnt == 0)
8527 return;
8528
8529 unsigned char carry_over = 0U;
8530 unsigned char carry_mask = ~(~0U << amnt);
8531
8532 for (unsigned int i = 0; i < sz; i++)
8533 {
8534 unsigned prev_carry_over = carry_over;
8535 carry_over = ptr[i] & carry_mask;
8536
8537 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
8538 ptr[i] >>= amnt;
8539 ptr[i] |= prev_carry_over;
8540 }
8541 }
8542
8543 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
8544 directly on the VECTOR_CST encoding, in a way that works for variable-
8545 length vectors. Return the resulting VECTOR_CST on success or null
8546 on failure. */
8547
8548 static tree
8549 fold_view_convert_vector_encoding (tree type, tree expr)
8550 {
8551 tree expr_type = TREE_TYPE (expr);
8552 poly_uint64 type_bits, expr_bits;
8553 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
8554 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
8555 return NULL_TREE;
8556
8557 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
8558 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
8559 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
8560 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
8561
8562 /* We can only preserve the semantics of a stepped pattern if the new
8563 vector element is an integer of the same size. */
8564 if (VECTOR_CST_STEPPED_P (expr)
8565 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
8566 return NULL_TREE;
8567
8568 /* The number of bits needed to encode one element from every pattern
8569 of the original vector. */
8570 unsigned int expr_sequence_bits
8571 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
8572
8573 /* The number of bits needed to encode one element from every pattern
8574 of the result. */
8575 unsigned int type_sequence_bits
8576 = least_common_multiple (expr_sequence_bits, type_elt_bits);
8577
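/* For example, if EXPR has 2 patterns of 32-bit elements
(expr_sequence_bits == 64) and TYPE has 16-bit elements, this is
least_common_multiple (64, 16) == 64, yielding 64 / 16 == 4
patterns for the result below. */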
8578 /* Don't try to read more bytes than are available, which can happen
8579 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
8580 The general VIEW_CONVERT handling can cope with that case, so there's
8581 no point complicating things here. */
8582 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
8583 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
8584 BITS_PER_UNIT);
8585 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
8586 if (known_gt (buffer_bits, expr_bits))
8587 return NULL_TREE;
8588
8589 /* Get enough bytes of EXPR to form the new encoding. */
8590 auto_vec<unsigned char, 128> buffer (buffer_bytes);
8591 buffer.quick_grow (buffer_bytes);
8592 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
8593 buffer_bits / expr_elt_bits)
8594 != (int) buffer_bytes)
8595 return NULL_TREE;
8596
8597 /* Reencode the bytes as TYPE. */
8598 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
8599 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
8600 type_npatterns, nelts_per_pattern);
8601 }
8602
8603 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8604 TYPE at compile-time. If we're unable to perform the conversion
8605 return NULL_TREE. */
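/* For example, view-converting the 32-bit integer constant 0x3f800000
to float yields the REAL_CST 1.0f on a target whose float format is
IEEE single precision. */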
8606
8607 static tree
8608 fold_view_convert_expr (tree type, tree expr)
8609 {
8610 /* We support up to 512-bit values (for V8DFmode). */
8611 unsigned char buffer[64];
8612 int len;
8613
8614 /* Check that the host and target are sane. */
8615 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8616 return NULL_TREE;
8617
8618 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
8619 if (tree res = fold_view_convert_vector_encoding (type, expr))
8620 return res;
8621
8622 len = native_encode_expr (expr, buffer, sizeof (buffer));
8623 if (len == 0)
8624 return NULL_TREE;
8625
8626 return native_interpret_expr (type, buffer, len);
8627 }
8628
8629 /* Build an expression for the address of T. Folds away INDIRECT_REF
8630 to avoid confusing the gimplify process. */
8631
8632 tree
8633 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8634 {
8635 /* The size of the object is not relevant when talking about its address. */
8636 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8637 t = TREE_OPERAND (t, 0);
8638
8639 if (TREE_CODE (t) == INDIRECT_REF)
8640 {
8641 t = TREE_OPERAND (t, 0);
8642
8643 if (TREE_TYPE (t) != ptrtype)
8644 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
8645 }
8646 else if (TREE_CODE (t) == MEM_REF
8647 && integer_zerop (TREE_OPERAND (t, 1)))
8648 {
8649 t = TREE_OPERAND (t, 0);
8650
8651 if (TREE_TYPE (t) != ptrtype)
8652 t = fold_convert_loc (loc, ptrtype, t);
8653 }
8654 else if (TREE_CODE (t) == MEM_REF
8655 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
8656 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
8657 TREE_OPERAND (t, 0),
8658 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
8659 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8660 {
8661 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8662
8663 if (TREE_TYPE (t) != ptrtype)
8664 t = fold_convert_loc (loc, ptrtype, t);
8665 }
8666 else
8667 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
8668
8669 return t;
8670 }
8671
8672 /* Build an expression for the address of T. */
8673
8674 tree
8675 build_fold_addr_expr_loc (location_t loc, tree t)
8676 {
8677 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8678
8679 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8680 }
8681
8682 /* Fold a unary expression of code CODE and type TYPE with operand
8683 OP0. Return the folded expression if folding is successful.
8684 Otherwise, return NULL_TREE. */
8685
8686 tree
8687 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8688 {
8689 tree tem;
8690 tree arg0;
8691 enum tree_code_class kind = TREE_CODE_CLASS (code);
8692
8693 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8694 && TREE_CODE_LENGTH (code) == 1);
8695
8696 arg0 = op0;
8697 if (arg0)
8698 {
8699 if (CONVERT_EXPR_CODE_P (code)
8700 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
8701 {
8702 /* Don't use STRIP_NOPS, because signedness of argument type
8703 matters. */
8704 STRIP_SIGN_NOPS (arg0);
8705 }
8706 else
8707 {
8708 /* Strip any conversions that don't change the mode. This
8709 is safe for every expression, except for a comparison
8710 expression because its signedness is derived from its
8711 operands.
8712
8713 Note that this is done as an internal manipulation within
8714 the constant folder, in order to find the simplest
8715 representation of the arguments so that their form can be
8716 studied. In any case, the appropriate type conversions
8717 should be put back in the tree that will get out of the
8718 constant folder. */
8719 STRIP_NOPS (arg0);
8720 }
8721
8722 if (CONSTANT_CLASS_P (arg0))
8723 {
8724 tree tem = const_unop (code, type, arg0);
8725 if (tem)
8726 {
8727 if (TREE_TYPE (tem) != type)
8728 tem = fold_convert_loc (loc, type, tem);
8729 return tem;
8730 }
8731 }
8732 }
8733
8734 tem = generic_simplify (loc, code, type, op0);
8735 if (tem)
8736 return tem;
8737
8738 if (TREE_CODE_CLASS (code) == tcc_unary)
8739 {
8740 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8741 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8742 fold_build1_loc (loc, code, type,
8743 fold_convert_loc (loc, TREE_TYPE (op0),
8744 TREE_OPERAND (arg0, 1))));
8745 else if (TREE_CODE (arg0) == COND_EXPR)
8746 {
8747 tree arg01 = TREE_OPERAND (arg0, 1);
8748 tree arg02 = TREE_OPERAND (arg0, 2);
8749 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8750 arg01 = fold_build1_loc (loc, code, type,
8751 fold_convert_loc (loc,
8752 TREE_TYPE (op0), arg01));
8753 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8754 arg02 = fold_build1_loc (loc, code, type,
8755 fold_convert_loc (loc,
8756 TREE_TYPE (op0), arg02));
8757 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8758 arg01, arg02);
8759
8760 /* If this was a conversion, and all we did was to move it
8761 inside the COND_EXPR, bring it back out. But leave it if
8762 it is a conversion from integer to integer and the
8763 result precision is no wider than a word since such a
8764 conversion is cheap and may be optimized away by combine,
8765 while it couldn't if it were outside the COND_EXPR. Then return
8766 so we don't get into an infinite recursion loop taking the
8767 conversion out and then back in. */
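/* An illustrative sketch: (double) (c ? f : g) with float F and G is
   first folded to c ? (double) f : (double) g and then hoisted back
   out, whereas (int) (c ? sc1 : sc2) with signed char operands stays
   as c ? (int) sc1 : (int) sc2, assuming a word of at least 32 bits.  */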
8768
8769 if ((CONVERT_EXPR_CODE_P (code)
8770 || code == NON_LVALUE_EXPR)
8771 && TREE_CODE (tem) == COND_EXPR
8772 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8773 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8774 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8775 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8776 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8777 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8778 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8779 && (INTEGRAL_TYPE_P
8780 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8781 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8782 || flag_syntax_only))
8783 tem = build1_loc (loc, code, type,
8784 build3 (COND_EXPR,
8785 TREE_TYPE (TREE_OPERAND
8786 (TREE_OPERAND (tem, 1), 0)),
8787 TREE_OPERAND (tem, 0),
8788 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8789 TREE_OPERAND (TREE_OPERAND (tem, 2),
8790 0)));
8791 return tem;
8792 }
8793 }
8794
8795 switch (code)
8796 {
8797 case NON_LVALUE_EXPR:
8798 if (!maybe_lvalue_p (op0))
8799 return fold_convert_loc (loc, type, op0);
8800 return NULL_TREE;
8801
8802 CASE_CONVERT:
8803 case FLOAT_EXPR:
8804 case FIX_TRUNC_EXPR:
8805 if (COMPARISON_CLASS_P (op0))
8806 {
8807 /* If we have (type) (a CMP b) and type is an integral type, return
8808 new expression involving the new type. Canonicalize
8809 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
8810 non-integral type.
8811 Do not fold the result as that would not simplify further; also,
8812 folding again would result in infinite recursion. */
8813 if (TREE_CODE (type) == BOOLEAN_TYPE)
8814 return build2_loc (loc, TREE_CODE (op0), type,
8815 TREE_OPERAND (op0, 0),
8816 TREE_OPERAND (op0, 1));
8817 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
8818 && TREE_CODE (type) != VECTOR_TYPE)
8819 return build3_loc (loc, COND_EXPR, type, op0,
8820 constant_boolean_node (true, type),
8821 constant_boolean_node (false, type));
8822 }
8823
8824 /* Handle (T *)&A.B.C for A being of type T and B and C
8825 living at offset zero. This occurs frequently in
8826 C++ upcasting and then accessing the base. */
8827 if (TREE_CODE (op0) == ADDR_EXPR
8828 && POINTER_TYPE_P (type)
8829 && handled_component_p (TREE_OPERAND (op0, 0)))
8830 {
8831 poly_int64 bitsize, bitpos;
8832 tree offset;
8833 machine_mode mode;
8834 int unsignedp, reversep, volatilep;
8835 tree base
8836 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
8837 &offset, &mode, &unsignedp, &reversep,
8838 &volatilep);
8839 /* If the reference was to a (constant) zero offset, we can use
8840 the address of the base if it has the same base type
8841 as the result type and the pointer type is unqualified. */
8842 if (!offset
8843 && known_eq (bitpos, 0)
8844 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8845 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8846 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8847 return fold_convert_loc (loc, type,
8848 build_fold_addr_expr_loc (loc, base));
8849 }
8850
8851 if (TREE_CODE (op0) == MODIFY_EXPR
8852 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8853 /* Detect assigning a bitfield. */
8854 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8855 && DECL_BIT_FIELD
8856 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8857 {
8858 /* Don't leave an assignment inside a conversion
8859 unless assigning a bitfield. */
8860 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8861 /* First do the assignment, then return converted constant. */
8862 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8863 TREE_NO_WARNING (tem) = 1;
8864 TREE_USED (tem) = 1;
8865 return tem;
8866 }
8867
8868 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8869 constant (if x has signed type, the sign bit cannot be set
8870 in c). This folds extension into the BIT_AND_EXPR.
8871 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8872 very likely don't have maximal range for their precision and this
8873 transformation effectively doesn't preserve non-maximal ranges. */
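/* A concrete instance (illustrative only): with 32-bit int I,
   (long) (i & 0xff) becomes (long) i & 0xff, since the mask leaves
   the sign bit of the narrower type clear.  */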
8874 if (TREE_CODE (type) == INTEGER_TYPE
8875 && TREE_CODE (op0) == BIT_AND_EXPR
8876 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8877 {
8878 tree and_expr = op0;
8879 tree and0 = TREE_OPERAND (and_expr, 0);
8880 tree and1 = TREE_OPERAND (and_expr, 1);
8881 int change = 0;
8882
8883 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8884 || (TYPE_PRECISION (type)
8885 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8886 change = 1;
8887 else if (TYPE_PRECISION (TREE_TYPE (and1))
8888 <= HOST_BITS_PER_WIDE_INT
8889 && tree_fits_uhwi_p (and1))
8890 {
8891 unsigned HOST_WIDE_INT cst;
8892
8893 cst = tree_to_uhwi (and1);
8894 cst &= HOST_WIDE_INT_M1U
8895 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8896 change = (cst == 0);
8897 if (change
8898 && !flag_syntax_only
8899 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
8900 == ZERO_EXTEND))
8901 {
8902 tree uns = unsigned_type_for (TREE_TYPE (and0));
8903 and0 = fold_convert_loc (loc, uns, and0);
8904 and1 = fold_convert_loc (loc, uns, and1);
8905 }
8906 }
8907 if (change)
8908 {
8909 tem = force_fit_type (type, wi::to_widest (and1), 0,
8910 TREE_OVERFLOW (and1));
8911 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8912 fold_convert_loc (loc, type, and0), tem);
8913 }
8914 }
8915
8916 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8917 cast (T1)X will fold away. We assume that this happens when X itself
8918 is a cast. */
8919 if (POINTER_TYPE_P (type)
8920 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8921 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
8922 {
8923 tree arg00 = TREE_OPERAND (arg0, 0);
8924 tree arg01 = TREE_OPERAND (arg0, 1);
8925
8926 return fold_build_pointer_plus_loc
8927 (loc, fold_convert_loc (loc, type, arg00), arg01);
8928 }
8929
8930 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8931 of the same precision, and X has an integer type not narrower than
8932 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
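/* For instance (illustrative only): with unsigned int X and 32-bit int,
   (unsigned int) ~(int) x becomes ~x, as both casts preserve the
   precision.  */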
8933 if (INTEGRAL_TYPE_P (type)
8934 && TREE_CODE (op0) == BIT_NOT_EXPR
8935 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8936 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8937 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8938 {
8939 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8940 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8941 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8942 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8943 fold_convert_loc (loc, type, tem));
8944 }
8945
8946 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8947 type of X and Y (integer types only). */
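/* A sketch of the effect: (short) (x * y) with int X and Y becomes
   (short) ((unsigned short) x * (unsigned short) y) when short does
   not have wrapping overflow, so no new overflow is introduced.  */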
8948 if (INTEGRAL_TYPE_P (type)
8949 && TREE_CODE (op0) == MULT_EXPR
8950 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8951 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8952 {
8953 /* Be careful not to introduce new overflows. */
8954 tree mult_type;
8955 if (TYPE_OVERFLOW_WRAPS (type))
8956 mult_type = type;
8957 else
8958 mult_type = unsigned_type_for (type);
8959
8960 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8961 {
8962 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8963 fold_convert_loc (loc, mult_type,
8964 TREE_OPERAND (op0, 0)),
8965 fold_convert_loc (loc, mult_type,
8966 TREE_OPERAND (op0, 1)));
8967 return fold_convert_loc (loc, type, tem);
8968 }
8969 }
8970
8971 return NULL_TREE;
8972
8973 case VIEW_CONVERT_EXPR:
8974 if (TREE_CODE (op0) == MEM_REF)
8975 {
8976 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8977 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8978 tem = fold_build2_loc (loc, MEM_REF, type,
8979 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8980 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8981 return tem;
8982 }
8983
8984 return NULL_TREE;
8985
8986 case NEGATE_EXPR:
8987 tem = fold_negate_expr (loc, arg0);
8988 if (tem)
8989 return fold_convert_loc (loc, type, tem);
8990 return NULL_TREE;
8991
8992 case ABS_EXPR:
8993 /* Convert fabs((double)float) into (double)fabsf(float). */
8994 if (TREE_CODE (arg0) == NOP_EXPR
8995 && TREE_CODE (type) == REAL_TYPE)
8996 {
8997 tree targ0 = strip_float_extensions (arg0);
8998 if (targ0 != arg0)
8999 return fold_convert_loc (loc, type,
9000 fold_build1_loc (loc, ABS_EXPR,
9001 TREE_TYPE (targ0),
9002 targ0));
9003 }
9004 return NULL_TREE;
9005
9006 case BIT_NOT_EXPR:
9007 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9008 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9009 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9010 fold_convert_loc (loc, type,
9011 TREE_OPERAND (arg0, 0)))))
9012 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9013 fold_convert_loc (loc, type,
9014 TREE_OPERAND (arg0, 1)));
9015 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9016 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9017 fold_convert_loc (loc, type,
9018 TREE_OPERAND (arg0, 1)))))
9019 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9020 fold_convert_loc (loc, type,
9021 TREE_OPERAND (arg0, 0)), tem);
9022
9023 return NULL_TREE;
9024
9025 case TRUTH_NOT_EXPR:
9026 /* Note that the operand of this must be an int
9027 and its values must be 0 or 1.
9028 ("true" is a fixed value perhaps depending on the language,
9029 but we don't handle values other than 1 correctly yet.) */
9030 tem = fold_truth_not_expr (loc, arg0);
9031 if (!tem)
9032 return NULL_TREE;
9033 return fold_convert_loc (loc, type, tem);
9034
9035 case INDIRECT_REF:
9036 /* Fold *&X to X if X is an lvalue. */
9037 if (TREE_CODE (op0) == ADDR_EXPR)
9038 {
9039 tree op00 = TREE_OPERAND (op0, 0);
9040 if ((VAR_P (op00)
9041 || TREE_CODE (op00) == PARM_DECL
9042 || TREE_CODE (op00) == RESULT_DECL)
9043 && !TREE_READONLY (op00))
9044 return op00;
9045 }
9046 return NULL_TREE;
9047
9048 default:
9049 return NULL_TREE;
9050 } /* switch (code) */
9051 }
9052
9053
9054 /* If the operation was a conversion, do _not_ mark a resulting constant
9055 with TREE_OVERFLOW if the original constant was not. These conversions
9056 have implementation defined behavior and retaining the TREE_OVERFLOW
9057 flag here would confuse later passes such as VRP. */
9058 tree
9059 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9060 tree type, tree op0)
9061 {
9062 tree res = fold_unary_loc (loc, code, type, op0);
9063 if (res
9064 && TREE_CODE (res) == INTEGER_CST
9065 && TREE_CODE (op0) == INTEGER_CST
9066 && CONVERT_EXPR_CODE_P (code))
9067 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9068
9069 return res;
9070 }
9071
9072 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9073 operands OP0 and OP1. LOC is the location of the resulting expression.
9074 ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
9075 Return the folded expression if folding is successful. Otherwise,
9076 return NULL_TREE. */
9077 static tree
9078 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9079 tree arg0, tree arg1, tree op0, tree op1)
9080 {
9081 tree tem;
9082
9083 /* We only do these simplifications if we are optimizing. */
9084 if (!optimize)
9085 return NULL_TREE;
9086
9087 /* Check for things like (A || B) && (A || C). We can convert this
9088 to A || (B && C). Note that either operator can be any of the four
9089 truth and/or operations and the transformation will still be
9090 valid. Also note that we only care about order for the
9091 ANDIF and ORIF operators. If B contains side effects, this
9092 might change the truth-value of A. */
9093 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9094 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9095 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9096 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9097 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9098 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9099 {
9100 tree a00 = TREE_OPERAND (arg0, 0);
9101 tree a01 = TREE_OPERAND (arg0, 1);
9102 tree a10 = TREE_OPERAND (arg1, 0);
9103 tree a11 = TREE_OPERAND (arg1, 1);
9104 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9105 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9106 && (code == TRUTH_AND_EXPR
9107 || code == TRUTH_OR_EXPR));
9108
9109 if (operand_equal_p (a00, a10, 0))
9110 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9111 fold_build2_loc (loc, code, type, a01, a11));
9112 else if (commutative && operand_equal_p (a00, a11, 0))
9113 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9114 fold_build2_loc (loc, code, type, a01, a10));
9115 else if (commutative && operand_equal_p (a01, a10, 0))
9116 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9117 fold_build2_loc (loc, code, type, a00, a11));
9118
9119 /* This case is tricky because we must either have commutative
9120 operators or else A10 must not have side-effects. */
9121
9122 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9123 && operand_equal_p (a01, a11, 0))
9124 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9125 fold_build2_loc (loc, code, type, a00, a10),
9126 a01);
9127 }
9128
9129 /* See if we can build a range comparison. */
9130 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9131 return tem;
9132
9133 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9134 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9135 {
9136 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9137 if (tem)
9138 return fold_build2_loc (loc, code, type, tem, arg1);
9139 }
9140
9141 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9142 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9143 {
9144 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9145 if (tem)
9146 return fold_build2_loc (loc, code, type, arg0, tem);
9147 }
9148
9149 /* Check for the possibility of merging component references. If our
9150 lhs is another similar operation, try to merge its rhs with our
9151 rhs. Then try to merge our lhs and rhs. */
9152 if (TREE_CODE (arg0) == code
9153 && (tem = fold_truth_andor_1 (loc, code, type,
9154 TREE_OPERAND (arg0, 1), arg1)) != 0)
9155 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9156
9157 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9158 return tem;
9159
9160 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9161 if (param_logical_op_non_short_circuit != -1)
9162 logical_op_non_short_circuit
9163 = param_logical_op_non_short_circuit;
9164 if (logical_op_non_short_circuit
9165 && !flag_sanitize_coverage
9166 && (code == TRUTH_AND_EXPR
9167 || code == TRUTH_ANDIF_EXPR
9168 || code == TRUTH_OR_EXPR
9169 || code == TRUTH_ORIF_EXPR))
9170 {
9171 enum tree_code ncode, icode;
9172
9173 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9174 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9175 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9176
9177 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9178 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9179 We don't want to pack more than two leaves into a non-IF AND/OR
9180 expression.
9181 If the tree code of the left-hand operand isn't an AND/OR-IF code and
9182 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
9183 If the inner right-hand side of the left-hand operand has
9184 side-effects, or isn't simple, then we can't add to it,
9185 as otherwise we might destroy the if-sequence. */
9186 if (TREE_CODE (arg0) == icode
9187 && simple_operand_p_2 (arg1)
9188 /* Needed for sequence points to handle trapping and
9189 side-effects. */
9190 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9191 {
9192 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9193 arg1);
9194 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9195 tem);
9196 }
9197 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9198 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
9199 else if (TREE_CODE (arg1) == icode
9200 && simple_operand_p_2 (arg0)
9201 /* Needed for sequence points to handle trapping and
9202 side-effects. */
9203 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9204 {
9205 tem = fold_build2_loc (loc, ncode, type,
9206 arg0, TREE_OPERAND (arg1, 0));
9207 return fold_build2_loc (loc, icode, type, tem,
9208 TREE_OPERAND (arg1, 1));
9209 }
9210 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9211 into (A OR B).
9212 For sequence point consistency, we need to check for trapping,
9213 and side-effects. */
9214 else if (code == icode && simple_operand_p_2 (arg0)
9215 && simple_operand_p_2 (arg1))
9216 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9217 }
9218
9219 return NULL_TREE;
9220 }
9221
9222 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9223 by changing CODE to reduce the magnitude of constants involved in
9224 ARG0 of the comparison.
9225 Returns a canonicalized comparison tree if a simplification was
9226 possible, otherwise returns NULL_TREE.
9227 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9228 valid if signed overflow is undefined. */
9229
9230 static tree
9231 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9232 tree arg0, tree arg1,
9233 bool *strict_overflow_p)
9234 {
9235 enum tree_code code0 = TREE_CODE (arg0);
9236 tree t, cst0 = NULL_TREE;
9237 int sgn0;
9238
9239 /* Match A +- CST code arg1. We can change this only if overflow
9240 is undefined. */
9241 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9242 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9243 /* In principle pointers also have undefined overflow behavior,
9244 but that causes problems elsewhere. */
9245 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9246 && (code0 == MINUS_EXPR
9247 || code0 == PLUS_EXPR)
9248 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9249 return NULL_TREE;
9250
9251 /* Identify the constant in arg0 and its sign. */
9252 cst0 = TREE_OPERAND (arg0, 1);
9253 sgn0 = tree_int_cst_sgn (cst0);
9254
9255 /* Overflowed constants and zero will cause problems. */
9256 if (integer_zerop (cst0)
9257 || TREE_OVERFLOW (cst0))
9258 return NULL_TREE;
9259
9260 /* See if we can reduce the magnitude of the constant in
9261 arg0 by changing the comparison code. */
9262 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9263 if (code == LT_EXPR
9264 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9265 code = LE_EXPR;
9266 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9267 else if (code == GT_EXPR
9268 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9269 code = GE_EXPR;
9270 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9271 else if (code == LE_EXPR
9272 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9273 code = LT_EXPR;
9274 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9275 else if (code == GE_EXPR
9276 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9277 code = GT_EXPR;
9278 else
9279 return NULL_TREE;
9280 *strict_overflow_p = true;
9281
9282 /* Now build the constant reduced in magnitude. But not if that
9283 would produce one outside of its type's range. */
9284 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9285 && ((sgn0 == 1
9286 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9287 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9288 || (sgn0 == -1
9289 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9290 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9291 return NULL_TREE;
9292
9293 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9294 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9295 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9296 t = fold_convert (TREE_TYPE (arg1), t);
9297
9298 return fold_build2_loc (loc, code, type, t, arg1);
9299 }
9300
9301 /* Canonicalize further the comparison ARG0 CODE ARG1 with type TYPE whose
9302 overflow is undefined. Try to decrease the magnitude of constants involved
9303 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9304 and put sole constants at the second argument position.
9305 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9306
9307 static tree
9308 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9309 tree arg0, tree arg1)
9310 {
9311 tree t;
9312 bool strict_overflow_p;
9313 const char * const warnmsg = G_("assuming signed overflow does not occur "
9314 "when reducing constant in comparison");
9315
9316 /* Try canonicalization by simplifying arg0. */
9317 strict_overflow_p = false;
9318 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9319 &strict_overflow_p);
9320 if (t)
9321 {
9322 if (strict_overflow_p)
9323 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9324 return t;
9325 }
9326
9327 /* Try canonicalization by simplifying arg1 using the swapped
9328 comparison. */
9329 code = swap_tree_comparison (code);
9330 strict_overflow_p = false;
9331 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9332 &strict_overflow_p);
9333 if (t && strict_overflow_p)
9334 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9335 return t;
9336 }
9337
9338 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9339 space. This is used to avoid issuing overflow warnings for
9340 expressions like &p->x which cannot wrap. */
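/* For example, for &p->x where X lives four bytes into an eight-byte
   structure, the total offset (4) does not exceed the size of *P (8),
   so the address cannot wrap and the function returns false.  */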
9341
9342 static bool
9343 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9344 {
9345 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9346 return true;
9347
9348 if (maybe_lt (bitpos, 0))
9349 return true;
9350
9351 poly_wide_int wi_offset;
9352 int precision = TYPE_PRECISION (TREE_TYPE (base));
9353 if (offset == NULL_TREE)
9354 wi_offset = wi::zero (precision);
9355 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9356 return true;
9357 else
9358 wi_offset = wi::to_poly_wide (offset);
9359
9360 wi::overflow_type overflow;
9361 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9362 precision);
9363 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9364 if (overflow)
9365 return true;
9366
9367 poly_uint64 total_hwi, size;
9368 if (!total.to_uhwi (&total_hwi)
9369 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9370 &size)
9371 || known_eq (size, 0U))
9372 return true;
9373
9374 if (known_le (total_hwi, size))
9375 return false;
9376
9377 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9378 array. */
9379 if (TREE_CODE (base) == ADDR_EXPR
9380 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9381 &size)
9382 && maybe_ne (size, 0U)
9383 && known_le (total_hwi, size))
9384 return false;
9385
9386 return true;
9387 }
9388
9389 /* Return a positive integer when the symbol DECL is known to have
9390 a nonzero address, zero when it's known not to (e.g., it's a weak
9391 symbol), and a negative integer when the symbol is not yet in the
9392 symbol table and so whether or not its address is zero is unknown.
9393 For function-local objects, always return a positive integer. */
9394 static int
9395 maybe_nonzero_address (tree decl)
9396 {
9397 if (DECL_P (decl) && decl_in_symtab_p (decl))
9398 if (struct symtab_node *symbol = symtab_node::get_create (decl))
9399 return symbol->nonzero_address ();
9400
9401 /* Function local objects are never NULL. */
9402 if (DECL_P (decl)
9403 && (DECL_CONTEXT (decl)
9404 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9405 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9406 return 1;
9407
9408 return -1;
9409 }
9410
9411 /* Subroutine of fold_binary. This routine performs all of the
9412 transformations that are common to the equality/inequality
9413 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9414 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9415 fold_binary should call fold_binary instead. Fold a comparison with
9416 tree code CODE and type TYPE with operands OP0 and OP1. Return
9417 the folded comparison or NULL_TREE. */
9418
9419 static tree
9420 fold_comparison (location_t loc, enum tree_code code, tree type,
9421 tree op0, tree op1)
9422 {
9423 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9424 tree arg0, arg1, tem;
9425
9426 arg0 = op0;
9427 arg1 = op1;
9428
9429 STRIP_SIGN_NOPS (arg0);
9430 STRIP_SIGN_NOPS (arg1);
9431
9432 /* For comparisons of pointers we can decompose it to a compile time
9433 comparison of the base objects and the offsets into the object.
9434 This requires at least one operand being an ADDR_EXPR or a
9435 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
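/* For instance (illustrative): &s.a < &s.b with both fields in the
   same object S reduces to a comparison of the two field bit
   positions, which is folded to a constant below.  */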
9436 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9437 && (TREE_CODE (arg0) == ADDR_EXPR
9438 || TREE_CODE (arg1) == ADDR_EXPR
9439 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9440 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9441 {
9442 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9443 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9444 machine_mode mode;
9445 int volatilep, reversep, unsignedp;
9446 bool indirect_base0 = false, indirect_base1 = false;
9447
9448 /* Get base and offset for the access. Strip ADDR_EXPR for
9449 get_inner_reference, but put it back by stripping INDIRECT_REF
9450 off the base object if possible. indirect_baseN will be true
9451 if baseN is not an address but refers to the object itself. */
9452 base0 = arg0;
9453 if (TREE_CODE (arg0) == ADDR_EXPR)
9454 {
9455 base0
9456 = get_inner_reference (TREE_OPERAND (arg0, 0),
9457 &bitsize, &bitpos0, &offset0, &mode,
9458 &unsignedp, &reversep, &volatilep);
9459 if (TREE_CODE (base0) == INDIRECT_REF)
9460 base0 = TREE_OPERAND (base0, 0);
9461 else
9462 indirect_base0 = true;
9463 }
9464 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9465 {
9466 base0 = TREE_OPERAND (arg0, 0);
9467 STRIP_SIGN_NOPS (base0);
9468 if (TREE_CODE (base0) == ADDR_EXPR)
9469 {
9470 base0
9471 = get_inner_reference (TREE_OPERAND (base0, 0),
9472 &bitsize, &bitpos0, &offset0, &mode,
9473 &unsignedp, &reversep, &volatilep);
9474 if (TREE_CODE (base0) == INDIRECT_REF)
9475 base0 = TREE_OPERAND (base0, 0);
9476 else
9477 indirect_base0 = true;
9478 }
9479 if (offset0 == NULL_TREE || integer_zerop (offset0))
9480 offset0 = TREE_OPERAND (arg0, 1);
9481 else
9482 offset0 = size_binop (PLUS_EXPR, offset0,
9483 TREE_OPERAND (arg0, 1));
9484 if (poly_int_tree_p (offset0))
9485 {
9486 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9487 TYPE_PRECISION (sizetype));
9488 tem <<= LOG2_BITS_PER_UNIT;
9489 tem += bitpos0;
9490 if (tem.to_shwi (&bitpos0))
9491 offset0 = NULL_TREE;
9492 }
9493 }
9494
9495 base1 = arg1;
9496 if (TREE_CODE (arg1) == ADDR_EXPR)
9497 {
9498 base1
9499 = get_inner_reference (TREE_OPERAND (arg1, 0),
9500 &bitsize, &bitpos1, &offset1, &mode,
9501 &unsignedp, &reversep, &volatilep);
9502 if (TREE_CODE (base1) == INDIRECT_REF)
9503 base1 = TREE_OPERAND (base1, 0);
9504 else
9505 indirect_base1 = true;
9506 }
9507 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9508 {
9509 base1 = TREE_OPERAND (arg1, 0);
9510 STRIP_SIGN_NOPS (base1);
9511 if (TREE_CODE (base1) == ADDR_EXPR)
9512 {
9513 base1
9514 = get_inner_reference (TREE_OPERAND (base1, 0),
9515 &bitsize, &bitpos1, &offset1, &mode,
9516 &unsignedp, &reversep, &volatilep);
9517 if (TREE_CODE (base1) == INDIRECT_REF)
9518 base1 = TREE_OPERAND (base1, 0);
9519 else
9520 indirect_base1 = true;
9521 }
9522 if (offset1 == NULL_TREE || integer_zerop (offset1))
9523 offset1 = TREE_OPERAND (arg1, 1);
9524 else
9525 offset1 = size_binop (PLUS_EXPR, offset1,
9526 TREE_OPERAND (arg1, 1));
9527 if (poly_int_tree_p (offset1))
9528 {
9529 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
9530 TYPE_PRECISION (sizetype));
9531 tem <<= LOG2_BITS_PER_UNIT;
9532 tem += bitpos1;
9533 if (tem.to_shwi (&bitpos1))
9534 offset1 = NULL_TREE;
9535 }
9536 }
9537
9538 /* If we have equivalent bases we might be able to simplify. */
9539 if (indirect_base0 == indirect_base1
9540 && operand_equal_p (base0, base1,
9541 indirect_base0 ? OEP_ADDRESS_OF : 0))
9542 {
9543 /* We can fold this expression to a constant if the non-constant
9544 offset parts are equal. */
9545 if ((offset0 == offset1
9546 || (offset0 && offset1
9547 && operand_equal_p (offset0, offset1, 0)))
9548 && (equality_code
9549 || (indirect_base0
9550 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9551 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9552 {
9553 if (!equality_code
9554 && maybe_ne (bitpos0, bitpos1)
9555 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9556 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9557 fold_overflow_warning (("assuming pointer wraparound does not "
9558 "occur when comparing P +- C1 with "
9559 "P +- C2"),
9560 WARN_STRICT_OVERFLOW_CONDITIONAL);
9561
9562 switch (code)
9563 {
9564 case EQ_EXPR:
9565 if (known_eq (bitpos0, bitpos1))
9566 return constant_boolean_node (true, type);
9567 if (known_ne (bitpos0, bitpos1))
9568 return constant_boolean_node (false, type);
9569 break;
9570 case NE_EXPR:
9571 if (known_ne (bitpos0, bitpos1))
9572 return constant_boolean_node (true, type);
9573 if (known_eq (bitpos0, bitpos1))
9574 return constant_boolean_node (false, type);
9575 break;
9576 case LT_EXPR:
9577 if (known_lt (bitpos0, bitpos1))
9578 return constant_boolean_node (true, type);
9579 if (known_ge (bitpos0, bitpos1))
9580 return constant_boolean_node (false, type);
9581 break;
9582 case LE_EXPR:
9583 if (known_le (bitpos0, bitpos1))
9584 return constant_boolean_node (true, type);
9585 if (known_gt (bitpos0, bitpos1))
9586 return constant_boolean_node (false, type);
9587 break;
9588 case GE_EXPR:
9589 if (known_ge (bitpos0, bitpos1))
9590 return constant_boolean_node (true, type);
9591 if (known_lt (bitpos0, bitpos1))
9592 return constant_boolean_node (false, type);
9593 break;
9594 case GT_EXPR:
9595 if (known_gt (bitpos0, bitpos1))
9596 return constant_boolean_node (true, type);
9597 if (known_le (bitpos0, bitpos1))
9598 return constant_boolean_node (false, type);
9599 break;
9600 default:;
9601 }
9602 }
9603 /* We can simplify the comparison to a comparison of the variable
9604 offset parts if the constant offset parts are equal.
9605 Be careful to use signed sizetype here because otherwise we
9606 mess with array offsets in the wrong way. This is possible
9607 because pointer arithmetic is restricted to remain within an
9608 object and overflow on pointer differences is undefined as of
9609 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9610 else if (known_eq (bitpos0, bitpos1)
9611 && (equality_code
9612 || (indirect_base0
9613 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9614 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9615 {
9616 /* By converting to signed sizetype we cover middle-end pointer
9617 arithmetic which operates on unsigned pointer types of size
9618 type size and ARRAY_REF offsets which are properly sign or
9619 zero extended from their type in case it is narrower than
9620 sizetype. */
9621 if (offset0 == NULL_TREE)
9622 offset0 = build_int_cst (ssizetype, 0);
9623 else
9624 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9625 if (offset1 == NULL_TREE)
9626 offset1 = build_int_cst (ssizetype, 0);
9627 else
9628 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9629
9630 if (!equality_code
9631 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9632 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9633 fold_overflow_warning (("assuming pointer wraparound does not "
9634 "occur when comparing P +- C1 with "
9635 "P +- C2"),
9636 WARN_STRICT_OVERFLOW_COMPARISON);
9637
9638 return fold_build2_loc (loc, code, type, offset0, offset1);
9639 }
9640 }
9641 /* For equal offsets we can simplify to a comparison of the
9642 base addresses. */
9643 else if (known_eq (bitpos0, bitpos1)
9644 && (indirect_base0
9645 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9646 && (indirect_base1
9647 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9648 && ((offset0 == offset1)
9649 || (offset0 && offset1
9650 && operand_equal_p (offset0, offset1, 0))))
9651 {
9652 if (indirect_base0)
9653 base0 = build_fold_addr_expr_loc (loc, base0);
9654 if (indirect_base1)
9655 base1 = build_fold_addr_expr_loc (loc, base1);
9656 return fold_build2_loc (loc, code, type, base0, base1);
9657 }
9658 /* Comparison between an ordinary (non-weak) symbol and a null
9659 pointer can be eliminated since such symbols must have a non
9660 null address. In C, relational expressions between pointers
9661 to objects and null pointers are undefined. The results
9662 below follow the C++ rules with the additional property that
9663 every object pointer compares greater than a null pointer.
9664 */
9665 else if (((DECL_P (base0)
9666 && maybe_nonzero_address (base0) > 0
9667 /* Avoid folding references to struct members at offset 0 to
9668 prevent tests like '&ptr->firstmember == 0' from getting
9669 eliminated. When ptr is null, although the -> expression
9670 is strictly speaking invalid, GCC retains it as a matter
9671 of QoI. See PR c/44555. */
9672 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
9673 || CONSTANT_CLASS_P (base0))
9674 && indirect_base0
9675 /* The caller guarantees that when one of the arguments is
9676 constant (i.e., null in this case) it is second. */
9677 && integer_zerop (arg1))
9678 {
9679 switch (code)
9680 {
9681 case EQ_EXPR:
9682 case LE_EXPR:
9683 case LT_EXPR:
9684 return constant_boolean_node (false, type);
9685 case GE_EXPR:
9686 case GT_EXPR:
9687 case NE_EXPR:
9688 return constant_boolean_node (true, type);
9689 default:
9690 gcc_unreachable ();
9691 }
9692 }
9693 }
9694
9695 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9696 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9697 the resulting offset is smaller in absolute value than the
9698 original one and has the same sign. */
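/* For example, X + 4 < Y + 2 becomes X + 2 < Y here, since the
   combined constant is smaller in magnitude, keeps its sign and does
   not overflow.  */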
9699 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9700 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9701 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9702 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9703 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9704 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9705 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9706 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9707 {
9708 tree const1 = TREE_OPERAND (arg0, 1);
9709 tree const2 = TREE_OPERAND (arg1, 1);
9710 tree variable1 = TREE_OPERAND (arg0, 0);
9711 tree variable2 = TREE_OPERAND (arg1, 0);
9712 tree cst;
9713 const char * const warnmsg = G_("assuming signed overflow does not "
9714 "occur when combining constants around "
9715 "a comparison");
9716
9717 /* Put the constant on the side where it doesn't overflow and is
9718 of lower absolute value and of the same sign as before. */
9719 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9720 ? MINUS_EXPR : PLUS_EXPR,
9721 const2, const1);
9722 if (!TREE_OVERFLOW (cst)
9723 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9724 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9725 {
9726 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9727 return fold_build2_loc (loc, code, type,
9728 variable1,
9729 fold_build2_loc (loc, TREE_CODE (arg1),
9730 TREE_TYPE (arg1),
9731 variable2, cst));
9732 }
9733
9734 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9735 ? MINUS_EXPR : PLUS_EXPR,
9736 const1, const2);
9737 if (!TREE_OVERFLOW (cst)
9738 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9739 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9740 {
9741 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9742 return fold_build2_loc (loc, code, type,
9743 fold_build2_loc (loc, TREE_CODE (arg0),
9744 TREE_TYPE (arg0),
9745 variable1, cst),
9746 variable2);
9747 }
9748 }
9749
9750 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9751 if (tem)
9752 return tem;
9753
9754 /* If we are comparing an expression that just has comparisons
9755 of two integer values, arithmetic expressions of those comparisons,
9756 and constants, we can simplify it. There are only three cases
9757 to check: the two values can either be equal, the first can be
9758 greater, or the second can be greater. Fold the expression for
9759 those three values. Since each value must be 0 or 1, we have
9760 eight possibilities, each of which corresponds to the constant 0
9761 or 1 or one of the six possible comparisons.
9762
9763 This handles common cases like (a > b) == 0 but also handles
9764 expressions like ((x > y) - (y > x)) > 0, which supposedly
9765 occur in macroized code. */
9766
9767 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9768 {
9769 tree cval1 = 0, cval2 = 0;
9770
9771 if (twoval_comparison_p (arg0, &cval1, &cval2)
9772 /* Don't handle degenerate cases here; they should already
9773 have been handled anyway. */
9774 && cval1 != 0 && cval2 != 0
9775 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9776 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9777 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9778 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9779 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9780 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9781 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9782 {
9783 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9784 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9785
9786 /* We can't just pass T to eval_subst in case cval1 or cval2
9787 was the same as ARG1. */
9788
9789 tree high_result
9790 = fold_build2_loc (loc, code, type,
9791 eval_subst (loc, arg0, cval1, maxval,
9792 cval2, minval),
9793 arg1);
9794 tree equal_result
9795 = fold_build2_loc (loc, code, type,
9796 eval_subst (loc, arg0, cval1, maxval,
9797 cval2, maxval),
9798 arg1);
9799 tree low_result
9800 = fold_build2_loc (loc, code, type,
9801 eval_subst (loc, arg0, cval1, minval,
9802 cval2, maxval),
9803 arg1);
9804
9805 /* All three of these results should be 0 or 1. Confirm they are.
9806 Then use those values to select the proper code to use. */
9807
9808 if (TREE_CODE (high_result) == INTEGER_CST
9809 && TREE_CODE (equal_result) == INTEGER_CST
9810 && TREE_CODE (low_result) == INTEGER_CST)
9811 {
9812 /* Make a 3-bit mask with the high-order bit being the
9813 value for `>', the next for '=', and the low for '<'. */
9814 switch ((integer_onep (high_result) * 4)
9815 + (integer_onep (equal_result) * 2)
9816 + integer_onep (low_result))
9817 {
9818 case 0:
9819 /* Always false. */
9820 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9821 case 1:
9822 code = LT_EXPR;
9823 break;
9824 case 2:
9825 code = EQ_EXPR;
9826 break;
9827 case 3:
9828 code = LE_EXPR;
9829 break;
9830 case 4:
9831 code = GT_EXPR;
9832 break;
9833 case 5:
9834 code = NE_EXPR;
9835 break;
9836 case 6:
9837 code = GE_EXPR;
9838 break;
9839 case 7:
9840 /* Always true. */
9841 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9842 }
9843
9844 return fold_build2_loc (loc, code, type, cval1, cval2);
9845 }
9846 }
9847 }
9848
9849 return NULL_TREE;
9850 }
9851
9852
9853 /* Subroutine of fold_binary. Optimize complex multiplications of the
9854 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9855 argument EXPR represents the expression "z" of type TYPE. */
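/* The underlying identity: (a + b*i) * (a - b*i) = a*a + b*b + 0*i,
   so the imaginary part of the product is always zero.  */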
9856
9857 static tree
9858 fold_mult_zconjz (location_t loc, tree type, tree expr)
9859 {
9860 tree itype = TREE_TYPE (type);
9861 tree rpart, ipart, tem;
9862
9863 if (TREE_CODE (expr) == COMPLEX_EXPR)
9864 {
9865 rpart = TREE_OPERAND (expr, 0);
9866 ipart = TREE_OPERAND (expr, 1);
9867 }
9868 else if (TREE_CODE (expr) == COMPLEX_CST)
9869 {
9870 rpart = TREE_REALPART (expr);
9871 ipart = TREE_IMAGPART (expr);
9872 }
9873 else
9874 {
9875 expr = save_expr (expr);
9876 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9877 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9878 }
9879
9880 rpart = save_expr (rpart);
9881 ipart = save_expr (ipart);
9882 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9883 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9884 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9885 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9886 build_zero_cst (itype));
9887 }
9888
9889
9890 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9891 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
9892 true if successful. */
9893
9894 static bool
9895 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
9896 {
9897 unsigned HOST_WIDE_INT i, nunits;
9898
9899 if (TREE_CODE (arg) == VECTOR_CST
9900 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
9901 {
9902 for (i = 0; i < nunits; ++i)
9903 elts[i] = VECTOR_CST_ELT (arg, i);
9904 }
9905 else if (TREE_CODE (arg) == CONSTRUCTOR)
9906 {
9907 constructor_elt *elt;
9908
9909 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9910 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9911 return false;
9912 else
9913 elts[i] = elt->value;
9914 }
9915 else
9916 return false;
9917 for (; i < nelts; i++)
9918 elts[i]
9919 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9920 return true;
9921 }
9922
9923 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9924 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9925 NULL_TREE otherwise. */
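/* For example (illustrative), with ARG0 = { 1, 2, 3, 4 },
   ARG1 = { 5, 6, 7, 8 } and SEL = { 0, 4, 1, 5 } the result is
   { 1, 5, 2, 6 }; selector indices of NELTS and above pick elements
   from ARG1.  */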
9926
9927 tree
9928 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9929 {
9930 unsigned int i;
9931 unsigned HOST_WIDE_INT nelts;
9932 bool need_ctor = false;
9933
9934 if (!sel.length ().is_constant (&nelts))
9935 return NULL_TREE;
9936 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9937 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9938 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9939 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9940 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9941 return NULL_TREE;
9942
9943 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9944 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9945 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9946 return NULL_TREE;
9947
9948 tree_vector_builder out_elts (type, nelts, 1);
9949 for (i = 0; i < nelts; i++)
9950 {
9951 HOST_WIDE_INT index;
9952 if (!sel[i].is_constant (&index))
9953 return NULL_TREE;
9954 if (!CONSTANT_CLASS_P (in_elts[index]))
9955 need_ctor = true;
9956 out_elts.quick_push (unshare_expr (in_elts[index]));
9957 }
9958
9959 if (need_ctor)
9960 {
9961 vec<constructor_elt, va_gc> *v;
9962 vec_alloc (v, nelts);
9963 for (i = 0; i < nelts; i++)
9964 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9965 return build_constructor (type, v);
9966 }
9967 else
9968 return out_elts.build ();
9969 }
9970
9971 /* Try to fold a pointer difference of type TYPE between two address expressions of
9972 array references AREF0 and AREF1 using location LOC. Return a
9973 simplified expression for the difference or NULL_TREE. */
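/* For instance, for &a[i] and &a[j] with the same base A the result is
   (i - j) * sizeof (a[0]), a byte offset, plus whatever difference the
   recursion computes for the bases themselves.  */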
9974
9975 static tree
9976 fold_addr_of_array_ref_difference (location_t loc, tree type,
9977 tree aref0, tree aref1,
9978 bool use_pointer_diff)
9979 {
9980 tree base0 = TREE_OPERAND (aref0, 0);
9981 tree base1 = TREE_OPERAND (aref1, 0);
9982 tree base_offset = build_int_cst (type, 0);
9983
9984 /* If the bases are array references as well, recurse. If the bases
9985 are pointer indirections, compute the difference of the pointers.
9986 If the bases are equal, we are set. */
9987 if ((TREE_CODE (base0) == ARRAY_REF
9988 && TREE_CODE (base1) == ARRAY_REF
9989 && (base_offset
9990 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9991 use_pointer_diff)))
9992 || (INDIRECT_REF_P (base0)
9993 && INDIRECT_REF_P (base1)
9994 && (base_offset
9995 = use_pointer_diff
9996 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9997 TREE_OPERAND (base0, 0),
9998 TREE_OPERAND (base1, 0))
9999 : fold_binary_loc (loc, MINUS_EXPR, type,
10000 fold_convert (type,
10001 TREE_OPERAND (base0, 0)),
10002 fold_convert (type,
10003 TREE_OPERAND (base1, 0)))))
10004 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10005 {
10006 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10007 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10008 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10009 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10010 return fold_build2_loc (loc, PLUS_EXPR, type,
10011 base_offset,
10012 fold_build2_loc (loc, MULT_EXPR, type,
10013 diff, esz));
10014 }
10015 return NULL_TREE;
10016 }
10017
10018 /* If the real or vector real constant CST of type TYPE has an exact
10019 inverse, return it, else return NULL. */
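/* For example, 4.0 has the exact inverse 0.25, while 3.0 has none,
   since 1/3 is not exactly representable in binary floating point.  */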
10020
10021 tree
10022 exact_inverse (tree type, tree cst)
10023 {
10024 REAL_VALUE_TYPE r;
10025 tree unit_type;
10026 machine_mode mode;
10027
10028 switch (TREE_CODE (cst))
10029 {
10030 case REAL_CST:
10031 r = TREE_REAL_CST (cst);
10032
10033 if (exact_real_inverse (TYPE_MODE (type), &r))
10034 return build_real (type, r);
10035
10036 return NULL_TREE;
10037
10038 case VECTOR_CST:
10039 {
10040 unit_type = TREE_TYPE (type);
10041 mode = TYPE_MODE (unit_type);
10042
10043 tree_vector_builder elts;
10044 if (!elts.new_unary_operation (type, cst, false))
10045 return NULL_TREE;
10046 unsigned int count = elts.encoded_nelts ();
10047 for (unsigned int i = 0; i < count; ++i)
10048 {
10049 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10050 if (!exact_real_inverse (mode, &r))
10051 return NULL_TREE;
10052 elts.quick_push (build_real (unit_type, r));
10053 }
10054
10055 return elts.build ();
10056 }
10057
10058 default:
10059 return NULL_TREE;
10060 }
10061 }
10062
10063 /* Mask out the tz least significant bits of X of type TYPE where
10064 tz is the number of trailing zeroes in Y. */
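/* For example, with X = 0b10111 and Y = 0b01100 (two trailing zeros),
   the result is 0b10100.  */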
10065 static wide_int
10066 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10067 {
10068 int tz = wi::ctz (y);
10069 if (tz > 0)
10070 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10071 return x;
10072 }
10073
10074 /* Return true when T is an address and is known to be nonzero.
10075 For floating point we further ensure that T is not denormal.
10076 Similar logic is present in nonzero_address in rtlanal.h.
10077
10078 If the return value is based on the assumption that signed overflow
10079 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10080 change *STRICT_OVERFLOW_P. */
10081
10082 static bool
10083 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10084 {
10085 tree type = TREE_TYPE (t);
10086 enum tree_code code;
10087
10088 /* Doing something useful for floating point would need more work. */
10089 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10090 return false;
10091
10092 code = TREE_CODE (t);
10093 switch (TREE_CODE_CLASS (code))
10094 {
10095 case tcc_unary:
10096 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10097 strict_overflow_p);
10098 case tcc_binary:
10099 case tcc_comparison:
10100 return tree_binary_nonzero_warnv_p (code, type,
10101 TREE_OPERAND (t, 0),
10102 TREE_OPERAND (t, 1),
10103 strict_overflow_p);
10104 case tcc_constant:
10105 case tcc_declaration:
10106 case tcc_reference:
10107 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10108
10109 default:
10110 break;
10111 }
10112
10113 switch (code)
10114 {
10115 case TRUTH_NOT_EXPR:
10116 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10117 strict_overflow_p);
10118
10119 case TRUTH_AND_EXPR:
10120 case TRUTH_OR_EXPR:
10121 case TRUTH_XOR_EXPR:
10122 return tree_binary_nonzero_warnv_p (code, type,
10123 TREE_OPERAND (t, 0),
10124 TREE_OPERAND (t, 1),
10125 strict_overflow_p);
10126
10127 case COND_EXPR:
10128 case CONSTRUCTOR:
10129 case OBJ_TYPE_REF:
10130 case ASSERT_EXPR:
10131 case ADDR_EXPR:
10132 case WITH_SIZE_EXPR:
10133 case SSA_NAME:
10134 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10135
10136 case COMPOUND_EXPR:
10137 case MODIFY_EXPR:
10138 case BIND_EXPR:
10139 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10140 strict_overflow_p);
10141
10142 case SAVE_EXPR:
10143 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10144 strict_overflow_p);
10145
10146 case CALL_EXPR:
10147 {
10148 tree fndecl = get_callee_fndecl (t);
10149 if (!fndecl) return false;
10150 if (flag_delete_null_pointer_checks && !flag_check_new
10151 && DECL_IS_OPERATOR_NEW_P (fndecl)
10152 && !TREE_NOTHROW (fndecl))
10153 return true;
10154 if (flag_delete_null_pointer_checks
10155 && lookup_attribute ("returns_nonnull",
10156 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10157 return true;
10158 return alloca_call_p (t);
10159 }
10160
10161 default:
10162 break;
10163 }
10164 return false;
10165 }
10166
10167 /* Return true when T is an address and is known to be nonzero.
10168 Handle warnings about undefined signed overflow. */
10169
10170 bool
10171 tree_expr_nonzero_p (tree t)
10172 {
10173 bool ret, strict_overflow_p;
10174
10175 strict_overflow_p = false;
10176 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10177 if (strict_overflow_p)
10178 fold_overflow_warning (("assuming signed overflow does not occur when "
10179 "determining that expression is always "
10180 "non-zero"),
10181 WARN_STRICT_OVERFLOW_MISC);
10182 return ret;
10183 }
10184
10185 /* Return true if T is known not to be equal to an integer W. */
10186
10187 bool
10188 expr_not_equal_to (tree t, const wide_int &w)
10189 {
10190 wide_int min, max, nz;
10191 value_range_kind rtype;
10192 switch (TREE_CODE (t))
10193 {
10194 case INTEGER_CST:
10195 return wi::to_wide (t) != w;
10196
10197 case SSA_NAME:
10198 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10199 return false;
10200 rtype = get_range_info (t, &min, &max);
10201 if (rtype == VR_RANGE)
10202 {
10203 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
10204 return true;
10205 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
10206 return true;
10207 }
10208 else if (rtype == VR_ANTI_RANGE
10209 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
10210 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
10211 return true;
10212 /* If T has some known zero bits and W has any of those bits set,
10213 then T is known not to be equal to W. */
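/* For instance, a value whose two low bits are known to be zero
   (a multiple of four) cannot be equal to 7.  */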
10214 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10215 TYPE_PRECISION (TREE_TYPE (t))), 0))
10216 return true;
10217 return false;
10218
10219 default:
10220 return false;
10221 }
10222 }
10223
10224 /* Fold a binary expression of code CODE and type TYPE with operands
10225 OP0 and OP1. LOC is the location of the resulting expression.
10226 Return the folded expression if folding is successful. Otherwise,
10227 return NULL_TREE. */
10228
10229 tree
10230 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10231 tree op0, tree op1)
10232 {
10233 enum tree_code_class kind = TREE_CODE_CLASS (code);
10234 tree arg0, arg1, tem;
10235 tree t1 = NULL_TREE;
10236 bool strict_overflow_p;
10237 unsigned int prec;
10238
10239 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10240 && TREE_CODE_LENGTH (code) == 2
10241 && op0 != NULL_TREE
10242 && op1 != NULL_TREE);
10243
10244 arg0 = op0;
10245 arg1 = op1;
10246
10247 /* Strip any conversions that don't change the mode. This is
10248 safe for every expression, except for a comparison expression
10249 because its signedness is derived from its operands. So, in
10250 the latter case, only strip conversions that don't change the
10251 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10252 preserved.
10253
10254 Note that this is done as an internal manipulation within the
10255 constant folder, in order to find the simplest representation
10256 of the arguments so that their form can be studied. In any
10257 case, the appropriate type conversions should be put back in
10258 the tree that will get out of the constant folder. */
10259
10260 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10261 {
10262 STRIP_SIGN_NOPS (arg0);
10263 STRIP_SIGN_NOPS (arg1);
10264 }
10265 else
10266 {
10267 STRIP_NOPS (arg0);
10268 STRIP_NOPS (arg1);
10269 }
10270
10271 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10272 constant but we can't do arithmetic on them. */
10273 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10274 {
10275 tem = const_binop (code, type, arg0, arg1);
10276 if (tem != NULL_TREE)
10277 {
10278 if (TREE_TYPE (tem) != type)
10279 tem = fold_convert_loc (loc, type, tem);
10280 return tem;
10281 }
10282 }
10283
10284 /* If this is a commutative operation, and ARG0 is a constant, move it
10285 to ARG1 to reduce the number of tests below. */
10286 if (commutative_tree_code (code)
10287 && tree_swap_operands_p (arg0, arg1))
10288 return fold_build2_loc (loc, code, type, op1, op0);
10289
10290 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10291 to ARG1 to reduce the number of tests below. */
10292 if (kind == tcc_comparison
10293 && tree_swap_operands_p (arg0, arg1))
10294 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10295
10296 tem = generic_simplify (loc, code, type, op0, op1);
10297 if (tem)
10298 return tem;
10299
10300 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10301
10302 First check for cases where an arithmetic operation is applied to a
10303 compound, conditional, or comparison operation. Push the arithmetic
10304 operation inside the compound or conditional to see if any folding
10305 can then be done. Convert comparison to conditional for this purpose.
10306 This also optimizes non-constant cases that used to be done in
10307 expand_expr.
10308
10309 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
10310 one of the operands is a comparison and the other is a comparison, a
10311 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10312 code below would make the expression more complex. Change it to a
10313 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10314 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10315
10316 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10317 || code == EQ_EXPR || code == NE_EXPR)
10318 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10319 && ((truth_value_p (TREE_CODE (arg0))
10320 && (truth_value_p (TREE_CODE (arg1))
10321 || (TREE_CODE (arg1) == BIT_AND_EXPR
10322 && integer_onep (TREE_OPERAND (arg1, 1)))))
10323 || (truth_value_p (TREE_CODE (arg1))
10324 && (truth_value_p (TREE_CODE (arg0))
10325 || (TREE_CODE (arg0) == BIT_AND_EXPR
10326 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10327 {
10328 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10329 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10330 : TRUTH_XOR_EXPR,
10331 boolean_type_node,
10332 fold_convert_loc (loc, boolean_type_node, arg0),
10333 fold_convert_loc (loc, boolean_type_node, arg1));
10334
10335 if (code == EQ_EXPR)
10336 tem = invert_truthvalue_loc (loc, tem);
10337
10338 return fold_convert_loc (loc, type, tem);
10339 }
10340
10341 if (TREE_CODE_CLASS (code) == tcc_binary
10342 || TREE_CODE_CLASS (code) == tcc_comparison)
10343 {
10344 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10345 {
10346 tem = fold_build2_loc (loc, code, type,
10347 fold_convert_loc (loc, TREE_TYPE (op0),
10348 TREE_OPERAND (arg0, 1)), op1);
10349 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10350 tem);
10351 }
10352 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10353 {
10354 tem = fold_build2_loc (loc, code, type, op0,
10355 fold_convert_loc (loc, TREE_TYPE (op1),
10356 TREE_OPERAND (arg1, 1)));
10357 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10358 tem);
10359 }
10360
10361 if (TREE_CODE (arg0) == COND_EXPR
10362 || TREE_CODE (arg0) == VEC_COND_EXPR
10363 || COMPARISON_CLASS_P (arg0))
10364 {
10365 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10366 arg0, arg1,
10367 /*cond_first_p=*/1);
10368 if (tem != NULL_TREE)
10369 return tem;
10370 }
10371
10372 if (TREE_CODE (arg1) == COND_EXPR
10373 || TREE_CODE (arg1) == VEC_COND_EXPR
10374 || COMPARISON_CLASS_P (arg1))
10375 {
10376 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10377 arg1, arg0,
10378 /*cond_first_p=*/0);
10379 if (tem != NULL_TREE)
10380 return tem;
10381 }
10382 }
10383
10384 switch (code)
10385 {
10386 case MEM_REF:
10387 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10388 if (TREE_CODE (arg0) == ADDR_EXPR
10389 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10390 {
10391 tree iref = TREE_OPERAND (arg0, 0);
10392 return fold_build2 (MEM_REF, type,
10393 TREE_OPERAND (iref, 0),
10394 int_const_binop (PLUS_EXPR, arg1,
10395 TREE_OPERAND (iref, 1)));
10396 }
10397
10398 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10399 if (TREE_CODE (arg0) == ADDR_EXPR
10400 && handled_component_p (TREE_OPERAND (arg0, 0)))
10401 {
10402 tree base;
10403 poly_int64 coffset;
10404 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10405 &coffset);
10406 if (!base)
10407 return NULL_TREE;
10408 return fold_build2 (MEM_REF, type,
10409 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10410 int_const_binop (PLUS_EXPR, arg1,
10411 size_int (coffset)));
10412 }
10413
10414 return NULL_TREE;
10415
10416 case POINTER_PLUS_EXPR:
10417 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10418 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10419 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10420 return fold_convert_loc (loc, type,
10421 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10422 fold_convert_loc (loc, sizetype,
10423 arg1),
10424 fold_convert_loc (loc, sizetype,
10425 arg0)));
10426
10427 return NULL_TREE;
10428
10429 case PLUS_EXPR:
10430 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10431 {
10432 /* X + (X / CST) * -CST is X % CST. */
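/* For instance, with X = 7 and CST = 3: 7 + (7 / 3) * -3 = 7 - 6 = 1,
which is 7 % 3 under truncating division.  */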
10433 if (TREE_CODE (arg1) == MULT_EXPR
10434 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10435 && operand_equal_p (arg0,
10436 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10437 {
10438 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10439 tree cst1 = TREE_OPERAND (arg1, 1);
10440 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10441 cst1, cst0);
10442 if (sum && integer_zerop (sum))
10443 return fold_convert_loc (loc, type,
10444 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10445 TREE_TYPE (arg0), arg0,
10446 cst0));
10447 }
10448 }
10449
10450 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10451 one. Make sure the type is not saturating and has the signedness of
10452 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10453 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10454 if ((TREE_CODE (arg0) == MULT_EXPR
10455 || TREE_CODE (arg1) == MULT_EXPR)
10456 && !TYPE_SATURATING (type)
10457 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10458 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10459 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10460 {
10461 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10462 if (tem)
10463 return tem;
10464 }
10465
10466 if (! FLOAT_TYPE_P (type))
10467 {
10468 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10469 (plus (plus (mult) (mult)) (foo)) so that we can
10470 take advantage of the factoring cases below. */
10471 if (ANY_INTEGRAL_TYPE_P (type)
10472 && TYPE_OVERFLOW_WRAPS (type)
10473 && (((TREE_CODE (arg0) == PLUS_EXPR
10474 || TREE_CODE (arg0) == MINUS_EXPR)
10475 && TREE_CODE (arg1) == MULT_EXPR)
10476 || ((TREE_CODE (arg1) == PLUS_EXPR
10477 || TREE_CODE (arg1) == MINUS_EXPR)
10478 && TREE_CODE (arg0) == MULT_EXPR)))
10479 {
10480 tree parg0, parg1, parg, marg;
10481 enum tree_code pcode;
10482
10483 if (TREE_CODE (arg1) == MULT_EXPR)
10484 parg = arg0, marg = arg1;
10485 else
10486 parg = arg1, marg = arg0;
10487 pcode = TREE_CODE (parg);
10488 parg0 = TREE_OPERAND (parg, 0);
10489 parg1 = TREE_OPERAND (parg, 1);
10490 STRIP_NOPS (parg0);
10491 STRIP_NOPS (parg1);
10492
10493 if (TREE_CODE (parg0) == MULT_EXPR
10494 && TREE_CODE (parg1) != MULT_EXPR)
10495 return fold_build2_loc (loc, pcode, type,
10496 fold_build2_loc (loc, PLUS_EXPR, type,
10497 fold_convert_loc (loc, type,
10498 parg0),
10499 fold_convert_loc (loc, type,
10500 marg)),
10501 fold_convert_loc (loc, type, parg1));
10502 if (TREE_CODE (parg0) != MULT_EXPR
10503 && TREE_CODE (parg1) == MULT_EXPR)
10504 return
10505 fold_build2_loc (loc, PLUS_EXPR, type,
10506 fold_convert_loc (loc, type, parg0),
10507 fold_build2_loc (loc, pcode, type,
10508 fold_convert_loc (loc, type, marg),
10509 fold_convert_loc (loc, type,
10510 parg1)));
10511 }
10512 }
10513 else
10514 {
10515 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10516 to __complex__ ( x, y ). This is not the same for SNaNs or
10517 if signed zeros are involved. */
10518 if (!HONOR_SNANS (element_mode (arg0))
10519 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10520 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10521 {
10522 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10523 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10524 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10525 bool arg0rz = false, arg0iz = false;
10526 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10527 || (arg0i && (arg0iz = real_zerop (arg0i))))
10528 {
10529 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10530 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10531 if (arg0rz && arg1i && real_zerop (arg1i))
10532 {
10533 tree rp = arg1r ? arg1r
10534 : build1 (REALPART_EXPR, rtype, arg1);
10535 tree ip = arg0i ? arg0i
10536 : build1 (IMAGPART_EXPR, rtype, arg0);
10537 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10538 }
10539 else if (arg0iz && arg1r && real_zerop (arg1r))
10540 {
10541 tree rp = arg0r ? arg0r
10542 : build1 (REALPART_EXPR, rtype, arg0);
10543 tree ip = arg1i ? arg1i
10544 : build1 (IMAGPART_EXPR, rtype, arg1);
10545 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10546 }
10547 }
10548 }
10549
10550 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10551 We associate floats only if the user has specified
10552 -fassociative-math. */
10553 if (flag_associative_math
10554 && TREE_CODE (arg1) == PLUS_EXPR
10555 && TREE_CODE (arg0) != MULT_EXPR)
10556 {
10557 tree tree10 = TREE_OPERAND (arg1, 0);
10558 tree tree11 = TREE_OPERAND (arg1, 1);
10559 if (TREE_CODE (tree11) == MULT_EXPR
10560 && TREE_CODE (tree10) == MULT_EXPR)
10561 {
10562 tree tree0;
10563 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10564 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10565 }
10566 }
10567 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10568 We associate floats only if the user has specified
10569 -fassociative-math. */
10570 if (flag_associative_math
10571 && TREE_CODE (arg0) == PLUS_EXPR
10572 && TREE_CODE (arg1) != MULT_EXPR)
10573 {
10574 tree tree00 = TREE_OPERAND (arg0, 0);
10575 tree tree01 = TREE_OPERAND (arg0, 1);
10576 if (TREE_CODE (tree01) == MULT_EXPR
10577 && TREE_CODE (tree00) == MULT_EXPR)
10578 {
10579 tree tree0;
10580 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10581 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10582 }
10583 }
10584 }
10585
10586 bit_rotate:
10587 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10588 is a rotate of A by C1 bits. */
10589 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10590 is a rotate of A by B bits.
10591 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
10592 though in this case CODE must be | and not + or ^, otherwise
10593 it doesn't return A when B is 0. */
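/* For example, with a 32-bit unsigned A, (A << 3) + (A >> 29) is A
rotated left by 3, and (A << B) | (A >> (-B & 31)) is A rotated left
by B, which is still A when B is 0.  */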
10594 {
10595 enum tree_code code0, code1;
10596 tree rtype;
10597 code0 = TREE_CODE (arg0);
10598 code1 = TREE_CODE (arg1);
10599 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10600 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10601 && operand_equal_p (TREE_OPERAND (arg0, 0),
10602 TREE_OPERAND (arg1, 0), 0)
10603 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10604 TYPE_UNSIGNED (rtype))
10605 /* Only create rotates in complete modes. Other cases are not
10606 expanded properly. */
10607 && (element_precision (rtype)
10608 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
10609 {
10610 tree tree01, tree11;
10611 tree orig_tree01, orig_tree11;
10612 enum tree_code code01, code11;
10613
10614 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
10615 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
10616 STRIP_NOPS (tree01);
10617 STRIP_NOPS (tree11);
10618 code01 = TREE_CODE (tree01);
10619 code11 = TREE_CODE (tree11);
10620 if (code11 != MINUS_EXPR
10621 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
10622 {
10623 std::swap (code0, code1);
10624 std::swap (code01, code11);
10625 std::swap (tree01, tree11);
10626 std::swap (orig_tree01, orig_tree11);
10627 }
10628 if (code01 == INTEGER_CST
10629 && code11 == INTEGER_CST
10630 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10631 == element_precision (rtype)))
10632 {
10633 tem = build2_loc (loc, LROTATE_EXPR,
10634 rtype, TREE_OPERAND (arg0, 0),
10635 code0 == LSHIFT_EXPR
10636 ? orig_tree01 : orig_tree11);
10637 return fold_convert_loc (loc, type, tem);
10638 }
10639 else if (code11 == MINUS_EXPR)
10640 {
10641 tree tree110, tree111;
10642 tree110 = TREE_OPERAND (tree11, 0);
10643 tree111 = TREE_OPERAND (tree11, 1);
10644 STRIP_NOPS (tree110);
10645 STRIP_NOPS (tree111);
10646 if (TREE_CODE (tree110) == INTEGER_CST
10647 && compare_tree_int (tree110,
10648 element_precision (rtype)) == 0
10649 && operand_equal_p (tree01, tree111, 0))
10650 {
10651 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10652 ? LROTATE_EXPR : RROTATE_EXPR),
10653 rtype, TREE_OPERAND (arg0, 0),
10654 orig_tree01);
10655 return fold_convert_loc (loc, type, tem);
10656 }
10657 }
10658 else if (code == BIT_IOR_EXPR
10659 && code11 == BIT_AND_EXPR
10660 && pow2p_hwi (element_precision (rtype)))
10661 {
10662 tree tree110, tree111;
10663 tree110 = TREE_OPERAND (tree11, 0);
10664 tree111 = TREE_OPERAND (tree11, 1);
10665 STRIP_NOPS (tree110);
10666 STRIP_NOPS (tree111);
10667 if (TREE_CODE (tree110) == NEGATE_EXPR
10668 && TREE_CODE (tree111) == INTEGER_CST
10669 && compare_tree_int (tree111,
10670 element_precision (rtype) - 1) == 0
10671 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
10672 {
10673 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10674 ? LROTATE_EXPR : RROTATE_EXPR),
10675 rtype, TREE_OPERAND (arg0, 0),
10676 orig_tree01);
10677 return fold_convert_loc (loc, type, tem);
10678 }
10679 }
10680 }
10681 }
10682
10683 associate:
10684 /* In most languages, we can't associate operations on floats through
10685 parentheses. Rather than remember where the parentheses were, we
10686 don't associate floats at all, unless the user has specified
10687 -fassociative-math.
10688 And, we need to make sure type is not saturating. */
10689
10690 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10691 && !TYPE_SATURATING (type))
10692 {
10693 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
10694 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
10695 tree atype = type;
10696 bool ok = true;
10697
10698 /* Split both trees into variables, constants, and literals. Then
10699 associate each group together, the constants with literals,
10700 then the result with variables. This increases the chances of
10701 literals being recombined later and of generating relocatable
10702 expressions for the sum of a constant and literal. */
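/* For example, in an unsigned type (a + 5) - (b - 3) splits into the
variables a and b and the literals 5 and 3, so the two literals can
be combined into a single constant.  */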
10703 var0 = split_tree (arg0, type, code,
10704 &minus_var0, &con0, &minus_con0,
10705 &lit0, &minus_lit0, 0);
10706 var1 = split_tree (arg1, type, code,
10707 &minus_var1, &con1, &minus_con1,
10708 &lit1, &minus_lit1, code == MINUS_EXPR);
10709
10710 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10711 if (code == MINUS_EXPR)
10712 code = PLUS_EXPR;
10713
10714 /* With undefined overflow prefer doing association in a type
10715 which wraps on overflow, if that is one of the operand types. */
10716 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
10717 && !TYPE_OVERFLOW_WRAPS (type))
10718 {
10719 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10720 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10721 atype = TREE_TYPE (arg0);
10722 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10723 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10724 atype = TREE_TYPE (arg1);
10725 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10726 }
10727
10728 /* With undefined overflow we can only associate constants with one
10729 variable, and constants whose association doesn't overflow. */
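/* For example, with signed int (x + 1) + (y + 2) is left alone because
it involves two distinct variables, whereas (x + 1) - (x - 2) may
still be reassociated because the variables cancel.  */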
10730 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
10731 && !TYPE_OVERFLOW_WRAPS (atype))
10732 {
10733 if ((var0 && var1) || (minus_var0 && minus_var1))
10734 {
10735 /* ??? If split_tree would handle NEGATE_EXPR we could
10736 simply reject these cases and the allowed cases would
10737 be the var0/minus_var1 ones. */
10738 tree tmp0 = var0 ? var0 : minus_var0;
10739 tree tmp1 = var1 ? var1 : minus_var1;
10740 bool one_neg = false;
10741
10742 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10743 {
10744 tmp0 = TREE_OPERAND (tmp0, 0);
10745 one_neg = !one_neg;
10746 }
10747 if (CONVERT_EXPR_P (tmp0)
10748 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10749 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10750 <= TYPE_PRECISION (atype)))
10751 tmp0 = TREE_OPERAND (tmp0, 0);
10752 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10753 {
10754 tmp1 = TREE_OPERAND (tmp1, 0);
10755 one_neg = !one_neg;
10756 }
10757 if (CONVERT_EXPR_P (tmp1)
10758 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10759 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10760 <= TYPE_PRECISION (atype)))
10761 tmp1 = TREE_OPERAND (tmp1, 0);
10762 /* The only case we can still associate with two variables
10763 is if they cancel out. */
10764 if (!one_neg
10765 || !operand_equal_p (tmp0, tmp1, 0))
10766 ok = false;
10767 }
10768 else if ((var0 && minus_var1
10769 && ! operand_equal_p (var0, minus_var1, 0))
10770 || (minus_var0 && var1
10771 && ! operand_equal_p (minus_var0, var1, 0)))
10772 ok = false;
10773 }
10774
10775 /* Only do something if we found more than two objects. Otherwise,
10776 nothing has changed and we risk infinite recursion. */
10777 if (ok
10778 && ((var0 != 0) + (var1 != 0)
10779 + (minus_var0 != 0) + (minus_var1 != 0)
10780 + (con0 != 0) + (con1 != 0)
10781 + (minus_con0 != 0) + (minus_con1 != 0)
10782 + (lit0 != 0) + (lit1 != 0)
10783 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
10784 {
10785 var0 = associate_trees (loc, var0, var1, code, atype);
10786 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
10787 code, atype);
10788 con0 = associate_trees (loc, con0, con1, code, atype);
10789 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
10790 code, atype);
10791 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10792 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10793 code, atype);
10794
10795 if (minus_var0 && var0)
10796 {
10797 var0 = associate_trees (loc, var0, minus_var0,
10798 MINUS_EXPR, atype);
10799 minus_var0 = 0;
10800 }
10801 if (minus_con0 && con0)
10802 {
10803 con0 = associate_trees (loc, con0, minus_con0,
10804 MINUS_EXPR, atype);
10805 minus_con0 = 0;
10806 }
10807
10808 /* Preserve the MINUS_EXPR if the negative part of the literal is
10809 greater than the positive part. Otherwise, the multiplicative
10810 folding code (i.e. extract_muldiv) may be fooled when unsigned
10811 constants are subtracted, as in the following
10812 example: ((X*2 + 4) - 8U)/2. */
10813 if (minus_lit0 && lit0)
10814 {
10815 if (TREE_CODE (lit0) == INTEGER_CST
10816 && TREE_CODE (minus_lit0) == INTEGER_CST
10817 && tree_int_cst_lt (lit0, minus_lit0)
10818 /* But avoid ending up with only negated parts. */
10819 && (var0 || con0))
10820 {
10821 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10822 MINUS_EXPR, atype);
10823 lit0 = 0;
10824 }
10825 else
10826 {
10827 lit0 = associate_trees (loc, lit0, minus_lit0,
10828 MINUS_EXPR, atype);
10829 minus_lit0 = 0;
10830 }
10831 }
10832
10833 /* Don't introduce overflows through reassociation. */
10834 if ((lit0 && TREE_OVERFLOW_P (lit0))
10835 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
10836 return NULL_TREE;
10837
10838 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
10839 con0 = associate_trees (loc, con0, lit0, code, atype);
10840 lit0 = 0;
10841 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
10842 code, atype);
10843 minus_lit0 = 0;
10844
10845 /* Eliminate minus_con0. */
10846 if (minus_con0)
10847 {
10848 if (con0)
10849 con0 = associate_trees (loc, con0, minus_con0,
10850 MINUS_EXPR, atype);
10851 else if (var0)
10852 var0 = associate_trees (loc, var0, minus_con0,
10853 MINUS_EXPR, atype);
10854 else
10855 gcc_unreachable ();
10856 minus_con0 = 0;
10857 }
10858
10859 /* Eliminate minus_var0. */
10860 if (minus_var0)
10861 {
10862 if (con0)
10863 con0 = associate_trees (loc, con0, minus_var0,
10864 MINUS_EXPR, atype);
10865 else
10866 gcc_unreachable ();
10867 minus_var0 = 0;
10868 }
10869
10870 return
10871 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10872 code, atype));
10873 }
10874 }
10875
10876 return NULL_TREE;
10877
10878 case POINTER_DIFF_EXPR:
10879 case MINUS_EXPR:
10880 /* Fold &a[i] - &a[j] to i-j. */
10881 if (TREE_CODE (arg0) == ADDR_EXPR
10882 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10883 && TREE_CODE (arg1) == ADDR_EXPR
10884 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10885 {
10886 tree tem = fold_addr_of_array_ref_difference (loc, type,
10887 TREE_OPERAND (arg0, 0),
10888 TREE_OPERAND (arg1, 0),
10889 code
10890 == POINTER_DIFF_EXPR);
10891 if (tem)
10892 return tem;
10893 }
10894
10895 /* Further transformations are not for pointers. */
10896 if (code == POINTER_DIFF_EXPR)
10897 return NULL_TREE;
10898
10899 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10900 if (TREE_CODE (arg0) == NEGATE_EXPR
10901 && negate_expr_p (op1)
10902 /* If arg0 is e.g. unsigned int and type is int, then this could
10903 introduce UB, because if A is INT_MIN at runtime, the original
10904 expression can be well defined while the latter is not.
10905 See PR83269. */
10906 && !(ANY_INTEGRAL_TYPE_P (type)
10907 && TYPE_OVERFLOW_UNDEFINED (type)
10908 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10909 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10910 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
10911 fold_convert_loc (loc, type,
10912 TREE_OPERAND (arg0, 0)));
10913
10914 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10915 __complex__ ( x, -y ). This is not the same for SNaNs or if
10916 signed zeros are involved. */
10917 if (!HONOR_SNANS (element_mode (arg0))
10918 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10919 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10920 {
10921 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10922 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10923 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10924 bool arg0rz = false, arg0iz = false;
10925 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10926 || (arg0i && (arg0iz = real_zerop (arg0i))))
10927 {
10928 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10929 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10930 if (arg0rz && arg1i && real_zerop (arg1i))
10931 {
10932 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10933 arg1r ? arg1r
10934 : build1 (REALPART_EXPR, rtype, arg1));
10935 tree ip = arg0i ? arg0i
10936 : build1 (IMAGPART_EXPR, rtype, arg0);
10937 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10938 }
10939 else if (arg0iz && arg1r && real_zerop (arg1r))
10940 {
10941 tree rp = arg0r ? arg0r
10942 : build1 (REALPART_EXPR, rtype, arg0);
10943 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10944 arg1i ? arg1i
10945 : build1 (IMAGPART_EXPR, rtype, arg1));
10946 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10947 }
10948 }
10949 }
10950
10951 /* A - B -> A + (-B) if B is easily negatable. */
10952 if (negate_expr_p (op1)
10953 && ! TYPE_OVERFLOW_SANITIZED (type)
10954 && ((FLOAT_TYPE_P (type)
10955 /* Avoid this transformation if B is a positive REAL_CST. */
10956 && (TREE_CODE (op1) != REAL_CST
10957 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10958 || INTEGRAL_TYPE_P (type)))
10959 return fold_build2_loc (loc, PLUS_EXPR, type,
10960 fold_convert_loc (loc, type, arg0),
10961 negate_expr (op1));
10962
10963 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10964 one. Make sure the type is not saturating and has the signedness of
10965 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10966 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10967 if ((TREE_CODE (arg0) == MULT_EXPR
10968 || TREE_CODE (arg1) == MULT_EXPR)
10969 && !TYPE_SATURATING (type)
10970 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10971 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10972 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10973 {
10974 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10975 if (tem)
10976 return tem;
10977 }
10978
10979 goto associate;
10980
10981 case MULT_EXPR:
10982 if (! FLOAT_TYPE_P (type))
10983 {
10984 /* Transform x * -C into -x * C if x is easily negatable. */
10985 if (TREE_CODE (op1) == INTEGER_CST
10986 && tree_int_cst_sgn (op1) == -1
10987 && negate_expr_p (op0)
10988 && negate_expr_p (op1)
10989 && (tem = negate_expr (op1)) != op1
10990 && ! TREE_OVERFLOW (tem))
10991 return fold_build2_loc (loc, MULT_EXPR, type,
10992 fold_convert_loc (loc, type,
10993 negate_expr (op0)), tem);
10994
10995 strict_overflow_p = false;
10996 if (TREE_CODE (arg1) == INTEGER_CST
10997 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10998 &strict_overflow_p)) != 0)
10999 {
11000 if (strict_overflow_p)
11001 fold_overflow_warning (("assuming signed overflow does not "
11002 "occur when simplifying "
11003 "multiplication"),
11004 WARN_STRICT_OVERFLOW_MISC);
11005 return fold_convert_loc (loc, type, tem);
11006 }
11007
11008 /* Optimize z * conj(z) for integer complex numbers. */
11009 if (TREE_CODE (arg0) == CONJ_EXPR
11010 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11011 return fold_mult_zconjz (loc, type, arg1);
11012 if (TREE_CODE (arg1) == CONJ_EXPR
11013 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11014 return fold_mult_zconjz (loc, type, arg0);
11015 }
11016 else
11017 {
11018 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11019 This is not the same for NaNs or if signed zeros are
11020 involved. */
11021 if (!HONOR_NANS (arg0)
11022 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
11023 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11024 && TREE_CODE (arg1) == COMPLEX_CST
11025 && real_zerop (TREE_REALPART (arg1)))
11026 {
11027 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11028 if (real_onep (TREE_IMAGPART (arg1)))
11029 return
11030 fold_build2_loc (loc, COMPLEX_EXPR, type,
11031 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11032 rtype, arg0)),
11033 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11034 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11035 return
11036 fold_build2_loc (loc, COMPLEX_EXPR, type,
11037 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11038 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11039 rtype, arg0)));
11040 }
11041
11042 /* Optimize z * conj(z) for floating point complex numbers.
11043 Guarded by flag_unsafe_math_optimizations as non-finite
11044 imaginary components don't produce scalar results. */
11045 if (flag_unsafe_math_optimizations
11046 && TREE_CODE (arg0) == CONJ_EXPR
11047 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11048 return fold_mult_zconjz (loc, type, arg1);
11049 if (flag_unsafe_math_optimizations
11050 && TREE_CODE (arg1) == CONJ_EXPR
11051 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11052 return fold_mult_zconjz (loc, type, arg0);
11053 }
11054 goto associate;
11055
11056 case BIT_IOR_EXPR:
11057 /* Canonicalize (X & C1) | C2. */
11058 if (TREE_CODE (arg0) == BIT_AND_EXPR
11059 && TREE_CODE (arg1) == INTEGER_CST
11060 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11061 {
11062 int width = TYPE_PRECISION (type), w;
11063 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11064 wide_int c2 = wi::to_wide (arg1);
11065
11066 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11067 if ((c1 & c2) == c1)
11068 return omit_one_operand_loc (loc, type, arg1,
11069 TREE_OPERAND (arg0, 0));
11070
11071 wide_int msk = wi::mask (width, false,
11072 TYPE_PRECISION (TREE_TYPE (arg1)));
11073
11074 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11075 if (wi::bit_and_not (msk, c1 | c2) == 0)
11076 {
11077 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11078 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11079 }
11080
11081 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11082 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11083 mode which allows further optimizations. */
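/* For example, (X & 0x3f) | 0x0f becomes (X & 0x30) | 0x0f, since the
low four bits of C1 are already supplied by C2.  */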
11084 c1 &= msk;
11085 c2 &= msk;
11086 wide_int c3 = wi::bit_and_not (c1, c2);
11087 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11088 {
11089 wide_int mask = wi::mask (w, false,
11090 TYPE_PRECISION (type));
11091 if (((c1 | c2) & mask) == mask
11092 && wi::bit_and_not (c1, mask) == 0)
11093 {
11094 c3 = mask;
11095 break;
11096 }
11097 }
11098
11099 if (c3 != c1)
11100 {
11101 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11102 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11103 wide_int_to_tree (type, c3));
11104 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11105 }
11106 }
11107
11108 /* See if this can be simplified into a rotate first. If that
11109 is unsuccessful continue in the association code. */
11110 goto bit_rotate;
11111
11112 case BIT_XOR_EXPR:
11113 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11114 if (TREE_CODE (arg0) == BIT_AND_EXPR
11115 && INTEGRAL_TYPE_P (type)
11116 && integer_onep (TREE_OPERAND (arg0, 1))
11117 && integer_onep (arg1))
11118 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11119 build_zero_cst (TREE_TYPE (arg0)));
11120
11121 /* See if this can be simplified into a rotate first. If that
11122 is unsuccessful continue in the association code. */
11123 goto bit_rotate;
11124
11125 case BIT_AND_EXPR:
11126 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11127 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11128 && INTEGRAL_TYPE_P (type)
11129 && integer_onep (TREE_OPERAND (arg0, 1))
11130 && integer_onep (arg1))
11131 {
11132 tree tem2;
11133 tem = TREE_OPERAND (arg0, 0);
11134 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11135 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11136 tem, tem2);
11137 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11138 build_zero_cst (TREE_TYPE (tem)));
11139 }
11140 /* Fold ~X & 1 as (X & 1) == 0. */
11141 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11142 && INTEGRAL_TYPE_P (type)
11143 && integer_onep (arg1))
11144 {
11145 tree tem2;
11146 tem = TREE_OPERAND (arg0, 0);
11147 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11148 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11149 tem, tem2);
11150 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11151 build_zero_cst (TREE_TYPE (tem)));
11152 }
11153 /* Fold !X & 1 as X == 0. */
11154 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11155 && integer_onep (arg1))
11156 {
11157 tem = TREE_OPERAND (arg0, 0);
11158 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11159 build_zero_cst (TREE_TYPE (tem)));
11160 }
11161
11162 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11163 multiple of 1 << CST. */
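/* For example, (X * 24) & -8 is just X * 24, because 24 is a multiple
of 8 = 1 << 3.  */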
11164 if (TREE_CODE (arg1) == INTEGER_CST)
11165 {
11166 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11167 wide_int ncst1 = -cst1;
11168 if ((cst1 & ncst1) == ncst1
11169 && multiple_of_p (type, arg0,
11170 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11171 return fold_convert_loc (loc, type, arg0);
11172 }
11173
11174 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11175 bits from CST2. */
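/* For example, (X * 8) & 7 folds to 0, while (X * 4) & 7 becomes
(X * 4) & 4.  */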
11176 if (TREE_CODE (arg1) == INTEGER_CST
11177 && TREE_CODE (arg0) == MULT_EXPR
11178 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11179 {
11180 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11181 wide_int masked
11182 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11183
11184 if (masked == 0)
11185 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11186 arg0, arg1);
11187 else if (masked != warg1)
11188 {
11189 /* Avoid the transform if arg1 is a mask of some
11190 mode which allows further optimizations. */
11191 int pop = wi::popcount (warg1);
11192 if (!(pop >= BITS_PER_UNIT
11193 && pow2p_hwi (pop)
11194 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11195 return fold_build2_loc (loc, code, type, op0,
11196 wide_int_to_tree (type, masked));
11197 }
11198 }
11199
11200 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11201 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11202 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11203 {
11204 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11205
11206 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11207 if (mask == -1)
11208 return
11209 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11210 }
11211
11212 goto associate;
11213
11214 case RDIV_EXPR:
11215 /* Don't touch a floating-point divide by zero unless the mode
11216 of the constant can represent infinity. */
11217 if (TREE_CODE (arg1) == REAL_CST
11218 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11219 && real_zerop (arg1))
11220 return NULL_TREE;
11221
11222 /* (-A) / (-B) -> A / B */
11223 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11224 return fold_build2_loc (loc, RDIV_EXPR, type,
11225 TREE_OPERAND (arg0, 0),
11226 negate_expr (arg1));
11227 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11228 return fold_build2_loc (loc, RDIV_EXPR, type,
11229 negate_expr (arg0),
11230 TREE_OPERAND (arg1, 0));
11231 return NULL_TREE;
11232
11233 case TRUNC_DIV_EXPR:
11234 /* Fall through */
11235
11236 case FLOOR_DIV_EXPR:
11237 /* Simplify A / (B << N) where A and B are positive and B is
11238 a power of 2, to A >> (N + log2(B)). */
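/* For example, for unsigned A, A / (4 << N) becomes A >> (N + 2).  */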
11239 strict_overflow_p = false;
11240 if (TREE_CODE (arg1) == LSHIFT_EXPR
11241 && (TYPE_UNSIGNED (type)
11242 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11243 {
11244 tree sval = TREE_OPERAND (arg1, 0);
11245 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11246 {
11247 tree sh_cnt = TREE_OPERAND (arg1, 1);
11248 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11249 wi::exact_log2 (wi::to_wide (sval)));
11250
11251 if (strict_overflow_p)
11252 fold_overflow_warning (("assuming signed overflow does not "
11253 "occur when simplifying A / (B << N)"),
11254 WARN_STRICT_OVERFLOW_MISC);
11255
11256 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11257 sh_cnt, pow2);
11258 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11259 fold_convert_loc (loc, type, arg0), sh_cnt);
11260 }
11261 }
11262
11263 /* Fall through */
11264
11265 case ROUND_DIV_EXPR:
11266 case CEIL_DIV_EXPR:
11267 case EXACT_DIV_EXPR:
11268 if (integer_zerop (arg1))
11269 return NULL_TREE;
11270
11271 /* Convert -A / -B to A / B when the type is signed and overflow is
11272 undefined. */
11273 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11274 && TREE_CODE (op0) == NEGATE_EXPR
11275 && negate_expr_p (op1))
11276 {
11277 if (ANY_INTEGRAL_TYPE_P (type))
11278 fold_overflow_warning (("assuming signed overflow does not occur "
11279 "when distributing negation across "
11280 "division"),
11281 WARN_STRICT_OVERFLOW_MISC);
11282 return fold_build2_loc (loc, code, type,
11283 fold_convert_loc (loc, type,
11284 TREE_OPERAND (arg0, 0)),
11285 negate_expr (op1));
11286 }
11287 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11288 && TREE_CODE (arg1) == NEGATE_EXPR
11289 && negate_expr_p (op0))
11290 {
11291 if (ANY_INTEGRAL_TYPE_P (type))
11292 fold_overflow_warning (("assuming signed overflow does not occur "
11293 "when distributing negation across "
11294 "division"),
11295 WARN_STRICT_OVERFLOW_MISC);
11296 return fold_build2_loc (loc, code, type,
11297 negate_expr (op0),
11298 fold_convert_loc (loc, type,
11299 TREE_OPERAND (arg1, 0)));
11300 }
11301
11302 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11303 operation, EXACT_DIV_EXPR.
11304
11305 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11306 At one time others generated faster code, but it's not clear if they
11307 still do after the last round of changes to the DIV code in expmed.c. */
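/* For example, CEIL_DIV_EXPR <n * 4, 4> is rewritten as
EXACT_DIV_EXPR <n * 4, 4>.  */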
11308 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11309 && multiple_of_p (type, arg0, arg1))
11310 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11311 fold_convert (type, arg0),
11312 fold_convert (type, arg1));
11313
11314 strict_overflow_p = false;
11315 if (TREE_CODE (arg1) == INTEGER_CST
11316 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11317 &strict_overflow_p)) != 0)
11318 {
11319 if (strict_overflow_p)
11320 fold_overflow_warning (("assuming signed overflow does not occur "
11321 "when simplifying division"),
11322 WARN_STRICT_OVERFLOW_MISC);
11323 return fold_convert_loc (loc, type, tem);
11324 }
11325
11326 return NULL_TREE;
11327
11328 case CEIL_MOD_EXPR:
11329 case FLOOR_MOD_EXPR:
11330 case ROUND_MOD_EXPR:
11331 case TRUNC_MOD_EXPR:
11332 strict_overflow_p = false;
11333 if (TREE_CODE (arg1) == INTEGER_CST
11334 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11335 &strict_overflow_p)) != 0)
11336 {
11337 if (strict_overflow_p)
11338 fold_overflow_warning (("assuming signed overflow does not occur "
11339 "when simplifying modulus"),
11340 WARN_STRICT_OVERFLOW_MISC);
11341 return fold_convert_loc (loc, type, tem);
11342 }
11343
11344 return NULL_TREE;
11345
11346 case LROTATE_EXPR:
11347 case RROTATE_EXPR:
11348 case RSHIFT_EXPR:
11349 case LSHIFT_EXPR:
11350 /* Since a negative shift count is not well-defined,
11351 don't try to compute it in the compiler. */
11352 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11353 return NULL_TREE;
11354
11355 prec = element_precision (type);
11356
11357 /* If we have a rotate of a bit operation with the rotate count and
11358 the second operand of the bit operation both constant,
11359 permute the two operations. */
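/* For example, for an 8-bit unsigned X, (X & 0xf0) rrotate 4 becomes
(X rrotate 4) & 0x0f.  */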
11360 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11361 && (TREE_CODE (arg0) == BIT_AND_EXPR
11362 || TREE_CODE (arg0) == BIT_IOR_EXPR
11363 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11364 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11365 {
11366 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11367 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11368 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11369 fold_build2_loc (loc, code, type,
11370 arg00, arg1),
11371 fold_build2_loc (loc, code, type,
11372 arg01, arg1));
11373 }
11374
11375 /* Two consecutive rotates adding up to some integer
11376 multiple of the precision of the type can be ignored. */
11377 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11378 && TREE_CODE (arg0) == RROTATE_EXPR
11379 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11380 && wi::umod_trunc (wi::to_wide (arg1)
11381 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11382 prec) == 0)
11383 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11384
11385 return NULL_TREE;
11386
11387 case MIN_EXPR:
11388 case MAX_EXPR:
11389 goto associate;
11390
11391 case TRUTH_ANDIF_EXPR:
11392 /* Note that the operands of this must be ints
11393 and their values must be 0 or 1.
11394 ("true" is a fixed value perhaps depending on the language.) */
11395 /* If first arg is constant zero, return it. */
11396 if (integer_zerop (arg0))
11397 return fold_convert_loc (loc, type, arg0);
11398 /* FALLTHRU */
11399 case TRUTH_AND_EXPR:
11400 /* If either arg is constant true, drop it. */
11401 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11402 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11403 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11404 /* Preserve sequence points. */
11405 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11406 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11407 /* If second arg is constant zero, result is zero, but first arg
11408 must be evaluated. */
11409 if (integer_zerop (arg1))
11410 return omit_one_operand_loc (loc, type, arg1, arg0);
11411 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11412 case will be handled here. */
11413 if (integer_zerop (arg0))
11414 return omit_one_operand_loc (loc, type, arg0, arg1);
11415
11416 /* !X && X is always false. */
11417 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11418 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11419 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11420 /* X && !X is always false. */
11421 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11422 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11423 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11424
11425 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11426 means A >= Y && A != MAX, but in this case we know that
11427 A < X <= MAX. */
11428
11429 if (!TREE_SIDE_EFFECTS (arg0)
11430 && !TREE_SIDE_EFFECTS (arg1))
11431 {
11432 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11433 if (tem && !operand_equal_p (tem, arg0, 0))
11434 return fold_build2_loc (loc, code, type, tem, arg1);
11435
11436 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11437 if (tem && !operand_equal_p (tem, arg1, 0))
11438 return fold_build2_loc (loc, code, type, arg0, tem);
11439 }
11440
11441 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11442 != NULL_TREE)
11443 return tem;
11444
11445 return NULL_TREE;
11446
11447 case TRUTH_ORIF_EXPR:
11448 /* Note that the operands of this must be ints
11449 and their values must be 0 or true.
11450 ("true" is a fixed value perhaps depending on the language.) */
11451 /* If first arg is constant true, return it. */
11452 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11453 return fold_convert_loc (loc, type, arg0);
11454 /* FALLTHRU */
11455 case TRUTH_OR_EXPR:
11456 /* If either arg is constant zero, drop it. */
11457 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11458 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11459 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11460 /* Preserve sequence points. */
11461 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11462 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11463 /* If second arg is constant true, result is true, but we must
11464 evaluate first arg. */
11465 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11466 return omit_one_operand_loc (loc, type, arg1, arg0);
11467 /* Likewise for first arg, but note this only occurs here for
11468 TRUTH_OR_EXPR. */
11469 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11470 return omit_one_operand_loc (loc, type, arg0, arg1);
11471
11472 /* !X || X is always true. */
11473 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11474 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11475 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11476 /* X || !X is always true. */
11477 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11478 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11479 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11480
11481 /* (X && !Y) || (!X && Y) is X ^ Y */
11482 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11483 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11484 {
11485 tree a0, a1, l0, l1, n0, n1;
11486
11487 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11488 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11489
11490 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11491 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11492
11493 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11494 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11495
11496 if ((operand_equal_p (n0, a0, 0)
11497 && operand_equal_p (n1, a1, 0))
11498 || (operand_equal_p (n0, a1, 0)
11499 && operand_equal_p (n1, a0, 0)))
11500 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11501 }
11502
11503 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11504 != NULL_TREE)
11505 return tem;
11506
11507 return NULL_TREE;
11508
11509 case TRUTH_XOR_EXPR:
11510 /* If the second arg is constant zero, drop it. */
11511 if (integer_zerop (arg1))
11512 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11513 /* If the second arg is constant true, this is a logical inversion. */
11514 if (integer_onep (arg1))
11515 {
11516 tem = invert_truthvalue_loc (loc, arg0);
11517 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11518 }
11519 /* Identical arguments cancel to zero. */
11520 if (operand_equal_p (arg0, arg1, 0))
11521 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11522
11523 /* !X ^ X is always true. */
11524 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11525 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11526 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11527
11528 /* X ^ !X is always true. */
11529 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11530 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11531 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11532
11533 return NULL_TREE;
11534
11535 case EQ_EXPR:
11536 case NE_EXPR:
11537 STRIP_NOPS (arg0);
11538 STRIP_NOPS (arg1);
11539
11540 tem = fold_comparison (loc, code, type, op0, op1);
11541 if (tem != NULL_TREE)
11542 return tem;
11543
11544 /* bool_var != 1 becomes !bool_var. */
11545 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11546 && code == NE_EXPR)
11547 return fold_convert_loc (loc, type,
11548 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11549 TREE_TYPE (arg0), arg0));
11550
11551 /* bool_var == 0 becomes !bool_var. */
11552 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11553 && code == EQ_EXPR)
11554 return fold_convert_loc (loc, type,
11555 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11556 TREE_TYPE (arg0), arg0));
11557
11558 /* !exp != 0 becomes !exp */
11559 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11560 && code == NE_EXPR)
11561 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11562
11563 /* If this is an EQ or NE comparison with zero and ARG0 is
11564 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11565 two operations, but the latter can be done in one less insn
11566 on machines that have only two-operand insns or on which a
11567 constant cannot be the first operand. */
11568 if (TREE_CODE (arg0) == BIT_AND_EXPR
11569 && integer_zerop (arg1))
11570 {
11571 tree arg00 = TREE_OPERAND (arg0, 0);
11572 tree arg01 = TREE_OPERAND (arg0, 1);
11573 if (TREE_CODE (arg00) == LSHIFT_EXPR
11574 && integer_onep (TREE_OPERAND (arg00, 0)))
11575 {
11576 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11577 arg01, TREE_OPERAND (arg00, 1));
11578 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11579 build_int_cst (TREE_TYPE (arg0), 1));
11580 return fold_build2_loc (loc, code, type,
11581 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11582 arg1);
11583 }
11584 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11585 && integer_onep (TREE_OPERAND (arg01, 0)))
11586 {
11587 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11588 arg00, TREE_OPERAND (arg01, 1));
11589 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11590 build_int_cst (TREE_TYPE (arg0), 1));
11591 return fold_build2_loc (loc, code, type,
11592 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11593 arg1);
11594 }
11595 }
11596
11597 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11598 C1 is a valid shift constant, and C2 is a power of two, i.e.
11599 a single bit. */
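/* For example, ((X >> 3) & 4) != 0 becomes (X & 32) != 0 as long as
4 << 3 does not overflow the precision of X.  */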
11600 if (TREE_CODE (arg0) == BIT_AND_EXPR
11601 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11602 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11603 == INTEGER_CST
11604 && integer_pow2p (TREE_OPERAND (arg0, 1))
11605 && integer_zerop (arg1))
11606 {
11607 tree itype = TREE_TYPE (arg0);
11608 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11609 prec = TYPE_PRECISION (itype);
11610
11611 /* Check for a valid shift count. */
11612 if (wi::ltu_p (wi::to_wide (arg001), prec))
11613 {
11614 tree arg01 = TREE_OPERAND (arg0, 1);
11615 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11616 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11617 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11618 can be rewritten as (X & (C2 << C1)) != 0. */
11619 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11620 {
11621 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11622 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11623 return fold_build2_loc (loc, code, type, tem,
11624 fold_convert_loc (loc, itype, arg1));
11625 }
11626 /* Otherwise, for signed (arithmetic) shifts,
11627 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11628 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11629 else if (!TYPE_UNSIGNED (itype))
11630 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11631 arg000, build_int_cst (itype, 0));
11632 /* Otherwise, for unsigned (logical) shifts,
11633 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11634 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11635 else
11636 return omit_one_operand_loc (loc, type,
11637 code == EQ_EXPR ? integer_one_node
11638 : integer_zero_node,
11639 arg000);
11640 }
11641 }
11642
11643 /* If this is a comparison of a field, we may be able to simplify it. */
11644 if ((TREE_CODE (arg0) == COMPONENT_REF
11645 || TREE_CODE (arg0) == BIT_FIELD_REF)
11646 /* Handle the constant case even without -O
11647 to make sure the warnings are given. */
11648 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11649 {
11650 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11651 if (t1)
11652 return t1;
11653 }
11654
11655 /* Optimize comparisons of strlen vs zero to a compare of the
11656 first character of the string vs zero. To wit,
11657 strlen(ptr) == 0 => *ptr == 0
11658 strlen(ptr) != 0 => *ptr != 0
11659 Other cases should reduce to one of these two (or a constant)
11660 due to the return value of strlen being unsigned. */
11661 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
11662 {
11663 tree fndecl = get_callee_fndecl (arg0);
11664
11665 if (fndecl
11666 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
11667 && call_expr_nargs (arg0) == 1
11668 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
11669 == POINTER_TYPE))
11670 {
11671 tree ptrtype
11672 = build_pointer_type (build_qualified_type (char_type_node,
11673 TYPE_QUAL_CONST));
11674 tree ptr = fold_convert_loc (loc, ptrtype,
11675 CALL_EXPR_ARG (arg0, 0));
11676 tree iref = build_fold_indirect_ref_loc (loc, ptr);
11677 return fold_build2_loc (loc, code, type, iref,
11678 build_int_cst (TREE_TYPE (iref), 0));
11679 }
11680 }
11681
11682 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11683 of X. Similarly fold (X >> C) == 0 into X >= 0. */
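/* For example, for a 32-bit X, (X >> 31) != 0 becomes X < 0, with X
first converted to a signed type if it is unsigned.  */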
11684 if (TREE_CODE (arg0) == RSHIFT_EXPR
11685 && integer_zerop (arg1)
11686 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11687 {
11688 tree arg00 = TREE_OPERAND (arg0, 0);
11689 tree arg01 = TREE_OPERAND (arg0, 1);
11690 tree itype = TREE_TYPE (arg00);
11691 if (wi::to_wide (arg01) == element_precision (itype) - 1)
11692 {
11693 if (TYPE_UNSIGNED (itype))
11694 {
11695 itype = signed_type_for (itype);
11696 arg00 = fold_convert_loc (loc, itype, arg00);
11697 }
11698 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11699 type, arg00, build_zero_cst (itype));
11700 }
11701 }
11702
11703 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11704 (X & C) == 0 when C is a single bit. */
11705 if (TREE_CODE (arg0) == BIT_AND_EXPR
11706 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11707 && integer_zerop (arg1)
11708 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11709 {
11710 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11711 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11712 TREE_OPERAND (arg0, 1));
11713 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11714 type, tem,
11715 fold_convert_loc (loc, TREE_TYPE (arg0),
11716 arg1));
11717 }
11718
11719 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11720 constant C is a power of two, i.e. a single bit. */
11721 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11722 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11723 && integer_zerop (arg1)
11724 && integer_pow2p (TREE_OPERAND (arg0, 1))
11725 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11726 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11727 {
11728 tree arg00 = TREE_OPERAND (arg0, 0);
11729 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11730 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11731 }
11732
11733 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11734 when C is a power of two, i.e. a single bit. */
11735 if (TREE_CODE (arg0) == BIT_AND_EXPR
11736 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11737 && integer_zerop (arg1)
11738 && integer_pow2p (TREE_OPERAND (arg0, 1))
11739 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11740 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11741 {
11742 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11743 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11744 arg000, TREE_OPERAND (arg0, 1));
11745 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11746 tem, build_int_cst (TREE_TYPE (tem), 0));
11747 }
11748
11749 if (integer_zerop (arg1)
11750 && tree_expr_nonzero_p (arg0))
11751 {
11752 tree res = constant_boolean_node (code==NE_EXPR, type);
11753 return omit_one_operand_loc (loc, type, res, arg0);
11754 }
11755
11756 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11757 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11758 {
11759 tree arg00 = TREE_OPERAND (arg0, 0);
11760 tree arg01 = TREE_OPERAND (arg0, 1);
11761 tree arg10 = TREE_OPERAND (arg1, 0);
11762 tree arg11 = TREE_OPERAND (arg1, 1);
11763 tree itype = TREE_TYPE (arg0);
11764
11765 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11766 operand_equal_p guarantees no side-effects so we don't need
11767 to use omit_one_operand on Z. */
11768 if (operand_equal_p (arg01, arg11, 0))
11769 return fold_build2_loc (loc, code, type, arg00,
11770 fold_convert_loc (loc, TREE_TYPE (arg00),
11771 arg10));
11772 if (operand_equal_p (arg01, arg10, 0))
11773 return fold_build2_loc (loc, code, type, arg00,
11774 fold_convert_loc (loc, TREE_TYPE (arg00),
11775 arg11));
11776 if (operand_equal_p (arg00, arg11, 0))
11777 return fold_build2_loc (loc, code, type, arg01,
11778 fold_convert_loc (loc, TREE_TYPE (arg01),
11779 arg10));
11780 if (operand_equal_p (arg00, arg10, 0))
11781 return fold_build2_loc (loc, code, type, arg01,
11782 fold_convert_loc (loc, TREE_TYPE (arg01),
11783 arg11));
11784
11785 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11786 if (TREE_CODE (arg01) == INTEGER_CST
11787 && TREE_CODE (arg11) == INTEGER_CST)
11788 {
11789 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11790 fold_convert_loc (loc, itype, arg11));
11791 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11792 return fold_build2_loc (loc, code, type, tem,
11793 fold_convert_loc (loc, itype, arg10));
11794 }
11795 }
11796
11797 /* Attempt to simplify equality/inequality comparisons of complex
11798 values. Only lower the comparison if the result is known or
11799 can be simplified to a single scalar comparison. */
11800 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11801 || TREE_CODE (arg0) == COMPLEX_CST)
11802 && (TREE_CODE (arg1) == COMPLEX_EXPR
11803 || TREE_CODE (arg1) == COMPLEX_CST))
11804 {
11805 tree real0, imag0, real1, imag1;
11806 tree rcond, icond;
11807
11808 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11809 {
11810 real0 = TREE_OPERAND (arg0, 0);
11811 imag0 = TREE_OPERAND (arg0, 1);
11812 }
11813 else
11814 {
11815 real0 = TREE_REALPART (arg0);
11816 imag0 = TREE_IMAGPART (arg0);
11817 }
11818
11819 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11820 {
11821 real1 = TREE_OPERAND (arg1, 0);
11822 imag1 = TREE_OPERAND (arg1, 1);
11823 }
11824 else
11825 {
11826 real1 = TREE_REALPART (arg1);
11827 imag1 = TREE_IMAGPART (arg1);
11828 }
11829
11830 rcond = fold_binary_loc (loc, code, type, real0, real1);
11831 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11832 {
11833 if (integer_zerop (rcond))
11834 {
11835 if (code == EQ_EXPR)
11836 return omit_two_operands_loc (loc, type, boolean_false_node,
11837 imag0, imag1);
11838 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11839 }
11840 else
11841 {
11842 if (code == NE_EXPR)
11843 return omit_two_operands_loc (loc, type, boolean_true_node,
11844 imag0, imag1);
11845 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11846 }
11847 }
11848
11849 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11850 if (icond && TREE_CODE (icond) == INTEGER_CST)
11851 {
11852 if (integer_zerop (icond))
11853 {
11854 if (code == EQ_EXPR)
11855 return omit_two_operands_loc (loc, type, boolean_false_node,
11856 real0, real1);
11857 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11858 }
11859 else
11860 {
11861 if (code == NE_EXPR)
11862 return omit_two_operands_loc (loc, type, boolean_true_node,
11863 real0, real1);
11864 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11865 }
11866 }
11867 }
11868
11869 return NULL_TREE;
11870
11871 case LT_EXPR:
11872 case GT_EXPR:
11873 case LE_EXPR:
11874 case GE_EXPR:
11875 tem = fold_comparison (loc, code, type, op0, op1);
11876 if (tem != NULL_TREE)
11877 return tem;
11878
11879 /* Transform comparisons of the form X +- C CMP X. */
11880 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11881 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11882 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11883 && !HONOR_SNANS (arg0))
11884 {
11885 tree arg01 = TREE_OPERAND (arg0, 1);
11886 enum tree_code code0 = TREE_CODE (arg0);
11887 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11888
11889 /* (X - c) > X becomes false. */
11890 if (code == GT_EXPR
11891 && ((code0 == MINUS_EXPR && is_positive >= 0)
11892 || (code0 == PLUS_EXPR && is_positive <= 0)))
11893 return constant_boolean_node (0, type);
11894
11895 /* Likewise (X + c) < X becomes false. */
11896 if (code == LT_EXPR
11897 && ((code0 == PLUS_EXPR && is_positive >= 0)
11898 || (code0 == MINUS_EXPR && is_positive <= 0)))
11899 return constant_boolean_node (0, type);
11900
11901 /* Convert (X - c) <= X to true. */
11902 if (!HONOR_NANS (arg1)
11903 && code == LE_EXPR
11904 && ((code0 == MINUS_EXPR && is_positive >= 0)
11905 || (code0 == PLUS_EXPR && is_positive <= 0)))
11906 return constant_boolean_node (1, type);
11907
11908 /* Convert (X + c) >= X to true. */
11909 if (!HONOR_NANS (arg1)
11910 && code == GE_EXPR
11911 && ((code0 == PLUS_EXPR && is_positive >= 0)
11912 || (code0 == MINUS_EXPR && is_positive <= 0)))
11913 return constant_boolean_node (1, type);
11914 }
11915
11916 /* If we are comparing an ABS_EXPR with a constant, we can
11917 convert all the cases into explicit comparisons, but they may
11918 well not be faster than doing the ABS and one comparison.
11919 But ABS (X) <= C is a range comparison, which becomes a subtraction
11920 and a comparison, and is probably faster. */
11921 if (code == LE_EXPR
11922 && TREE_CODE (arg1) == INTEGER_CST
11923 && TREE_CODE (arg0) == ABS_EXPR
11924 && ! TREE_SIDE_EFFECTS (arg0)
11925 && (tem = negate_expr (arg1)) != 0
11926 && TREE_CODE (tem) == INTEGER_CST
11927 && !TREE_OVERFLOW (tem))
11928 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11929 build2 (GE_EXPR, type,
11930 TREE_OPERAND (arg0, 0), tem),
11931 build2 (LE_EXPR, type,
11932 TREE_OPERAND (arg0, 0), arg1));
11933
11934 /* Convert ABS_EXPR<x> >= 0 to true. */
11935 strict_overflow_p = false;
11936 if (code == GE_EXPR
11937 && (integer_zerop (arg1)
11938 || (! HONOR_NANS (arg0)
11939 && real_zerop (arg1)))
11940 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11941 {
11942 if (strict_overflow_p)
11943 fold_overflow_warning (("assuming signed overflow does not occur "
11944 "when simplifying comparison of "
11945 "absolute value and zero"),
11946 WARN_STRICT_OVERFLOW_CONDITIONAL);
11947 return omit_one_operand_loc (loc, type,
11948 constant_boolean_node (true, type),
11949 arg0);
11950 }
11951
11952 /* Convert ABS_EXPR<x> < 0 to false. */
11953 strict_overflow_p = false;
11954 if (code == LT_EXPR
11955 && (integer_zerop (arg1) || real_zerop (arg1))
11956 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11957 {
11958 if (strict_overflow_p)
11959 fold_overflow_warning (("assuming signed overflow does not occur "
11960 "when simplifying comparison of "
11961 "absolute value and zero"),
11962 WARN_STRICT_OVERFLOW_CONDITIONAL);
11963 return omit_one_operand_loc (loc, type,
11964 constant_boolean_node (false, type),
11965 arg0);
11966 }
11967
11968 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11969 and similarly for >= into !=. */
11970 if ((code == LT_EXPR || code == GE_EXPR)
11971 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11972 && TREE_CODE (arg1) == LSHIFT_EXPR
11973 && integer_onep (TREE_OPERAND (arg1, 0)))
11974 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11975 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11976 TREE_OPERAND (arg1, 1)),
11977 build_zero_cst (TREE_TYPE (arg0)));
11978
11979 	  /* Similarly for X < (cast) (1 << Y).  But the cast can't be narrowing,
11980 	     otherwise Y might be >= the number of bits in X's type and thus e.g.
11981 	     (unsigned char) (1 << Y) for Y 15 might be 0.
11982 	     If the cast is widening, then 1 << Y should have unsigned type,
11983 	     otherwise if Y is the number of bits in the signed shift type minus 1,
11984 	     we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
11985 	     31 might be 0xffffffff80000000.  */
11986 if ((code == LT_EXPR || code == GE_EXPR)
11987 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11988 && CONVERT_EXPR_P (arg1)
11989 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11990 && (element_precision (TREE_TYPE (arg1))
11991 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11992 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11993 || (element_precision (TREE_TYPE (arg1))
11994 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11995 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11996 {
11997 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11998 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11999 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12000 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12001 build_zero_cst (TREE_TYPE (arg0)));
12002 }
12003
12004 return NULL_TREE;
12005
12006 case UNORDERED_EXPR:
12007 case ORDERED_EXPR:
12008 case UNLT_EXPR:
12009 case UNLE_EXPR:
12010 case UNGT_EXPR:
12011 case UNGE_EXPR:
12012 case UNEQ_EXPR:
12013 case LTGT_EXPR:
12014 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12015 {
12016 tree targ0 = strip_float_extensions (arg0);
12017 tree targ1 = strip_float_extensions (arg1);
12018 tree newtype = TREE_TYPE (targ0);
12019
12020 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12021 newtype = TREE_TYPE (targ1);
12022
12023 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12024 return fold_build2_loc (loc, code, type,
12025 fold_convert_loc (loc, newtype, targ0),
12026 fold_convert_loc (loc, newtype, targ1));
12027 }
12028
12029 return NULL_TREE;
12030
12031 case COMPOUND_EXPR:
12032 /* When pedantic, a compound expression can be neither an lvalue
12033 nor an integer constant expression. */
12034 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12035 return NULL_TREE;
12036       /* Don't let (0, 0) be a null pointer constant.  */
12037 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12038 : fold_convert_loc (loc, type, arg1);
12039 return pedantic_non_lvalue_loc (loc, tem);
12040
12041 case ASSERT_EXPR:
12042 /* An ASSERT_EXPR should never be passed to fold_binary. */
12043 gcc_unreachable ();
12044
12045 default:
12046 return NULL_TREE;
12047 } /* switch (code) */
12048 }
12049
12050 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12051 ((A & N) + B) & M -> (A + B) & M
12052 Similarly if (N & M) == 0,
12053 ((A | N) + B) & M -> (A + B) & M
12054 and for - instead of + (or unary - instead of +)
12055 and/or ^ instead of |.
12056 If B is constant and (B & M) == 0, fold into A & M.
12057
12058    This function is a helper for match.pd patterns.  Return the non-NULL
12059    type in which the simplified operation should be performed, but only
12060    if some optimization is possible; otherwise return NULL_TREE.
12061
12062 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12063 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12064 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12065 +/-. */
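/* Purely illustrative instance of the patterns above, with M == 0xff:
     ((A & 0x1ff) + B) & 0xff  ->  (A + B) & 0xff   since (0x1ff & 0xff) == 0xff
     ((A | 0x100) + B) & 0xff  ->  (A + B) & 0xff   since (0x100 & 0xff) == 0.  */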
12066 tree
12067 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12068 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12069 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12070 tree *pmop)
12071 {
12072 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12073 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12074 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12075 if (~cst1 == 0
12076 || (cst1 & (cst1 + 1)) != 0
12077 || !INTEGRAL_TYPE_P (type)
12078 || (!TYPE_OVERFLOW_WRAPS (type)
12079 && TREE_CODE (type) != INTEGER_TYPE)
12080 || (wi::max_value (type) & cst1) != cst1)
12081 return NULL_TREE;
12082
12083 enum tree_code codes[2] = { code00, code01 };
12084 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12085 int which = 0;
12086 wide_int cst0;
12087
12088 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12089      arg1 (M) == (1LL << cst) - 1.
12090 Store C into PMOP[0] and D into PMOP[1]. */
12091 pmop[0] = arg00;
12092 pmop[1] = arg01;
12093 which = code != NEGATE_EXPR;
12094
12095 for (; which >= 0; which--)
12096 switch (codes[which])
12097 {
12098 case BIT_AND_EXPR:
12099 case BIT_IOR_EXPR:
12100 case BIT_XOR_EXPR:
12101 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12102 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12103 if (codes[which] == BIT_AND_EXPR)
12104 {
12105 if (cst0 != cst1)
12106 break;
12107 }
12108 else if (cst0 != 0)
12109 break;
12110 /* If C or D is of the form (A & N) where
12111 (N & M) == M, or of the form (A | N) or
12112 (A ^ N) where (N & M) == 0, replace it with A. */
12113 pmop[which] = arg0xx[2 * which];
12114 break;
12115 case ERROR_MARK:
12116 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12117 break;
12118 	/* If C or D is a constant N where (N & M) == 0, it can be
12119 omitted (replaced with 0). */
12120 if ((code == PLUS_EXPR
12121 || (code == MINUS_EXPR && which == 0))
12122 && (cst1 & wi::to_wide (pmop[which])) == 0)
12123 pmop[which] = build_int_cst (type, 0);
12124 /* Similarly, with C - N where (-N & M) == 0. */
12125 if (code == MINUS_EXPR
12126 && which == 1
12127 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12128 pmop[which] = build_int_cst (type, 0);
12129 break;
12130 default:
12131 gcc_unreachable ();
12132 }
12133
12134 /* Only build anything new if we optimized one or both arguments above. */
12135 if (pmop[0] == arg00 && pmop[1] == arg01)
12136 return NULL_TREE;
12137
12138 if (TYPE_OVERFLOW_WRAPS (type))
12139 return type;
12140 else
12141 return unsigned_type_for (type);
12142 }
12143
12144 /* Used by contains_label_p and contains_label_1.  */
12145
12146 struct contains_label_data
12147 {
12148 hash_set<tree> *pset;
12149 bool inside_switch_p;
12150 };
12151
12152 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12153 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12154 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12155
12156 static tree
12157 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12158 {
12159 contains_label_data *d = (contains_label_data *) data;
12160 switch (TREE_CODE (*tp))
12161 {
12162 case LABEL_EXPR:
12163 return *tp;
12164
12165 case CASE_LABEL_EXPR:
12166 if (!d->inside_switch_p)
12167 return *tp;
12168 return NULL_TREE;
12169
12170 case SWITCH_EXPR:
12171 if (!d->inside_switch_p)
12172 {
12173 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12174 return *tp;
12175 d->inside_switch_p = true;
12176 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12177 return *tp;
12178 d->inside_switch_p = false;
12179 *walk_subtrees = 0;
12180 }
12181 return NULL_TREE;
12182
12183 case GOTO_EXPR:
12184 *walk_subtrees = 0;
12185 return NULL_TREE;
12186
12187 default:
12188 return NULL_TREE;
12189 }
12190 }
12191
12192 /* Return whether the sub-tree ST contains a label which is accessible from
12193 outside the sub-tree. */
12194
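/* Illustrative use: when a constant COND_EXPR is folded below, a dead arm
   that has side effects is discarded only if it contains no such label,
   so that a goto from outside the arm cannot lose its target.  */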
12195 static bool
12196 contains_label_p (tree st)
12197 {
12198 hash_set<tree> pset;
12199 contains_label_data data = { &pset, false };
12200 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12201 }
12202
12203 /* Fold a ternary expression of code CODE and type TYPE with operands
12204 OP0, OP1, and OP2. Return the folded expression if folding is
12205 successful. Otherwise, return NULL_TREE. */
12206
12207 tree
12208 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12209 tree op0, tree op1, tree op2)
12210 {
12211 tree tem;
12212 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12213 enum tree_code_class kind = TREE_CODE_CLASS (code);
12214
12215 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12216 && TREE_CODE_LENGTH (code) == 3);
12217
12218 /* If this is a commutative operation, and OP0 is a constant, move it
12219 to OP1 to reduce the number of tests below. */
12220 if (commutative_ternary_tree_code (code)
12221 && tree_swap_operands_p (op0, op1))
12222 return fold_build3_loc (loc, code, type, op1, op0, op2);
12223
12224 tem = generic_simplify (loc, code, type, op0, op1, op2);
12225 if (tem)
12226 return tem;
12227
12228 /* Strip any conversions that don't change the mode. This is safe
12229 for every expression, except for a comparison expression because
12230 its signedness is derived from its operands. So, in the latter
12231 case, only strip conversions that don't change the signedness.
12232
12233 Note that this is done as an internal manipulation within the
12234 constant folder, in order to find the simplest representation of
12235      the arguments so that their form can be studied.  In any case,
12236 the appropriate type conversions should be put back in the tree
12237 that will get out of the constant folder. */
12238 if (op0)
12239 {
12240 arg0 = op0;
12241 STRIP_NOPS (arg0);
12242 }
12243
12244 if (op1)
12245 {
12246 arg1 = op1;
12247 STRIP_NOPS (arg1);
12248 }
12249
12250 if (op2)
12251 {
12252 arg2 = op2;
12253 STRIP_NOPS (arg2);
12254 }
12255
12256 switch (code)
12257 {
12258 case COMPONENT_REF:
12259 if (TREE_CODE (arg0) == CONSTRUCTOR
12260 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12261 {
12262 unsigned HOST_WIDE_INT idx;
12263 tree field, value;
12264 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12265 if (field == arg1)
12266 return value;
12267 }
12268 return NULL_TREE;
12269
12270 case COND_EXPR:
12271 case VEC_COND_EXPR:
12272 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12273 so all simple results must be passed through pedantic_non_lvalue. */
12274 if (TREE_CODE (arg0) == INTEGER_CST)
12275 {
12276 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12277 tem = integer_zerop (arg0) ? op2 : op1;
12278 /* Only optimize constant conditions when the selected branch
12279 has the same type as the COND_EXPR. This avoids optimizing
12280 away "c ? x : throw", where the throw has a void type.
12281 	     Also avoid throwing away an operand that contains a label.  */
12282 if ((!TREE_SIDE_EFFECTS (unused_op)
12283 || !contains_label_p (unused_op))
12284 && (! VOID_TYPE_P (TREE_TYPE (tem))
12285 || VOID_TYPE_P (type)))
12286 return pedantic_non_lvalue_loc (loc, tem);
12287 return NULL_TREE;
12288 }
12289 else if (TREE_CODE (arg0) == VECTOR_CST)
12290 {
12291 unsigned HOST_WIDE_INT nelts;
12292 if ((TREE_CODE (arg1) == VECTOR_CST
12293 || TREE_CODE (arg1) == CONSTRUCTOR)
12294 && (TREE_CODE (arg2) == VECTOR_CST
12295 || TREE_CODE (arg2) == CONSTRUCTOR)
12296 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12297 {
12298 vec_perm_builder sel (nelts, nelts, 1);
12299 for (unsigned int i = 0; i < nelts; i++)
12300 {
12301 tree val = VECTOR_CST_ELT (arg0, i);
12302 if (integer_all_onesp (val))
12303 sel.quick_push (i);
12304 else if (integer_zerop (val))
12305 sel.quick_push (nelts + i);
12306 else /* Currently unreachable. */
12307 return NULL_TREE;
12308 }
12309 vec_perm_indices indices (sel, 2, nelts);
12310 tree t = fold_vec_perm (type, arg1, arg2, indices);
12311 if (t != NULL_TREE)
12312 return t;
12313 }
12314 }
12315
12316 /* If we have A op B ? A : C, we may be able to convert this to a
12317 simpler expression, depending on the operation and the values
12318 of B and C. Signed zeros prevent all of these transformations,
12319 for reasons given above each one.
12320
12321 Also try swapping the arguments and inverting the conditional. */
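      /* For instance (illustrative), x >= y ? x : y may become
	 MAX_EXPR <x, y> when the types and signed-zero rules permit it.  */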
12322 if (COMPARISON_CLASS_P (arg0)
12323 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12324 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
12325 {
12326 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12327 if (tem)
12328 return tem;
12329 }
12330
12331 if (COMPARISON_CLASS_P (arg0)
12332 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12333 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12334 {
12335 location_t loc0 = expr_location_or (arg0, loc);
12336 tem = fold_invert_truthvalue (loc0, arg0);
12337 if (tem && COMPARISON_CLASS_P (tem))
12338 {
12339 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12340 if (tem)
12341 return tem;
12342 }
12343 }
12344
12345 /* If the second operand is simpler than the third, swap them
12346 since that produces better jump optimization results. */
12347 if (truth_value_p (TREE_CODE (arg0))
12348 && tree_swap_operands_p (op1, op2))
12349 {
12350 location_t loc0 = expr_location_or (arg0, loc);
12351 /* See if this can be inverted. If it can't, possibly because
12352 it was a floating-point inequality comparison, don't do
12353 anything. */
12354 tem = fold_invert_truthvalue (loc0, arg0);
12355 if (tem)
12356 return fold_build3_loc (loc, code, type, tem, op2, op1);
12357 }
12358
12359 /* Convert A ? 1 : 0 to simply A. */
12360 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12361 : (integer_onep (op1)
12362 && !VECTOR_TYPE_P (type)))
12363 && integer_zerop (op2)
12364 /* If we try to convert OP0 to our type, the
12365 call to fold will try to move the conversion inside
12366 a COND, which will recurse. In that case, the COND_EXPR
12367 is probably the best choice, so leave it alone. */
12368 && type == TREE_TYPE (arg0))
12369 return pedantic_non_lvalue_loc (loc, arg0);
12370
12371 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12372 over COND_EXPR in cases such as floating point comparisons. */
12373 if (integer_zerop (op1)
12374 && code == COND_EXPR
12375 && integer_onep (op2)
12376 && !VECTOR_TYPE_P (type)
12377 && truth_value_p (TREE_CODE (arg0)))
12378 return pedantic_non_lvalue_loc (loc,
12379 fold_convert_loc (loc, type,
12380 invert_truthvalue_loc (loc,
12381 arg0)));
12382
12383 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12384 if (TREE_CODE (arg0) == LT_EXPR
12385 && integer_zerop (TREE_OPERAND (arg0, 1))
12386 && integer_zerop (op2)
12387 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12388 {
12389 /* sign_bit_p looks through both zero and sign extensions,
12390 but for this optimization only sign extensions are
12391 usable. */
12392 tree tem2 = TREE_OPERAND (arg0, 0);
12393 while (tem != tem2)
12394 {
12395 if (TREE_CODE (tem2) != NOP_EXPR
12396 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12397 {
12398 tem = NULL_TREE;
12399 break;
12400 }
12401 tem2 = TREE_OPERAND (tem2, 0);
12402 }
12403 /* sign_bit_p only checks ARG1 bits within A's precision.
12404 		 If <sign bit of A> has a wider type than A, bits outside
12405 		 of A's precision in <sign bit of A> need to be checked.
12406 		 If they are all 0, this optimization needs to be done
12407 		 in unsigned A's type; if they are all 1, in signed A's type;
12408 		 otherwise this can't be done.  */
12409 if (tem
12410 && TYPE_PRECISION (TREE_TYPE (tem))
12411 < TYPE_PRECISION (TREE_TYPE (arg1))
12412 && TYPE_PRECISION (TREE_TYPE (tem))
12413 < TYPE_PRECISION (type))
12414 {
12415 int inner_width, outer_width;
12416 tree tem_type;
12417
12418 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12419 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12420 if (outer_width > TYPE_PRECISION (type))
12421 outer_width = TYPE_PRECISION (type);
12422
12423 wide_int mask = wi::shifted_mask
12424 (inner_width, outer_width - inner_width, false,
12425 TYPE_PRECISION (TREE_TYPE (arg1)));
12426
12427 wide_int common = mask & wi::to_wide (arg1);
12428 if (common == mask)
12429 {
12430 tem_type = signed_type_for (TREE_TYPE (tem));
12431 tem = fold_convert_loc (loc, tem_type, tem);
12432 }
12433 else if (common == 0)
12434 {
12435 tem_type = unsigned_type_for (TREE_TYPE (tem));
12436 tem = fold_convert_loc (loc, tem_type, tem);
12437 }
12438 else
12439 tem = NULL;
12440 }
12441
12442 if (tem)
12443 return
12444 fold_convert_loc (loc, type,
12445 fold_build2_loc (loc, BIT_AND_EXPR,
12446 TREE_TYPE (tem), tem,
12447 fold_convert_loc (loc,
12448 TREE_TYPE (tem),
12449 arg1)));
12450 }
12451
12452 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12453 already handled above. */
12454 if (TREE_CODE (arg0) == BIT_AND_EXPR
12455 && integer_onep (TREE_OPERAND (arg0, 1))
12456 && integer_zerop (op2)
12457 && integer_pow2p (arg1))
12458 {
12459 tree tem = TREE_OPERAND (arg0, 0);
12460 STRIP_NOPS (tem);
12461 if (TREE_CODE (tem) == RSHIFT_EXPR
12462 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12463 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
12464 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
12465 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12466 fold_convert_loc (loc, type,
12467 TREE_OPERAND (tem, 0)),
12468 op1);
12469 }
12470
12471 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12472 is probably obsolete because the first operand should be a
12473 truth value (that's why we have the two cases above), but let's
12474 leave it in until we can confirm this for all front-ends. */
12475 if (integer_zerop (op2)
12476 && TREE_CODE (arg0) == NE_EXPR
12477 && integer_zerop (TREE_OPERAND (arg0, 1))
12478 && integer_pow2p (arg1)
12479 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12480 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12481 arg1, OEP_ONLY_CONST)
12482 /* operand_equal_p compares just value, not precision, so e.g.
12483 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
12484 second operand 32-bit -128, which is not a power of two (or vice
12485 	       versa).  */
12486 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
12487 return pedantic_non_lvalue_loc (loc,
12488 fold_convert_loc (loc, type,
12489 TREE_OPERAND (arg0,
12490 0)));
12491
12492 /* Disable the transformations below for vectors, since
12493 fold_binary_op_with_conditional_arg may undo them immediately,
12494 yielding an infinite loop. */
12495 if (code == VEC_COND_EXPR)
12496 return NULL_TREE;
12497
12498 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12499 if (integer_zerop (op2)
12500 && truth_value_p (TREE_CODE (arg0))
12501 && truth_value_p (TREE_CODE (arg1))
12502 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12503 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12504 : TRUTH_ANDIF_EXPR,
12505 type, fold_convert_loc (loc, type, arg0), op1);
12506
12507 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12508 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
12509 && truth_value_p (TREE_CODE (arg0))
12510 && truth_value_p (TREE_CODE (arg1))
12511 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12512 {
12513 location_t loc0 = expr_location_or (arg0, loc);
12514 /* Only perform transformation if ARG0 is easily inverted. */
12515 tem = fold_invert_truthvalue (loc0, arg0);
12516 if (tem)
12517 return fold_build2_loc (loc, code == VEC_COND_EXPR
12518 ? BIT_IOR_EXPR
12519 : TRUTH_ORIF_EXPR,
12520 type, fold_convert_loc (loc, type, tem),
12521 op1);
12522 }
12523
12524 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12525 if (integer_zerop (arg1)
12526 && truth_value_p (TREE_CODE (arg0))
12527 && truth_value_p (TREE_CODE (op2))
12528 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12529 {
12530 location_t loc0 = expr_location_or (arg0, loc);
12531 /* Only perform transformation if ARG0 is easily inverted. */
12532 tem = fold_invert_truthvalue (loc0, arg0);
12533 if (tem)
12534 return fold_build2_loc (loc, code == VEC_COND_EXPR
12535 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12536 type, fold_convert_loc (loc, type, tem),
12537 op2);
12538 }
12539
12540 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12541 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
12542 && truth_value_p (TREE_CODE (arg0))
12543 && truth_value_p (TREE_CODE (op2))
12544 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12545 return fold_build2_loc (loc, code == VEC_COND_EXPR
12546 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12547 type, fold_convert_loc (loc, type, arg0), op2);
12548
12549 return NULL_TREE;
12550
12551 case CALL_EXPR:
12552 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12553 of fold_ternary on them. */
12554 gcc_unreachable ();
12555
12556 case BIT_FIELD_REF:
12557 if (TREE_CODE (arg0) == VECTOR_CST
12558 && (type == TREE_TYPE (TREE_TYPE (arg0))
12559 || (VECTOR_TYPE_P (type)
12560 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
12561 && tree_fits_uhwi_p (op1)
12562 && tree_fits_uhwi_p (op2))
12563 {
12564 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12565 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12566 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12567 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12568
12569 if (n != 0
12570 && (idx % width) == 0
12571 && (n % width) == 0
12572 && known_le ((idx + n) / width,
12573 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
12574 {
12575 idx = idx / width;
12576 n = n / width;
12577
12578 if (TREE_CODE (arg0) == VECTOR_CST)
12579 {
12580 if (n == 1)
12581 {
12582 tem = VECTOR_CST_ELT (arg0, idx);
12583 if (VECTOR_TYPE_P (type))
12584 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
12585 return tem;
12586 }
12587
12588 tree_vector_builder vals (type, n, 1);
12589 for (unsigned i = 0; i < n; ++i)
12590 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
12591 return vals.build ();
12592 }
12593 }
12594 }
12595
12596 /* On constants we can use native encode/interpret to constant
12597 fold (nearly) all BIT_FIELD_REFs. */
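      /* Sketch of the mechanism (illustrative): a byte-aligned BIT_FIELD_REF
	 of a constant is folded by encoding the constant into a byte buffer
	 with native_encode_expr and reinterpreting the requested slice with
	 native_interpret_expr in the result type.  */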
12598 if (CONSTANT_CLASS_P (arg0)
12599 && can_native_interpret_type_p (type)
12600 && BITS_PER_UNIT == 8
12601 && tree_fits_uhwi_p (op1)
12602 && tree_fits_uhwi_p (op2))
12603 {
12604 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12605 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12606 /* Limit us to a reasonable amount of work. To relax the
12607 other limitations we need bit-shifting of the buffer
12608 and rounding up the size. */
12609 if (bitpos % BITS_PER_UNIT == 0
12610 && bitsize % BITS_PER_UNIT == 0
12611 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
12612 {
12613 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
12614 unsigned HOST_WIDE_INT len
12615 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
12616 bitpos / BITS_PER_UNIT);
12617 if (len > 0
12618 && len * BITS_PER_UNIT >= bitsize)
12619 {
12620 tree v = native_interpret_expr (type, b,
12621 bitsize / BITS_PER_UNIT);
12622 if (v)
12623 return v;
12624 }
12625 }
12626 }
12627
12628 return NULL_TREE;
12629
12630 case VEC_PERM_EXPR:
12631       /* Perform constant folding of VEC_PERM_EXPR.  */
12632 if (TREE_CODE (arg2) == VECTOR_CST
12633 && TREE_CODE (op0) == VECTOR_CST
12634 && TREE_CODE (op1) == VECTOR_CST)
12635 {
12636 /* Build a vector of integers from the tree mask. */
12637 vec_perm_builder builder;
12638 if (!tree_to_vec_perm_builder (&builder, arg2))
12639 return NULL_TREE;
12640
12641 /* Create a vec_perm_indices for the integer vector. */
12642 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
12643 bool single_arg = (op0 == op1);
12644 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
12645 return fold_vec_perm (type, op0, op1, sel);
12646 }
12647 return NULL_TREE;
12648
12649 case BIT_INSERT_EXPR:
12650 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
12651 if (TREE_CODE (arg0) == INTEGER_CST
12652 && TREE_CODE (arg1) == INTEGER_CST)
12653 {
12654 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12655 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
12656 wide_int tem = (wi::to_wide (arg0)
12657 & wi::shifted_mask (bitpos, bitsize, true,
12658 TYPE_PRECISION (type)));
12659 wide_int tem2
12660 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
12661 bitsize), bitpos);
12662 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
12663 }
12664 else if (TREE_CODE (arg0) == VECTOR_CST
12665 && CONSTANT_CLASS_P (arg1)
12666 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
12667 TREE_TYPE (arg1)))
12668 {
12669 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12670 unsigned HOST_WIDE_INT elsize
12671 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
12672 if (bitpos % elsize == 0)
12673 {
12674 unsigned k = bitpos / elsize;
12675 unsigned HOST_WIDE_INT nelts;
12676 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
12677 return arg0;
12678 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
12679 {
12680 tree_vector_builder elts (type, nelts, 1);
12681 elts.quick_grow (nelts);
12682 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
12683 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
12684 return elts.build ();
12685 }
12686 }
12687 }
12688 return NULL_TREE;
12689
12690 default:
12691 return NULL_TREE;
12692 } /* switch (code) */
12693 }
12694
12695 /* Get the element at ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12696    of an array (or vector).  If CTOR_IDX is non-NULL, it is updated with the
12697    constructor element index of the value returned.  If the element is
12698    not found, NULL_TREE is returned and *CTOR_IDX is updated to
12699    the index of the element after the ACCESS_INDEX position (which
12700    may be outside of the CTOR array).  */
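/* Purely illustrative example: for the hypothetical initializer
     int a[] = { [0] = 1, [4] = 2 };
   ACCESS_INDEX 4 yields the value 2 and, if requested, *CTOR_IDX == 1,
   since that value is the second constructor element.  */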
12701
12702 tree
12703 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
12704 unsigned *ctor_idx)
12705 {
12706 tree index_type = NULL_TREE;
12707 signop index_sgn = UNSIGNED;
12708 offset_int low_bound = 0;
12709
12710 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12711 {
12712 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12713 if (domain_type && TYPE_MIN_VALUE (domain_type))
12714 {
12715 	  /* Static constructors for variably sized objects make no sense.  */
12716 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12717 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12718 /* ??? When it is obvious that the range is signed, treat it so. */
12719 if (TYPE_UNSIGNED (index_type)
12720 && TYPE_MAX_VALUE (domain_type)
12721 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
12722 TYPE_MIN_VALUE (domain_type)))
12723 {
12724 index_sgn = SIGNED;
12725 low_bound
12726 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
12727 SIGNED);
12728 }
12729 else
12730 {
12731 index_sgn = TYPE_SIGN (index_type);
12732 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12733 }
12734 }
12735 }
12736
12737 if (index_type)
12738 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12739 index_sgn);
12740
12741 offset_int index = low_bound;
12742 if (index_type)
12743 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12744
12745 offset_int max_index = index;
12746 unsigned cnt;
12747 tree cfield, cval;
12748 bool first_p = true;
12749
12750 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12751 {
12752 /* Array constructor might explicitly set index, or specify a range,
12753 	 or leave the index NULL, meaning that it is the next index after the
12754 	 previous one.  */
12755 if (cfield)
12756 {
12757 if (TREE_CODE (cfield) == INTEGER_CST)
12758 max_index = index
12759 = offset_int::from (wi::to_wide (cfield), index_sgn);
12760 else
12761 {
12762 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12763 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
12764 index_sgn);
12765 max_index
12766 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
12767 index_sgn);
12768 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
12769 }
12770 }
12771 else if (!first_p)
12772 {
12773 index = max_index + 1;
12774 if (index_type)
12775 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12776 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
12777 max_index = index;
12778 }
12779 else
12780 first_p = false;
12781
12782       /* Do we have a match?  */
12783 if (wi::cmp (access_index, index, index_sgn) >= 0)
12784 {
12785 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
12786 {
12787 if (ctor_idx)
12788 *ctor_idx = cnt;
12789 return cval;
12790 }
12791 }
12792 else if (in_gimple_form)
12793 	/* We're past the element we are searching for.  Note that during
12794 	   parsing the elements might not be sorted.
12795 ??? We should use a binary search and a flag on the
12796 CONSTRUCTOR as to whether elements are sorted in declaration
12797 order. */
12798 break;
12799 }
12800 if (ctor_idx)
12801 *ctor_idx = cnt;
12802 return NULL_TREE;
12803 }
12804
12805 /* Perform constant folding and related simplification of EXPR.
12806 The related simplifications include x*1 => x, x*0 => 0, etc.,
12807 and application of the associative law.
12808 NOP_EXPR conversions may be removed freely (as long as we
12809 are careful not to change the type of the overall expression).
12810 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12811 but we can constant-fold them if they have constant operands. */
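/* Hypothetical usage sketch: a caller that builds a MULT_EXPR of X and the
   integer constant 1 and passes it to fold gets X back, while a tree that
   cannot be simplified is returned unchanged.  */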
12812
12813 #ifdef ENABLE_FOLD_CHECKING
12814 # define fold(x) fold_1 (x)
12815 static tree fold_1 (tree);
12816 static
12817 #endif
12818 tree
12819 fold (tree expr)
12820 {
12821 const tree t = expr;
12822 enum tree_code code = TREE_CODE (t);
12823 enum tree_code_class kind = TREE_CODE_CLASS (code);
12824 tree tem;
12825 location_t loc = EXPR_LOCATION (expr);
12826
12827 /* Return right away if a constant. */
12828 if (kind == tcc_constant)
12829 return t;
12830
12831 /* CALL_EXPR-like objects with variable numbers of operands are
12832 treated specially. */
12833 if (kind == tcc_vl_exp)
12834 {
12835 if (code == CALL_EXPR)
12836 {
12837 tem = fold_call_expr (loc, expr, false);
12838 return tem ? tem : expr;
12839 }
12840 return expr;
12841 }
12842
12843 if (IS_EXPR_CODE_CLASS (kind))
12844 {
12845 tree type = TREE_TYPE (t);
12846 tree op0, op1, op2;
12847
12848 switch (TREE_CODE_LENGTH (code))
12849 {
12850 case 1:
12851 op0 = TREE_OPERAND (t, 0);
12852 tem = fold_unary_loc (loc, code, type, op0);
12853 return tem ? tem : expr;
12854 case 2:
12855 op0 = TREE_OPERAND (t, 0);
12856 op1 = TREE_OPERAND (t, 1);
12857 tem = fold_binary_loc (loc, code, type, op0, op1);
12858 return tem ? tem : expr;
12859 case 3:
12860 op0 = TREE_OPERAND (t, 0);
12861 op1 = TREE_OPERAND (t, 1);
12862 op2 = TREE_OPERAND (t, 2);
12863 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12864 return tem ? tem : expr;
12865 default:
12866 break;
12867 }
12868 }
12869
12870 switch (code)
12871 {
12872 case ARRAY_REF:
12873 {
12874 tree op0 = TREE_OPERAND (t, 0);
12875 tree op1 = TREE_OPERAND (t, 1);
12876
12877 if (TREE_CODE (op1) == INTEGER_CST
12878 && TREE_CODE (op0) == CONSTRUCTOR
12879 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12880 {
12881 tree val = get_array_ctor_element_at_index (op0,
12882 wi::to_offset (op1));
12883 if (val)
12884 return val;
12885 }
12886
12887 return t;
12888 }
12889
12890 /* Return a VECTOR_CST if possible. */
12891 case CONSTRUCTOR:
12892 {
12893 tree type = TREE_TYPE (t);
12894 if (TREE_CODE (type) != VECTOR_TYPE)
12895 return t;
12896
12897 unsigned i;
12898 tree val;
12899 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12900 if (! CONSTANT_CLASS_P (val))
12901 return t;
12902
12903 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12904 }
12905
12906 case CONST_DECL:
12907 return fold (DECL_INITIAL (t));
12908
12909 default:
12910 return t;
12911 } /* switch (code) */
12912 }
12913
12914 #ifdef ENABLE_FOLD_CHECKING
12915 #undef fold
12916
12917 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12918 hash_table<nofree_ptr_hash<const tree_node> > *);
12919 static void fold_check_failed (const_tree, const_tree);
12920 void print_fold_checksum (const_tree);
12921
12922 /* When --enable-checking=fold, compute a digest of expr before
12923    and after the actual fold call to see whether fold accidentally
12924    changed the original expr.  */
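/* Sketch of the failure mode being guarded against (hypothetical): if a
   fold_* routine modified one of its operand trees in place instead of
   building a new node, the before/after MD5 digests would differ and
   fold_check_failed would report an internal error.  */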
12925
12926 tree
12927 fold (tree expr)
12928 {
12929 tree ret;
12930 struct md5_ctx ctx;
12931 unsigned char checksum_before[16], checksum_after[16];
12932 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12933
12934 md5_init_ctx (&ctx);
12935 fold_checksum_tree (expr, &ctx, &ht);
12936 md5_finish_ctx (&ctx, checksum_before);
12937 ht.empty ();
12938
12939 ret = fold_1 (expr);
12940
12941 md5_init_ctx (&ctx);
12942 fold_checksum_tree (expr, &ctx, &ht);
12943 md5_finish_ctx (&ctx, checksum_after);
12944
12945 if (memcmp (checksum_before, checksum_after, 16))
12946 fold_check_failed (expr, ret);
12947
12948 return ret;
12949 }
12950
12951 void
12952 print_fold_checksum (const_tree expr)
12953 {
12954 struct md5_ctx ctx;
12955 unsigned char checksum[16], cnt;
12956 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12957
12958 md5_init_ctx (&ctx);
12959 fold_checksum_tree (expr, &ctx, &ht);
12960 md5_finish_ctx (&ctx, checksum);
12961 for (cnt = 0; cnt < 16; ++cnt)
12962 fprintf (stderr, "%02x", checksum[cnt]);
12963 putc ('\n', stderr);
12964 }
12965
12966 static void
12967 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12968 {
12969 internal_error ("fold check: original tree changed by fold");
12970 }
12971
12972 static void
12973 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12974 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12975 {
12976 const tree_node **slot;
12977 enum tree_code code;
12978 union tree_node *buf;
12979 int i, len;
12980
12981 recursive_label:
12982 if (expr == NULL)
12983 return;
12984 slot = ht->find_slot (expr, INSERT);
12985 if (*slot != NULL)
12986 return;
12987 *slot = expr;
12988 code = TREE_CODE (expr);
12989 if (TREE_CODE_CLASS (code) == tcc_declaration
12990 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12991 {
12992 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12993 size_t sz = tree_size (expr);
12994 buf = XALLOCAVAR (union tree_node, sz);
12995 memcpy ((char *) buf, expr, sz);
12996 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
12997 buf->decl_with_vis.symtab_node = NULL;
12998 buf->base.nowarning_flag = 0;
12999 expr = (tree) buf;
13000 }
13001 else if (TREE_CODE_CLASS (code) == tcc_type
13002 && (TYPE_POINTER_TO (expr)
13003 || TYPE_REFERENCE_TO (expr)
13004 || TYPE_CACHED_VALUES_P (expr)
13005 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13006 || TYPE_NEXT_VARIANT (expr)
13007 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13008 {
13009 /* Allow these fields to be modified. */
13010 tree tmp;
13011 size_t sz = tree_size (expr);
13012 buf = XALLOCAVAR (union tree_node, sz);
13013 memcpy ((char *) buf, expr, sz);
13014 expr = tmp = (tree) buf;
13015 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13016 TYPE_POINTER_TO (tmp) = NULL;
13017 TYPE_REFERENCE_TO (tmp) = NULL;
13018 TYPE_NEXT_VARIANT (tmp) = NULL;
13019 TYPE_ALIAS_SET (tmp) = -1;
13020 if (TYPE_CACHED_VALUES_P (tmp))
13021 {
13022 TYPE_CACHED_VALUES_P (tmp) = 0;
13023 TYPE_CACHED_VALUES (tmp) = NULL;
13024 }
13025 }
13026 else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
13027 {
13028 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
13029 and change builtins.c etc. instead - see PR89543. */
13030 size_t sz = tree_size (expr);
13031 buf = XALLOCAVAR (union tree_node, sz);
13032 memcpy ((char *) buf, expr, sz);
13033 buf->base.nowarning_flag = 0;
13034 expr = (tree) buf;
13035 }
13036 md5_process_bytes (expr, tree_size (expr), ctx);
13037 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13038 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13039 if (TREE_CODE_CLASS (code) != tcc_type
13040 && TREE_CODE_CLASS (code) != tcc_declaration
13041 && code != TREE_LIST
13042 && code != SSA_NAME
13043 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13044 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13045 switch (TREE_CODE_CLASS (code))
13046 {
13047 case tcc_constant:
13048 switch (code)
13049 {
13050 case STRING_CST:
13051 md5_process_bytes (TREE_STRING_POINTER (expr),
13052 TREE_STRING_LENGTH (expr), ctx);
13053 break;
13054 case COMPLEX_CST:
13055 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13056 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13057 break;
13058 case VECTOR_CST:
13059 len = vector_cst_encoded_nelts (expr);
13060 for (i = 0; i < len; ++i)
13061 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13062 break;
13063 default:
13064 break;
13065 }
13066 break;
13067 case tcc_exceptional:
13068 switch (code)
13069 {
13070 case TREE_LIST:
13071 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13072 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13073 expr = TREE_CHAIN (expr);
13074 goto recursive_label;
13075 break;
13076 case TREE_VEC:
13077 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13078 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13079 break;
13080 default:
13081 break;
13082 }
13083 break;
13084 case tcc_expression:
13085 case tcc_reference:
13086 case tcc_comparison:
13087 case tcc_unary:
13088 case tcc_binary:
13089 case tcc_statement:
13090 case tcc_vl_exp:
13091 len = TREE_OPERAND_LENGTH (expr);
13092 for (i = 0; i < len; ++i)
13093 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13094 break;
13095 case tcc_declaration:
13096 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13097 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13098 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13099 {
13100 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13101 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13102 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13103 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13104 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13105 }
13106
13107 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13108 {
13109 if (TREE_CODE (expr) == FUNCTION_DECL)
13110 {
13111 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13112 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13113 }
13114 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13115 }
13116 break;
13117 case tcc_type:
13118 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13119 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13120 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13121 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13122 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13123 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13124 if (INTEGRAL_TYPE_P (expr)
13125 || SCALAR_FLOAT_TYPE_P (expr))
13126 {
13127 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13128 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13129 }
13130 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13131 if (TREE_CODE (expr) == RECORD_TYPE
13132 || TREE_CODE (expr) == UNION_TYPE
13133 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13134 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13135 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13136 break;
13137 default:
13138 break;
13139 }
13140 }
13141
13142 /* Helper function for outputting the checksum of a tree T. When
13143 debugging with gdb, you can "define mynext" to be "next" followed
13144 by "call debug_fold_checksum (op0)", then just trace down till the
13145 outputs differ. */
13146
13147 DEBUG_FUNCTION void
13148 debug_fold_checksum (const_tree t)
13149 {
13150 int i;
13151 unsigned char checksum[16];
13152 struct md5_ctx ctx;
13153 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13154
13155 md5_init_ctx (&ctx);
13156 fold_checksum_tree (t, &ctx, &ht);
13157 md5_finish_ctx (&ctx, checksum);
13158 ht.empty ();
13159
13160 for (i = 0; i < 16; i++)
13161 fprintf (stderr, "%d ", checksum[i]);
13162
13163 fprintf (stderr, "\n");
13164 }
13165
13166 #endif
13167
13168 /* Fold a unary tree expression with code CODE of type TYPE with an
13169 operand OP0. LOC is the location of the resulting expression.
13170 Return a folded expression if successful. Otherwise, return a tree
13171 expression with code CODE of type TYPE with an operand OP0. */
13172
13173 tree
13174 fold_build1_loc (location_t loc,
13175 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13176 {
13177 tree tem;
13178 #ifdef ENABLE_FOLD_CHECKING
13179 unsigned char checksum_before[16], checksum_after[16];
13180 struct md5_ctx ctx;
13181 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13182
13183 md5_init_ctx (&ctx);
13184 fold_checksum_tree (op0, &ctx, &ht);
13185 md5_finish_ctx (&ctx, checksum_before);
13186 ht.empty ();
13187 #endif
13188
13189 tem = fold_unary_loc (loc, code, type, op0);
13190 if (!tem)
13191 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13192
13193 #ifdef ENABLE_FOLD_CHECKING
13194 md5_init_ctx (&ctx);
13195 fold_checksum_tree (op0, &ctx, &ht);
13196 md5_finish_ctx (&ctx, checksum_after);
13197
13198 if (memcmp (checksum_before, checksum_after, 16))
13199 fold_check_failed (op0, tem);
13200 #endif
13201 return tem;
13202 }
13203
13204 /* Fold a binary tree expression with code CODE of type TYPE with
13205 operands OP0 and OP1. LOC is the location of the resulting
13206 expression. Return a folded expression if successful. Otherwise,
13207 return a tree expression with code CODE of type TYPE with operands
13208 OP0 and OP1. */
13209
13210 tree
13211 fold_build2_loc (location_t loc,
13212 enum tree_code code, tree type, tree op0, tree op1
13213 MEM_STAT_DECL)
13214 {
13215 tree tem;
13216 #ifdef ENABLE_FOLD_CHECKING
13217 unsigned char checksum_before_op0[16],
13218 checksum_before_op1[16],
13219 checksum_after_op0[16],
13220 checksum_after_op1[16];
13221 struct md5_ctx ctx;
13222 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13223
13224 md5_init_ctx (&ctx);
13225 fold_checksum_tree (op0, &ctx, &ht);
13226 md5_finish_ctx (&ctx, checksum_before_op0);
13227 ht.empty ();
13228
13229 md5_init_ctx (&ctx);
13230 fold_checksum_tree (op1, &ctx, &ht);
13231 md5_finish_ctx (&ctx, checksum_before_op1);
13232 ht.empty ();
13233 #endif
13234
13235 tem = fold_binary_loc (loc, code, type, op0, op1);
13236 if (!tem)
13237 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13238
13239 #ifdef ENABLE_FOLD_CHECKING
13240 md5_init_ctx (&ctx);
13241 fold_checksum_tree (op0, &ctx, &ht);
13242 md5_finish_ctx (&ctx, checksum_after_op0);
13243 ht.empty ();
13244
13245 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13246 fold_check_failed (op0, tem);
13247
13248 md5_init_ctx (&ctx);
13249 fold_checksum_tree (op1, &ctx, &ht);
13250 md5_finish_ctx (&ctx, checksum_after_op1);
13251
13252 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13253 fold_check_failed (op1, tem);
13254 #endif
13255 return tem;
13256 }
13257
13258 /* Fold a ternary tree expression with code CODE of type TYPE with
13259 operands OP0, OP1, and OP2. Return a folded expression if
13260 successful. Otherwise, return a tree expression with code CODE of
13261 type TYPE with operands OP0, OP1, and OP2. */
13262
13263 tree
13264 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13265 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13266 {
13267 tree tem;
13268 #ifdef ENABLE_FOLD_CHECKING
13269 unsigned char checksum_before_op0[16],
13270 checksum_before_op1[16],
13271 checksum_before_op2[16],
13272 checksum_after_op0[16],
13273 checksum_after_op1[16],
13274 checksum_after_op2[16];
13275 struct md5_ctx ctx;
13276 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13277
13278 md5_init_ctx (&ctx);
13279 fold_checksum_tree (op0, &ctx, &ht);
13280 md5_finish_ctx (&ctx, checksum_before_op0);
13281 ht.empty ();
13282
13283 md5_init_ctx (&ctx);
13284 fold_checksum_tree (op1, &ctx, &ht);
13285 md5_finish_ctx (&ctx, checksum_before_op1);
13286 ht.empty ();
13287
13288 md5_init_ctx (&ctx);
13289 fold_checksum_tree (op2, &ctx, &ht);
13290 md5_finish_ctx (&ctx, checksum_before_op2);
13291 ht.empty ();
13292 #endif
13293
13294 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13295 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13296 if (!tem)
13297 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13298
13299 #ifdef ENABLE_FOLD_CHECKING
13300 md5_init_ctx (&ctx);
13301 fold_checksum_tree (op0, &ctx, &ht);
13302 md5_finish_ctx (&ctx, checksum_after_op0);
13303 ht.empty ();
13304
13305 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13306 fold_check_failed (op0, tem);
13307
13308 md5_init_ctx (&ctx);
13309 fold_checksum_tree (op1, &ctx, &ht);
13310 md5_finish_ctx (&ctx, checksum_after_op1);
13311 ht.empty ();
13312
13313 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13314 fold_check_failed (op1, tem);
13315
13316 md5_init_ctx (&ctx);
13317 fold_checksum_tree (op2, &ctx, &ht);
13318 md5_finish_ctx (&ctx, checksum_after_op2);
13319
13320 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13321 fold_check_failed (op2, tem);
13322 #endif
13323 return tem;
13324 }
13325
13326 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13327 arguments in ARGARRAY, and a null static chain.
13328 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13329 of type TYPE from the given operands as constructed by build_call_array. */
13330
13331 tree
13332 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13333 int nargs, tree *argarray)
13334 {
13335 tree tem;
13336 #ifdef ENABLE_FOLD_CHECKING
13337 unsigned char checksum_before_fn[16],
13338 checksum_before_arglist[16],
13339 checksum_after_fn[16],
13340 checksum_after_arglist[16];
13341 struct md5_ctx ctx;
13342 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13343 int i;
13344
13345 md5_init_ctx (&ctx);
13346 fold_checksum_tree (fn, &ctx, &ht);
13347 md5_finish_ctx (&ctx, checksum_before_fn);
13348 ht.empty ();
13349
13350 md5_init_ctx (&ctx);
13351 for (i = 0; i < nargs; i++)
13352 fold_checksum_tree (argarray[i], &ctx, &ht);
13353 md5_finish_ctx (&ctx, checksum_before_arglist);
13354 ht.empty ();
13355 #endif
13356
13357 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13358 if (!tem)
13359 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13360
13361 #ifdef ENABLE_FOLD_CHECKING
13362 md5_init_ctx (&ctx);
13363 fold_checksum_tree (fn, &ctx, &ht);
13364 md5_finish_ctx (&ctx, checksum_after_fn);
13365 ht.empty ();
13366
13367 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13368 fold_check_failed (fn, tem);
13369
13370 md5_init_ctx (&ctx);
13371 for (i = 0; i < nargs; i++)
13372 fold_checksum_tree (argarray[i], &ctx, &ht);
13373 md5_finish_ctx (&ctx, checksum_after_arglist);
13374
13375 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13376 fold_check_failed (NULL_TREE, tem);
13377 #endif
13378 return tem;
13379 }
13380
13381 /* Perform constant folding and related simplification of initializer
13382 expression EXPR. These behave identically to "fold_buildN" but ignore
13383 potential run-time traps and exceptions that fold must preserve. */
13384
13385 #define START_FOLD_INIT \
13386 int saved_signaling_nans = flag_signaling_nans;\
13387 int saved_trapping_math = flag_trapping_math;\
13388 int saved_rounding_math = flag_rounding_math;\
13389 int saved_trapv = flag_trapv;\
13390 int saved_folding_initializer = folding_initializer;\
13391 flag_signaling_nans = 0;\
13392 flag_trapping_math = 0;\
13393 flag_rounding_math = 0;\
13394 flag_trapv = 0;\
13395 folding_initializer = 1;
13396
13397 #define END_FOLD_INIT \
13398 flag_signaling_nans = saved_signaling_nans;\
13399 flag_trapping_math = saved_trapping_math;\
13400 flag_rounding_math = saved_rounding_math;\
13401 flag_trapv = saved_trapv;\
13402 folding_initializer = saved_folding_initializer;
13403
13404 tree
13405 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13406 tree type, tree op)
13407 {
13408 tree result;
13409 START_FOLD_INIT;
13410
13411 result = fold_build1_loc (loc, code, type, op);
13412
13413 END_FOLD_INIT;
13414 return result;
13415 }
13416
13417 tree
13418 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13419 tree type, tree op0, tree op1)
13420 {
13421 tree result;
13422 START_FOLD_INIT;
13423
13424 result = fold_build2_loc (loc, code, type, op0, op1);
13425
13426 END_FOLD_INIT;
13427 return result;
13428 }
13429
13430 tree
13431 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13432 int nargs, tree *argarray)
13433 {
13434 tree result;
13435 START_FOLD_INIT;
13436
13437 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13438
13439 END_FOLD_INIT;
13440 return result;
13441 }
13442
13443 #undef START_FOLD_INIT
13444 #undef END_FOLD_INIT
13445
13446 /* Determine if the first argument is a multiple of the second argument.
13447    Return 0 if it is not, or if we cannot easily determine it to be.
13448
13449 An example of the sort of thing we care about (at this point; this routine
13450 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13451 fold cases do now) is discovering that
13452
13453 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13454
13455 is a multiple of
13456
13457 SAVE_EXPR (J * 8)
13458
13459 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13460
13461 This code also handles discovering that
13462
13463 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13464
13465 is a multiple of 8 so we don't have to worry about dealing with a
13466 possible remainder.
13467
13468 Note that we *look* inside a SAVE_EXPR only to determine how it was
13469 calculated; it is not safe for fold to do much of anything else with the
13470 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13471 at run time. For example, the latter example above *cannot* be implemented
13472 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13473 evaluation time of the original SAVE_EXPR is not necessarily the same at
13474 the time the new expression is evaluated. The only optimization of this
13475 sort that would be valid is changing
13476
13477 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13478
13479 divided by 8 to
13480
13481 SAVE_EXPR (I) * SAVE_EXPR (J)
13482
13483 (where the same SAVE_EXPR (J) is used in the original and the
13484 transformed version). */
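/* Purely illustrative cases (arbitrary values): multiple_of_p returns 1 for
   TOP == x * 8 with BOTTOM == 4 and for TOP == (x & ~7) with BOTTOM == 8,
   but returns 0 for TOP == x + 4 with BOTTOM == 8, since that cannot easily
   be proven.  */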
13485
13486 int
13487 multiple_of_p (tree type, const_tree top, const_tree bottom)
13488 {
13489 gimple *stmt;
13490 tree t1, op1, op2;
13491
13492 if (operand_equal_p (top, bottom, 0))
13493 return 1;
13494
13495 if (TREE_CODE (type) != INTEGER_TYPE)
13496 return 0;
13497
13498 switch (TREE_CODE (top))
13499 {
13500 case BIT_AND_EXPR:
13501 /* Bitwise and provides a power of two multiple. If the mask is
13502 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13503 if (!integer_pow2p (bottom))
13504 return 0;
13505 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13506 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13507
13508 case MULT_EXPR:
13509 if (TREE_CODE (bottom) == INTEGER_CST)
13510 {
13511 op1 = TREE_OPERAND (top, 0);
13512 op2 = TREE_OPERAND (top, 1);
13513 if (TREE_CODE (op1) == INTEGER_CST)
13514 std::swap (op1, op2);
13515 if (TREE_CODE (op2) == INTEGER_CST)
13516 {
13517 if (multiple_of_p (type, op2, bottom))
13518 return 1;
13519 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
13520 if (multiple_of_p (type, bottom, op2))
13521 {
13522 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
13523 wi::to_widest (op2));
13524 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
13525 {
13526 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
13527 return multiple_of_p (type, op1, op2);
13528 }
13529 }
13530 return multiple_of_p (type, op1, bottom);
13531 }
13532 }
13533 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13534 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13535
13536 case MINUS_EXPR:
13537       /* It is impossible to prove precisely whether op0 - op1 is a multiple
13538 	 of bottom, so be conservative here and check whether both op0 and op1
13539 	 are multiples of bottom.  Note we check the second operand first
13540 	 since it's usually simpler.  */
13541 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13542 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13543
13544 case PLUS_EXPR:
13545 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13546 as op0 - 3 if the expression has unsigned type. For example,
13547 	 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not.  */
13548 op1 = TREE_OPERAND (top, 1);
13549 if (TYPE_UNSIGNED (type)
13550 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
13551 op1 = fold_build1 (NEGATE_EXPR, type, op1);
13552 return (multiple_of_p (type, op1, bottom)
13553 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13554
13555 case LSHIFT_EXPR:
13556 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13557 {
13558 op1 = TREE_OPERAND (top, 1);
13559 /* const_binop may not detect overflow correctly,
13560 so check for it explicitly here. */
13561 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
13562 wi::to_wide (op1))
13563 && (t1 = fold_convert (type,
13564 const_binop (LSHIFT_EXPR, size_one_node,
13565 op1))) != 0
13566 && !TREE_OVERFLOW (t1))
13567 return multiple_of_p (type, t1, bottom);
13568 }
13569 return 0;
13570
13571 case NOP_EXPR:
13572 /* Can't handle conversions from non-integral or wider integral type. */
13573 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13574 || (TYPE_PRECISION (type)
13575 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13576 return 0;
13577
13578 /* fall through */
13579
13580 case SAVE_EXPR:
13581 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13582
13583 case COND_EXPR:
13584 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13585 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13586
13587 case INTEGER_CST:
13588 if (TREE_CODE (bottom) != INTEGER_CST
13589 || integer_zerop (bottom)
13590 || (TYPE_UNSIGNED (type)
13591 && (tree_int_cst_sgn (top) < 0
13592 || tree_int_cst_sgn (bottom) < 0)))
13593 return 0;
13594 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13595 SIGNED);
13596
13597 case SSA_NAME:
13598 if (TREE_CODE (bottom) == INTEGER_CST
13599 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
13600 && gimple_code (stmt) == GIMPLE_ASSIGN)
13601 {
13602 enum tree_code code = gimple_assign_rhs_code (stmt);
13603
13604 	  /* Check for special cases to see if top is defined as a multiple
13605 	     of bottom:
13606
13607 	       top = (X & ~(bottom - 1)) ; bottom is a power of 2
13608
13609 or
13610
13611 Y = X % bottom
13612 top = X - Y. */
13613 if (code == BIT_AND_EXPR
13614 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13615 && TREE_CODE (op2) == INTEGER_CST
13616 && integer_pow2p (bottom)
13617 && wi::multiple_of_p (wi::to_widest (op2),
13618 wi::to_widest (bottom), UNSIGNED))
13619 return 1;
13620
13621 op1 = gimple_assign_rhs1 (stmt);
13622 if (code == MINUS_EXPR
13623 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13624 && TREE_CODE (op2) == SSA_NAME
13625 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
13626 && gimple_code (stmt) == GIMPLE_ASSIGN
13627 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
13628 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
13629 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
13630 return 1;
13631 }
13632
13633 /* fall through */
13634
13635 default:
13636 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
13637 return multiple_p (wi::to_poly_widest (top),
13638 wi::to_poly_widest (bottom));
13639
13640 return 0;
13641 }
13642 }
13643
13644 #define tree_expr_nonnegative_warnv_p(X, Y) \
13645 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13646
13647 #define RECURSE(X) \
13648 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
13649
13650 /* Return true if CODE or TYPE is known to be non-negative. */
13651
13652 static bool
13653 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13654 {
13655 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13656 && truth_value_p (code))
13657 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13658 have a signed:1 type (where the values are -1 and 0). */
13659 return true;
13660 return false;
13661 }
13662
13663 /* Return true if (CODE OP0) is known to be non-negative. If the return
13664 value is based on the assumption that signed overflow is undefined,
13665 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13666 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13667
13668 bool
13669 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13670 bool *strict_overflow_p, int depth)
13671 {
13672 if (TYPE_UNSIGNED (type))
13673 return true;
13674
13675 switch (code)
13676 {
13677 case ABS_EXPR:
13678 /* We can't return 1 if flag_wrapv is set because
13679 ABS_EXPR<INT_MIN> = INT_MIN. */
13680 if (!ANY_INTEGRAL_TYPE_P (type))
13681 return true;
13682 if (TYPE_OVERFLOW_UNDEFINED (type))
13683 {
13684 *strict_overflow_p = true;
13685 return true;
13686 }
13687 break;
13688
13689 case NON_LVALUE_EXPR:
13690 case FLOAT_EXPR:
13691 case FIX_TRUNC_EXPR:
13692 return RECURSE (op0);
13693
13694 CASE_CONVERT:
13695 {
13696 tree inner_type = TREE_TYPE (op0);
13697 tree outer_type = type;
13698
13699 if (TREE_CODE (outer_type) == REAL_TYPE)
13700 {
13701 if (TREE_CODE (inner_type) == REAL_TYPE)
13702 return RECURSE (op0);
13703 if (INTEGRAL_TYPE_P (inner_type))
13704 {
13705 if (TYPE_UNSIGNED (inner_type))
13706 return true;
13707 return RECURSE (op0);
13708 }
13709 }
13710 else if (INTEGRAL_TYPE_P (outer_type))
13711 {
13712 if (TREE_CODE (inner_type) == REAL_TYPE)
13713 return RECURSE (op0);
13714 if (INTEGRAL_TYPE_P (inner_type))
13715 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13716 && TYPE_UNSIGNED (inner_type);
13717 }
13718 }
13719 break;
13720
13721 default:
13722 return tree_simple_nonnegative_warnv_p (code, type);
13723 }
13724
13725 /* We don't know sign of `t', so be conservative and return false. */
13726 return false;
13727 }
13728
13729 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13730 value is based on the assumption that signed overflow is undefined,
13731 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13732 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13733
13734 bool
13735 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13736 tree op1, bool *strict_overflow_p,
13737 int depth)
13738 {
13739 if (TYPE_UNSIGNED (type))
13740 return true;
13741
13742 switch (code)
13743 {
13744 case POINTER_PLUS_EXPR:
13745 case PLUS_EXPR:
13746 if (FLOAT_TYPE_P (type))
13747 return RECURSE (op0) && RECURSE (op1);
13748
13749 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13750 both unsigned and at least 2 bits shorter than the result. */
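/* For example, (int) (unsigned short) a + (int) (unsigned short) b needs
   at most 17 bits (assuming 16-bit short), so it cannot wrap a 32-bit int.  */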
13751 if (TREE_CODE (type) == INTEGER_TYPE
13752 && TREE_CODE (op0) == NOP_EXPR
13753 && TREE_CODE (op1) == NOP_EXPR)
13754 {
13755 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13756 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13757 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13758 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13759 {
13760 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13761 TYPE_PRECISION (inner2)) + 1;
13762 return prec < TYPE_PRECISION (type);
13763 }
13764 }
13765 break;
13766
13767 case MULT_EXPR:
13768 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13769 {
13770 /* x * x is always non-negative for floating point x,
13771 or for integers when signed overflow is undefined. */
13772 if (operand_equal_p (op0, op1, 0)
13773 || (RECURSE (op0) && RECURSE (op1)))
13774 {
13775 if (ANY_INTEGRAL_TYPE_P (type)
13776 && TYPE_OVERFLOW_UNDEFINED (type))
13777 *strict_overflow_p = true;
13778 return true;
13779 }
13780 }
13781
13782 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13783 both unsigned and their total number of bits is less than that of the result. */
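/* For example, (int) (unsigned char) a * (int) (unsigned char) b needs at
   most 16 bits (assuming 8-bit char), so it cannot wrap a 32-bit int.  */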
13784 if (TREE_CODE (type) == INTEGER_TYPE
13785 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13786 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13787 {
13788 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13789 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13790 : TREE_TYPE (op0);
13791 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13792 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13793 : TREE_TYPE (op1);
13794
13795 bool unsigned0 = TYPE_UNSIGNED (inner0);
13796 bool unsigned1 = TYPE_UNSIGNED (inner1);
13797
13798 if (TREE_CODE (op0) == INTEGER_CST)
13799 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13800
13801 if (TREE_CODE (op1) == INTEGER_CST)
13802 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13803
13804 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13805 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13806 {
13807 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13808 ? tree_int_cst_min_precision (op0, UNSIGNED)
13809 : TYPE_PRECISION (inner0);
13810
13811 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13812 ? tree_int_cst_min_precision (op1, UNSIGNED)
13813 : TYPE_PRECISION (inner1);
13814
13815 return precision0 + precision1 < TYPE_PRECISION (type);
13816 }
13817 }
13818 return false;
13819
13820 case BIT_AND_EXPR:
13821 case MAX_EXPR:
13822 return RECURSE (op0) || RECURSE (op1);
13823
13824 case BIT_IOR_EXPR:
13825 case BIT_XOR_EXPR:
13826 case MIN_EXPR:
13827 case RDIV_EXPR:
13828 case TRUNC_DIV_EXPR:
13829 case CEIL_DIV_EXPR:
13830 case FLOOR_DIV_EXPR:
13831 case ROUND_DIV_EXPR:
13832 return RECURSE (op0) && RECURSE (op1);
13833
13834 case TRUNC_MOD_EXPR:
13835 return RECURSE (op0);
13836
13837 case FLOOR_MOD_EXPR:
13838 return RECURSE (op1);
13839
13840 case CEIL_MOD_EXPR:
13841 case ROUND_MOD_EXPR:
13842 default:
13843 return tree_simple_nonnegative_warnv_p (code, type);
13844 }
13845
13846 /* We don't know sign of `t', so be conservative and return false. */
13847 return false;
13848 }
13849
13850 /* Return true if T is known to be non-negative. If the return
13851 value is based on the assumption that signed overflow is undefined,
13852 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13853 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13854
13855 bool
13856 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13857 {
13858 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13859 return true;
13860
13861 switch (TREE_CODE (t))
13862 {
13863 case INTEGER_CST:
13864 return tree_int_cst_sgn (t) >= 0;
13865
13866 case REAL_CST:
13867 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13868
13869 case FIXED_CST:
13870 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13871
13872 case COND_EXPR:
13873 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13874
13875 case SSA_NAME:
13876 /* Limit the depth of recursion to avoid quadratic behavior.
13877 This is expected to catch almost all occurrences in practice.
13878 If this code misses important cases that unbounded recursion
13879 would not, passes that need this information could be revised
13880 to provide it through dataflow propagation. */
13881 return (!name_registered_for_update_p (t)
13882 && depth < param_max_ssa_name_query_depth
13883 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13884 strict_overflow_p, depth));
13885
13886 default:
13887 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13888 }
13889 }
13890
13891 /* Return true if T is known to be non-negative. If the return
13892 value is based on the assumption that signed overflow is undefined,
13893 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13894 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13895
13896 bool
13897 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13898 bool *strict_overflow_p, int depth)
13899 {
13900 switch (fn)
13901 {
13902 CASE_CFN_ACOS:
13903 CASE_CFN_ACOSH:
13904 CASE_CFN_CABS:
13905 CASE_CFN_COSH:
13906 CASE_CFN_ERFC:
13907 CASE_CFN_EXP:
13908 CASE_CFN_EXP10:
13909 CASE_CFN_EXP2:
13910 CASE_CFN_FABS:
13911 CASE_CFN_FDIM:
13912 CASE_CFN_HYPOT:
13913 CASE_CFN_POW10:
13914 CASE_CFN_FFS:
13915 CASE_CFN_PARITY:
13916 CASE_CFN_POPCOUNT:
13917 CASE_CFN_CLZ:
13918 CASE_CFN_CLRSB:
13919 case CFN_BUILT_IN_BSWAP16:
13920 case CFN_BUILT_IN_BSWAP32:
13921 case CFN_BUILT_IN_BSWAP64:
13922 case CFN_BUILT_IN_BSWAP128:
13923 /* Always true. */
13924 return true;
13925
13926 CASE_CFN_SQRT:
13927 CASE_CFN_SQRT_FN:
13928 /* sqrt(-0.0) is -0.0. */
13929 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13930 return true;
13931 return RECURSE (arg0);
13932
13933 CASE_CFN_ASINH:
13934 CASE_CFN_ATAN:
13935 CASE_CFN_ATANH:
13936 CASE_CFN_CBRT:
13937 CASE_CFN_CEIL:
13938 CASE_CFN_CEIL_FN:
13939 CASE_CFN_ERF:
13940 CASE_CFN_EXPM1:
13941 CASE_CFN_FLOOR:
13942 CASE_CFN_FLOOR_FN:
13943 CASE_CFN_FMOD:
13944 CASE_CFN_FREXP:
13945 CASE_CFN_ICEIL:
13946 CASE_CFN_IFLOOR:
13947 CASE_CFN_IRINT:
13948 CASE_CFN_IROUND:
13949 CASE_CFN_LCEIL:
13950 CASE_CFN_LDEXP:
13951 CASE_CFN_LFLOOR:
13952 CASE_CFN_LLCEIL:
13953 CASE_CFN_LLFLOOR:
13954 CASE_CFN_LLRINT:
13955 CASE_CFN_LLROUND:
13956 CASE_CFN_LRINT:
13957 CASE_CFN_LROUND:
13958 CASE_CFN_MODF:
13959 CASE_CFN_NEARBYINT:
13960 CASE_CFN_NEARBYINT_FN:
13961 CASE_CFN_RINT:
13962 CASE_CFN_RINT_FN:
13963 CASE_CFN_ROUND:
13964 CASE_CFN_ROUND_FN:
13965 CASE_CFN_ROUNDEVEN:
13966 CASE_CFN_ROUNDEVEN_FN:
13967 CASE_CFN_SCALB:
13968 CASE_CFN_SCALBLN:
13969 CASE_CFN_SCALBN:
13970 CASE_CFN_SIGNBIT:
13971 CASE_CFN_SIGNIFICAND:
13972 CASE_CFN_SINH:
13973 CASE_CFN_TANH:
13974 CASE_CFN_TRUNC:
13975 CASE_CFN_TRUNC_FN:
13976 /* True if the 1st argument is nonnegative. */
13977 return RECURSE (arg0);
13978
13979 CASE_CFN_FMAX:
13980 CASE_CFN_FMAX_FN:
13981 /* True if the 1st OR 2nd arguments are nonnegative. */
13982 return RECURSE (arg0) || RECURSE (arg1);
13983
13984 CASE_CFN_FMIN:
13985 CASE_CFN_FMIN_FN:
13986 /* True if the 1st AND 2nd arguments are nonnegative. */
13987 return RECURSE (arg0) && RECURSE (arg1);
13988
13989 CASE_CFN_COPYSIGN:
13990 CASE_CFN_COPYSIGN_FN:
13991 /* True if the 2nd argument is nonnegative. */
13992 return RECURSE (arg1);
13993
13994 CASE_CFN_POWI:
13995 /* True if the 1st argument is nonnegative or the second
13996 argument is an even integer. */
13997 if (TREE_CODE (arg1) == INTEGER_CST
13998 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13999 return true;
14000 return RECURSE (arg0);
14001
14002 CASE_CFN_POW:
14003 /* True if the 1st argument is nonnegative or the second
14004 argument is an even integer valued real. */
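/* For example, pow (x, 2.0) is known to be nonnegative even when nothing
   is known about x.  */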
14005 if (TREE_CODE (arg1) == REAL_CST)
14006 {
14007 REAL_VALUE_TYPE c;
14008 HOST_WIDE_INT n;
14009
14010 c = TREE_REAL_CST (arg1);
14011 n = real_to_integer (&c);
14012 if ((n & 1) == 0)
14013 {
14014 REAL_VALUE_TYPE cint;
14015 real_from_integer (&cint, VOIDmode, n, SIGNED);
14016 if (real_identical (&c, &cint))
14017 return true;
14018 }
14019 }
14020 return RECURSE (arg0);
14021
14022 default:
14023 break;
14024 }
14025 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
14026 }
14027
14028 /* Return true if T is known to be non-negative. If the return
14029 value is based on the assumption that signed overflow is undefined,
14030 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14031 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14032
14033 static bool
14034 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14035 {
14036 enum tree_code code = TREE_CODE (t);
14037 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14038 return true;
14039
14040 switch (code)
14041 {
14042 case TARGET_EXPR:
14043 {
14044 tree temp = TARGET_EXPR_SLOT (t);
14045 t = TARGET_EXPR_INITIAL (t);
14046
14047 /* If the initializer is non-void, then it's a normal expression
14048 that will be assigned to the slot. */
14049 if (!VOID_TYPE_P (t))
14050 return RECURSE (t);
14051
14052 /* Otherwise, the initializer sets the slot in some way. One common
14053 way is an assignment statement at the end of the initializer. */
14054 while (1)
14055 {
14056 if (TREE_CODE (t) == BIND_EXPR)
14057 t = expr_last (BIND_EXPR_BODY (t));
14058 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14059 || TREE_CODE (t) == TRY_CATCH_EXPR)
14060 t = expr_last (TREE_OPERAND (t, 0));
14061 else if (TREE_CODE (t) == STATEMENT_LIST)
14062 t = expr_last (t);
14063 else
14064 break;
14065 }
14066 if (TREE_CODE (t) == MODIFY_EXPR
14067 && TREE_OPERAND (t, 0) == temp)
14068 return RECURSE (TREE_OPERAND (t, 1));
14069
14070 return false;
14071 }
14072
14073 case CALL_EXPR:
14074 {
14075 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14076 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14077
14078 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14079 get_call_combined_fn (t),
14080 arg0,
14081 arg1,
14082 strict_overflow_p, depth);
14083 }
14084 case COMPOUND_EXPR:
14085 case MODIFY_EXPR:
14086 return RECURSE (TREE_OPERAND (t, 1));
14087
14088 case BIND_EXPR:
14089 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
14090
14091 case SAVE_EXPR:
14092 return RECURSE (TREE_OPERAND (t, 0));
14093
14094 default:
14095 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14096 }
14097 }
14098
14099 #undef RECURSE
14100 #undef tree_expr_nonnegative_warnv_p
14101
14102 /* Return true if T is known to be non-negative. If the return
14103 value is based on the assumption that signed overflow is undefined,
14104 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14105 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14106
14107 bool
14108 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14109 {
14110 enum tree_code code;
14111 if (t == error_mark_node)
14112 return false;
14113
14114 code = TREE_CODE (t);
14115 switch (TREE_CODE_CLASS (code))
14116 {
14117 case tcc_binary:
14118 case tcc_comparison:
14119 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14120 TREE_TYPE (t),
14121 TREE_OPERAND (t, 0),
14122 TREE_OPERAND (t, 1),
14123 strict_overflow_p, depth);
14124
14125 case tcc_unary:
14126 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14127 TREE_TYPE (t),
14128 TREE_OPERAND (t, 0),
14129 strict_overflow_p, depth);
14130
14131 case tcc_constant:
14132 case tcc_declaration:
14133 case tcc_reference:
14134 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14135
14136 default:
14137 break;
14138 }
14139
14140 switch (code)
14141 {
14142 case TRUTH_AND_EXPR:
14143 case TRUTH_OR_EXPR:
14144 case TRUTH_XOR_EXPR:
14145 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14146 TREE_TYPE (t),
14147 TREE_OPERAND (t, 0),
14148 TREE_OPERAND (t, 1),
14149 strict_overflow_p, depth);
14150 case TRUTH_NOT_EXPR:
14151 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14152 TREE_TYPE (t),
14153 TREE_OPERAND (t, 0),
14154 strict_overflow_p, depth);
14155
14156 case COND_EXPR:
14157 case CONSTRUCTOR:
14158 case OBJ_TYPE_REF:
14159 case ASSERT_EXPR:
14160 case ADDR_EXPR:
14161 case WITH_SIZE_EXPR:
14162 case SSA_NAME:
14163 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14164
14165 default:
14166 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
14167 }
14168 }
14169
14170 /* Return true if `t' is known to be non-negative. Handle warnings
14171 about undefined signed overflow. */
14172
14173 bool
14174 tree_expr_nonnegative_p (tree t)
14175 {
14176 bool ret, strict_overflow_p;
14177
14178 strict_overflow_p = false;
14179 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14180 if (strict_overflow_p)
14181 fold_overflow_warning (("assuming signed overflow does not occur when "
14182 "determining that expression is always "
14183 "non-negative"),
14184 WARN_STRICT_OVERFLOW_MISC);
14185 return ret;
14186 }
14187
14188
14189 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14190 For floating point we further ensure that T is not denormal.
14191 Similar logic is present in nonzero_address in rtlanal.h.
14192
14193 If the return value is based on the assumption that signed overflow
14194 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14195 change *STRICT_OVERFLOW_P. */
14196
14197 bool
14198 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14199 bool *strict_overflow_p)
14200 {
14201 switch (code)
14202 {
14203 case ABS_EXPR:
14204 return tree_expr_nonzero_warnv_p (op0,
14205 strict_overflow_p);
14206
14207 case NOP_EXPR:
14208 {
14209 tree inner_type = TREE_TYPE (op0);
14210 tree outer_type = type;
14211
14212 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14213 && tree_expr_nonzero_warnv_p (op0,
14214 strict_overflow_p));
14215 }
14216 break;
14217
14218 case NON_LVALUE_EXPR:
14219 return tree_expr_nonzero_warnv_p (op0,
14220 strict_overflow_p);
14221
14222 default:
14223 break;
14224 }
14225
14226 return false;
14227 }
14228
14229 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14230 For floating point we further ensure that T is not denormal.
14231 Similar logic is present in nonzero_address in rtlanal.h.
14232
14233 If the return value is based on the assumption that signed overflow
14234 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14235 change *STRICT_OVERFLOW_P. */
14236
14237 bool
14238 tree_binary_nonzero_warnv_p (enum tree_code code,
14239 tree type,
14240 tree op0,
14241 tree op1, bool *strict_overflow_p)
14242 {
14243 bool sub_strict_overflow_p;
14244 switch (code)
14245 {
14246 case POINTER_PLUS_EXPR:
14247 case PLUS_EXPR:
14248 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
14249 {
14250 /* In the presence of negative values it is hard
14251 to say anything. */
14252 sub_strict_overflow_p = false;
14253 if (!tree_expr_nonnegative_warnv_p (op0,
14254 &sub_strict_overflow_p)
14255 || !tree_expr_nonnegative_warnv_p (op1,
14256 &sub_strict_overflow_p))
14257 return false;
14258 /* One of the operands must be positive and the other non-negative. */
14259 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14260 overflows, on a twos-complement machine the sum of two
14261 nonnegative numbers can never be zero. */
14262 return (tree_expr_nonzero_warnv_p (op0,
14263 strict_overflow_p)
14264 || tree_expr_nonzero_warnv_p (op1,
14265 strict_overflow_p));
14266 }
14267 break;
14268
14269 case MULT_EXPR:
14270 if (TYPE_OVERFLOW_UNDEFINED (type))
14271 {
14272 if (tree_expr_nonzero_warnv_p (op0,
14273 strict_overflow_p)
14274 && tree_expr_nonzero_warnv_p (op1,
14275 strict_overflow_p))
14276 {
14277 *strict_overflow_p = true;
14278 return true;
14279 }
14280 }
14281 break;
14282
14283 case MIN_EXPR:
14284 sub_strict_overflow_p = false;
14285 if (tree_expr_nonzero_warnv_p (op0,
14286 &sub_strict_overflow_p)
14287 && tree_expr_nonzero_warnv_p (op1,
14288 &sub_strict_overflow_p))
14289 {
14290 if (sub_strict_overflow_p)
14291 *strict_overflow_p = true;
14292 }
14293 break;
14294
14295 case MAX_EXPR:
14296 sub_strict_overflow_p = false;
14297 if (tree_expr_nonzero_warnv_p (op0,
14298 &sub_strict_overflow_p))
14299 {
14300 if (sub_strict_overflow_p)
14301 *strict_overflow_p = true;
14302
14303 /* When both operands are nonzero, then MAX must be too. */
14304 if (tree_expr_nonzero_warnv_p (op1,
14305 strict_overflow_p))
14306 return true;
14307
14308 /* MAX where operand 0 is positive is positive. */
14309 return tree_expr_nonnegative_warnv_p (op0,
14310 strict_overflow_p);
14311 }
14312 /* MAX where operand 1 is positive is positive. */
14313 else if (tree_expr_nonzero_warnv_p (op1,
14314 &sub_strict_overflow_p)
14315 && tree_expr_nonnegative_warnv_p (op1,
14316 &sub_strict_overflow_p))
14317 {
14318 if (sub_strict_overflow_p)
14319 *strict_overflow_p = true;
14320 return true;
14321 }
14322 break;
14323
14324 case BIT_IOR_EXPR:
14325 return (tree_expr_nonzero_warnv_p (op1,
14326 strict_overflow_p)
14327 || tree_expr_nonzero_warnv_p (op0,
14328 strict_overflow_p));
14329
14330 default:
14331 break;
14332 }
14333
14334 return false;
14335 }
14336
14337 /* Return true when T is an address and is known to be nonzero.
14338 For floating point we further ensure that T is not denormal.
14339 Similar logic is present in nonzero_address in rtlanal.h.
14340
14341 If the return value is based on the assumption that signed overflow
14342 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14343 change *STRICT_OVERFLOW_P. */
14344
14345 bool
14346 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14347 {
14348 bool sub_strict_overflow_p;
14349 switch (TREE_CODE (t))
14350 {
14351 case INTEGER_CST:
14352 return !integer_zerop (t);
14353
14354 case ADDR_EXPR:
14355 {
14356 tree base = TREE_OPERAND (t, 0);
14357
14358 if (!DECL_P (base))
14359 base = get_base_address (base);
14360
14361 if (base && TREE_CODE (base) == TARGET_EXPR)
14362 base = TARGET_EXPR_SLOT (base);
14363
14364 if (!base)
14365 return false;
14366
14367 /* For objects in symbol table check if we know they are non-zero.
14368 Don't do anything for variables and functions before symtab is built;
14369 it is quite possible that they will be declared weak later. */
14370 int nonzero_addr = maybe_nonzero_address (base);
14371 if (nonzero_addr >= 0)
14372 return nonzero_addr;
14373
14374 /* Constants are never weak. */
14375 if (CONSTANT_CLASS_P (base))
14376 return true;
14377
14378 return false;
14379 }
14380
14381 case COND_EXPR:
14382 sub_strict_overflow_p = false;
14383 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14384 &sub_strict_overflow_p)
14385 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14386 &sub_strict_overflow_p))
14387 {
14388 if (sub_strict_overflow_p)
14389 *strict_overflow_p = true;
14390 return true;
14391 }
14392 break;
14393
14394 case SSA_NAME:
14395 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
14396 break;
14397 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
14398
14399 default:
14400 break;
14401 }
14402 return false;
14403 }
14404
14405 #define integer_valued_real_p(X) \
14406 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14407
14408 #define RECURSE(X) \
14409 ((integer_valued_real_p) (X, depth + 1))
14410
14411 /* Return true if the floating point result of (CODE OP0) has an
14412 integer value. We also allow +Inf, -Inf and NaN to be considered
14413 integer values. Return false for signaling NaN.
14414
14415 DEPTH is the current nesting depth of the query. */
14416
14417 bool
14418 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
14419 {
14420 switch (code)
14421 {
14422 case FLOAT_EXPR:
14423 return true;
14424
14425 case ABS_EXPR:
14426 return RECURSE (op0);
14427
14428 CASE_CONVERT:
14429 {
14430 tree type = TREE_TYPE (op0);
14431 if (TREE_CODE (type) == INTEGER_TYPE)
14432 return true;
14433 if (TREE_CODE (type) == REAL_TYPE)
14434 return RECURSE (op0);
14435 break;
14436 }
14437
14438 default:
14439 break;
14440 }
14441 return false;
14442 }
14443
14444 /* Return true if the floating point result of (CODE OP0 OP1) has an
14445 integer value. We also allow +Inf, -Inf and NaN to be considered
14446 integer values. Return false for signaling NaN.
14447
14448 DEPTH is the current nesting depth of the query. */
14449
14450 bool
14451 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
14452 {
14453 switch (code)
14454 {
14455 case PLUS_EXPR:
14456 case MINUS_EXPR:
14457 case MULT_EXPR:
14458 case MIN_EXPR:
14459 case MAX_EXPR:
14460 return RECURSE (op0) && RECURSE (op1);
14461
14462 default:
14463 break;
14464 }
14465 return false;
14466 }
14467
14468 /* Return true if the floating point result of calling FNDECL with arguments
14469 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
14470 considered integer values. Return false for signaling NaN. If FNDECL
14471 takes fewer than 2 arguments, the remaining ARGn are null.
14472
14473 DEPTH is the current nesting depth of the query. */
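
/* For example, floor (x) always has an integer value, while fmax (x, y)
   is only known to be integer valued when both arguments are.  */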
14474
14475 bool
14476 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
14477 {
14478 switch (fn)
14479 {
14480 CASE_CFN_CEIL:
14481 CASE_CFN_CEIL_FN:
14482 CASE_CFN_FLOOR:
14483 CASE_CFN_FLOOR_FN:
14484 CASE_CFN_NEARBYINT:
14485 CASE_CFN_NEARBYINT_FN:
14486 CASE_CFN_RINT:
14487 CASE_CFN_RINT_FN:
14488 CASE_CFN_ROUND:
14489 CASE_CFN_ROUND_FN:
14490 CASE_CFN_ROUNDEVEN:
14491 CASE_CFN_ROUNDEVEN_FN:
14492 CASE_CFN_TRUNC:
14493 CASE_CFN_TRUNC_FN:
14494 return true;
14495
14496 CASE_CFN_FMIN:
14497 CASE_CFN_FMIN_FN:
14498 CASE_CFN_FMAX:
14499 CASE_CFN_FMAX_FN:
14500 return RECURSE (arg0) && RECURSE (arg1);
14501
14502 default:
14503 break;
14504 }
14505 return false;
14506 }
14507
14508 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
14509 has an integer value. We also allow +Inf, -Inf and NaN to be
14510 considered integer values. Return false for signaling NaN.
14511
14512 DEPTH is the current nesting depth of the query. */
14513
14514 bool
14515 integer_valued_real_single_p (tree t, int depth)
14516 {
14517 switch (TREE_CODE (t))
14518 {
14519 case REAL_CST:
14520 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
14521
14522 case COND_EXPR:
14523 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14524
14525 case SSA_NAME:
14526 /* Limit the depth of recursion to avoid quadratic behavior.
14527 This is expected to catch almost all occurrences in practice.
14528 If this code misses important cases that unbounded recursion
14529 would not, passes that need this information could be revised
14530 to provide it through dataflow propagation. */
14531 return (!name_registered_for_update_p (t)
14532 && depth < param_max_ssa_name_query_depth
14533 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
14534 depth));
14535
14536 default:
14537 break;
14538 }
14539 return false;
14540 }
14541
14542 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
14543 has an integer value. We also allow +Inf, -Inf and NaN to be
14544 considered integer values. Return false for signaling NaN.
14545
14546 DEPTH is the current nesting depth of the query. */
14547
14548 static bool
14549 integer_valued_real_invalid_p (tree t, int depth)
14550 {
14551 switch (TREE_CODE (t))
14552 {
14553 case COMPOUND_EXPR:
14554 case MODIFY_EXPR:
14555 case BIND_EXPR:
14556 return RECURSE (TREE_OPERAND (t, 1));
14557
14558 case SAVE_EXPR:
14559 return RECURSE (TREE_OPERAND (t, 0));
14560
14561 default:
14562 break;
14563 }
14564 return false;
14565 }
14566
14567 #undef RECURSE
14568 #undef integer_valued_real_p
14569
14570 /* Return true if the floating point expression T has an integer value.
14571 We also allow +Inf, -Inf and NaN to be considered integer values.
14572 Return false for signaling NaN.
14573
14574 DEPTH is the current nesting depth of the query. */
14575
14576 bool
14577 integer_valued_real_p (tree t, int depth)
14578 {
14579 if (t == error_mark_node)
14580 return false;
14581
14582 STRIP_ANY_LOCATION_WRAPPER (t);
14583
14584 tree_code code = TREE_CODE (t);
14585 switch (TREE_CODE_CLASS (code))
14586 {
14587 case tcc_binary:
14588 case tcc_comparison:
14589 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
14590 TREE_OPERAND (t, 1), depth);
14591
14592 case tcc_unary:
14593 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
14594
14595 case tcc_constant:
14596 case tcc_declaration:
14597 case tcc_reference:
14598 return integer_valued_real_single_p (t, depth);
14599
14600 default:
14601 break;
14602 }
14603
14604 switch (code)
14605 {
14606 case COND_EXPR:
14607 case SSA_NAME:
14608 return integer_valued_real_single_p (t, depth);
14609
14610 case CALL_EXPR:
14611 {
14612 tree arg0 = (call_expr_nargs (t) > 0
14613 ? CALL_EXPR_ARG (t, 0)
14614 : NULL_TREE);
14615 tree arg1 = (call_expr_nargs (t) > 1
14616 ? CALL_EXPR_ARG (t, 1)
14617 : NULL_TREE);
14618 return integer_valued_real_call_p (get_call_combined_fn (t),
14619 arg0, arg1, depth);
14620 }
14621
14622 default:
14623 return integer_valued_real_invalid_p (t, depth);
14624 }
14625 }
14626
14627 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14628 attempt to fold the expression to a constant without modifying TYPE,
14629 OP0 or OP1.
14630
14631 If the expression could be simplified to a constant, then return
14632 the constant. If the expression would not be simplified to a
14633 constant, then return NULL_TREE. */
14634
14635 tree
14636 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14637 {
14638 tree tem = fold_binary (code, type, op0, op1);
14639 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14640 }
14641
14642 /* Given the components of a unary expression CODE, TYPE and OP0,
14643 attempt to fold the expression to a constant without modifying
14644 TYPE or OP0.
14645
14646 If the expression could be simplified to a constant, then return
14647 the constant. If the expression would not be simplified to a
14648 constant, then return NULL_TREE. */
14649
14650 tree
14651 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14652 {
14653 tree tem = fold_unary (code, type, op0);
14654 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14655 }
14656
14657 /* If EXP represents referencing an element in a constant string
14658 (either via pointer arithmetic or array indexing), return the
14659 tree representing the value accessed, otherwise return NULL. */
14660
14661 tree
14662 fold_read_from_constant_string (tree exp)
14663 {
14664 if ((TREE_CODE (exp) == INDIRECT_REF
14665 || TREE_CODE (exp) == ARRAY_REF)
14666 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14667 {
14668 tree exp1 = TREE_OPERAND (exp, 0);
14669 tree index;
14670 tree string;
14671 location_t loc = EXPR_LOCATION (exp);
14672
14673 if (TREE_CODE (exp) == INDIRECT_REF)
14674 string = string_constant (exp1, &index, NULL, NULL);
14675 else
14676 {
14677 tree low_bound = array_ref_low_bound (exp);
14678 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14679
14680 /* Optimize the special-case of a zero lower bound.
14681
14682 We convert the low_bound to sizetype to avoid some problems
14683 with constant folding. (E.g. suppose the lower bound is 1,
14684 and its mode is QI. Without the conversion, (ARRAY
14685 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14686 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14687 if (! integer_zerop (low_bound))
14688 index = size_diffop_loc (loc, index,
14689 fold_convert_loc (loc, sizetype, low_bound));
14690
14691 string = exp1;
14692 }
14693
14694 scalar_int_mode char_mode;
14695 if (string
14696 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14697 && TREE_CODE (string) == STRING_CST
14698 && TREE_CODE (index) == INTEGER_CST
14699 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14700 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
14701 &char_mode)
14702 && GET_MODE_SIZE (char_mode) == 1)
14703 return build_int_cst_type (TREE_TYPE (exp),
14704 (TREE_STRING_POINTER (string)
14705 [TREE_INT_CST_LOW (index)]));
14706 }
14707 return NULL;
14708 }
14709
14710 /* Folds a read from vector element at IDX of vector ARG. */
14711
14712 tree
14713 fold_read_from_vector (tree arg, poly_uint64 idx)
14714 {
14715 unsigned HOST_WIDE_INT i;
14716 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
14717 && known_ge (idx, 0u)
14718 && idx.is_constant (&i))
14719 {
14720 if (TREE_CODE (arg) == VECTOR_CST)
14721 return VECTOR_CST_ELT (arg, i);
14722 else if (TREE_CODE (arg) == CONSTRUCTOR)
14723 {
14724 if (i >= CONSTRUCTOR_NELTS (arg))
14725 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
14726 return CONSTRUCTOR_ELT (arg, i)->value;
14727 }
14728 }
14729 return NULL_TREE;
14730 }
14731
14732 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14733 an integer constant, real, or fixed-point constant.
14734
14735 TYPE is the type of the result. */
14736
14737 static tree
14738 fold_negate_const (tree arg0, tree type)
14739 {
14740 tree t = NULL_TREE;
14741
14742 switch (TREE_CODE (arg0))
14743 {
14744 case REAL_CST:
14745 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14746 break;
14747
14748 case FIXED_CST:
14749 {
14750 FIXED_VALUE_TYPE f;
14751 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14752 &(TREE_FIXED_CST (arg0)), NULL,
14753 TYPE_SATURATING (type));
14754 t = build_fixed (type, f);
14755 /* Propagate overflow flags. */
14756 if (overflow_p | TREE_OVERFLOW (arg0))
14757 TREE_OVERFLOW (t) = 1;
14758 break;
14759 }
14760
14761 default:
14762 if (poly_int_tree_p (arg0))
14763 {
14764 wi::overflow_type overflow;
14765 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
14766 t = force_fit_type (type, res, 1,
14767 (overflow && ! TYPE_UNSIGNED (type))
14768 || TREE_OVERFLOW (arg0));
14769 break;
14770 }
14771
14772 gcc_unreachable ();
14773 }
14774
14775 return t;
14776 }
14777
14778 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14779 an integer constant or real constant.
14780
14781 TYPE is the type of the result. */
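
/* For example, fold_abs_const on the INTEGER_CST -5 yields 5; for the most
   negative value of a signed type the negation wraps and TREE_OVERFLOW is
   set on the result.  */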
14782
14783 tree
14784 fold_abs_const (tree arg0, tree type)
14785 {
14786 tree t = NULL_TREE;
14787
14788 switch (TREE_CODE (arg0))
14789 {
14790 case INTEGER_CST:
14791 {
14792 /* If the value is unsigned or non-negative, then the absolute value
14793 is the same as the ordinary value. */
14794 wide_int val = wi::to_wide (arg0);
14795 wi::overflow_type overflow = wi::OVF_NONE;
14796 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
14797 ;
14798
14799 /* If the value is negative, then the absolute value is
14800 its negation. */
14801 else
14802 val = wi::neg (val, &overflow);
14803
14804 /* Force to the destination type, set TREE_OVERFLOW for signed
14805 TYPE only. */
14806 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
14807 }
14808 break;
14809
14810 case REAL_CST:
14811 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14812 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14813 else
14814 t = arg0;
14815 break;
14816
14817 default:
14818 gcc_unreachable ();
14819 }
14820
14821 return t;
14822 }
14823
14824 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14825 constant. TYPE is the type of the result. */
14826
14827 static tree
14828 fold_not_const (const_tree arg0, tree type)
14829 {
14830 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14831
14832 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
14833 }
14834
14835 /* Given CODE, a relational operator, the target type, TYPE and two
14836 constant operands OP0 and OP1, return the result of the
14837 relational operation. If the result is not a compile time
14838 constant, then return NULL_TREE. */
14839
14840 static tree
14841 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14842 {
14843 int result, invert;
14844
14845 /* From here on, the only cases we handle are when the result is
14846 known to be a constant. */
14847
14848 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14849 {
14850 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14851 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14852
14853 /* Handle the cases where either operand is a NaN. */
14854 if (real_isnan (c0) || real_isnan (c1))
14855 {
14856 switch (code)
14857 {
14858 case EQ_EXPR:
14859 case ORDERED_EXPR:
14860 result = 0;
14861 break;
14862
14863 case NE_EXPR:
14864 case UNORDERED_EXPR:
14865 case UNLT_EXPR:
14866 case UNLE_EXPR:
14867 case UNGT_EXPR:
14868 case UNGE_EXPR:
14869 case UNEQ_EXPR:
14870 result = 1;
14871 break;
14872
14873 case LT_EXPR:
14874 case LE_EXPR:
14875 case GT_EXPR:
14876 case GE_EXPR:
14877 case LTGT_EXPR:
14878 if (flag_trapping_math)
14879 return NULL_TREE;
14880 result = 0;
14881 break;
14882
14883 default:
14884 gcc_unreachable ();
14885 }
14886
14887 return constant_boolean_node (result, type);
14888 }
14889
14890 return constant_boolean_node (real_compare (code, c0, c1), type);
14891 }
14892
14893 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14894 {
14895 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14896 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14897 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14898 }
14899
14900 /* Handle equality/inequality of complex constants. */
14901 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14902 {
14903 tree rcond = fold_relational_const (code, type,
14904 TREE_REALPART (op0),
14905 TREE_REALPART (op1));
14906 tree icond = fold_relational_const (code, type,
14907 TREE_IMAGPART (op0),
14908 TREE_IMAGPART (op1));
14909 if (code == EQ_EXPR)
14910 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14911 else if (code == NE_EXPR)
14912 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14913 else
14914 return NULL_TREE;
14915 }
14916
14917 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14918 {
14919 if (!VECTOR_TYPE_P (type))
14920 {
14921 /* Have vector comparison with scalar boolean result. */
14922 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14923 && known_eq (VECTOR_CST_NELTS (op0),
14924 VECTOR_CST_NELTS (op1)));
14925 unsigned HOST_WIDE_INT nunits;
14926 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14927 return NULL_TREE;
14928 for (unsigned i = 0; i < nunits; i++)
14929 {
14930 tree elem0 = VECTOR_CST_ELT (op0, i);
14931 tree elem1 = VECTOR_CST_ELT (op1, i);
14932 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
14933 if (tmp == NULL_TREE)
14934 return NULL_TREE;
14935 if (integer_zerop (tmp))
14936 return constant_boolean_node (code == NE_EXPR, type);
14937 }
14938 return constant_boolean_node (code == EQ_EXPR, type);
14939 }
14940 tree_vector_builder elts;
14941 if (!elts.new_binary_operation (type, op0, op1, false))
14942 return NULL_TREE;
14943 unsigned int count = elts.encoded_nelts ();
14944 for (unsigned i = 0; i < count; i++)
14945 {
14946 tree elem_type = TREE_TYPE (type);
14947 tree elem0 = VECTOR_CST_ELT (op0, i);
14948 tree elem1 = VECTOR_CST_ELT (op1, i);
14949
14950 tree tem = fold_relational_const (code, elem_type,
14951 elem0, elem1);
14952
14953 if (tem == NULL_TREE)
14954 return NULL_TREE;
14955
14956 elts.quick_push (build_int_cst (elem_type,
14957 integer_zerop (tem) ? 0 : -1));
14958 }
14959
14960 return elts.build ();
14961 }
14962
14963 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14964
14965 To compute GT, swap the arguments and do LT.
14966 To compute GE, do LT and invert the result.
14967 To compute LE, swap the arguments, do LT and invert the result.
14968 To compute NE, do EQ and invert the result.
14969
14970 Therefore, the code below must handle only EQ and LT. */
14971
14972 if (code == LE_EXPR || code == GT_EXPR)
14973 {
14974 std::swap (op0, op1);
14975 code = swap_tree_comparison (code);
14976 }
14977
14978 /* Note that it is safe to invert for real values here because we
14979 have already handled the one case where it matters. */
14980
14981 invert = 0;
14982 if (code == NE_EXPR || code == GE_EXPR)
14983 {
14984 invert = 1;
14985 code = invert_tree_comparison (code, false);
14986 }
14987
14988 /* Compute a result for LT or EQ if args permit;
14989 otherwise return NULL_TREE. */
14990 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14991 {
14992 if (code == EQ_EXPR)
14993 result = tree_int_cst_equal (op0, op1);
14994 else
14995 result = tree_int_cst_lt (op0, op1);
14996 }
14997 else
14998 return NULL_TREE;
14999
15000 if (invert)
15001 result ^= 1;
15002 return constant_boolean_node (result, type);
15003 }
15004
15005 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15006 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15007 itself. */
15008
15009 tree
15010 fold_build_cleanup_point_expr (tree type, tree expr)
15011 {
15012 /* If the expression does not have side effects then we don't have to wrap
15013 it with a cleanup point expression. */
15014 if (!TREE_SIDE_EFFECTS (expr))
15015 return expr;
15016
15017 /* If the expression is a return, check whether the operand of the
15018 return, or the right hand side of the modify expression inside the return,
15019 has no side effects. If either has none, we don't need to wrap the
15020 expression in a cleanup point expression. Note we don't check the
15021 left hand side of the modify because it should always be a return decl. */
15022 if (TREE_CODE (expr) == RETURN_EXPR)
15023 {
15024 tree op = TREE_OPERAND (expr, 0);
15025 if (!op || !TREE_SIDE_EFFECTS (op))
15026 return expr;
15027 op = TREE_OPERAND (op, 1);
15028 if (!TREE_SIDE_EFFECTS (op))
15029 return expr;
15030 }
15031
15032 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15033 }
15034
15035 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15036 of an indirection through OP0, or NULL_TREE if no simplification is
15037 possible. */
15038
15039 tree
15040 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15041 {
15042 tree sub = op0;
15043 tree subtype;
15044 poly_uint64 const_op01;
15045
15046 STRIP_NOPS (sub);
15047 subtype = TREE_TYPE (sub);
15048 if (!POINTER_TYPE_P (subtype)
15049 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15050 return NULL_TREE;
15051
15052 if (TREE_CODE (sub) == ADDR_EXPR)
15053 {
15054 tree op = TREE_OPERAND (sub, 0);
15055 tree optype = TREE_TYPE (op);
15056
15057 /* *&CONST_DECL -> to the value of the const decl. */
15058 if (TREE_CODE (op) == CONST_DECL)
15059 return DECL_INITIAL (op);
15060 /* *&p => p; make sure to handle *&"str"[cst] here. */
15061 if (type == optype)
15062 {
15063 tree fop = fold_read_from_constant_string (op);
15064 if (fop)
15065 return fop;
15066 else
15067 return op;
15068 }
15069 /* *(foo *)&fooarray => fooarray[0] */
15070 else if (TREE_CODE (optype) == ARRAY_TYPE
15071 && type == TREE_TYPE (optype)
15072 && (!in_gimple_form
15073 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15074 {
15075 tree type_domain = TYPE_DOMAIN (optype);
15076 tree min_val = size_zero_node;
15077 if (type_domain && TYPE_MIN_VALUE (type_domain))
15078 min_val = TYPE_MIN_VALUE (type_domain);
15079 if (in_gimple_form
15080 && TREE_CODE (min_val) != INTEGER_CST)
15081 return NULL_TREE;
15082 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15083 NULL_TREE, NULL_TREE);
15084 }
15085 /* *(foo *)&complexfoo => __real__ complexfoo */
15086 else if (TREE_CODE (optype) == COMPLEX_TYPE
15087 && type == TREE_TYPE (optype))
15088 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15089 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15090 else if (VECTOR_TYPE_P (optype)
15091 && type == TREE_TYPE (optype))
15092 {
15093 tree part_width = TYPE_SIZE (type);
15094 tree index = bitsize_int (0);
15095 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
15096 index);
15097 }
15098 }
15099
15100 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15101 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
15102 {
15103 tree op00 = TREE_OPERAND (sub, 0);
15104 tree op01 = TREE_OPERAND (sub, 1);
15105
15106 STRIP_NOPS (op00);
15107 if (TREE_CODE (op00) == ADDR_EXPR)
15108 {
15109 tree op00type;
15110 op00 = TREE_OPERAND (op00, 0);
15111 op00type = TREE_TYPE (op00);
15112
15113 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15114 if (VECTOR_TYPE_P (op00type)
15115 && type == TREE_TYPE (op00type)
15116 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15117 but we want to treat offsets with MSB set as negative.
15118 For the code below negative offsets are invalid and
15119 TYPE_SIZE of the element is something unsigned, so
15120 check whether op01 fits into poly_int64, which implies
15121 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15122 then just use poly_uint64 because we want to treat the
15123 value as unsigned. */
15124 && tree_fits_poly_int64_p (op01))
15125 {
15126 tree part_width = TYPE_SIZE (type);
15127 poly_uint64 max_offset
15128 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
15129 * TYPE_VECTOR_SUBPARTS (op00type));
15130 if (known_lt (const_op01, max_offset))
15131 {
15132 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
15133 return fold_build3_loc (loc,
15134 BIT_FIELD_REF, type, op00,
15135 part_width, index);
15136 }
15137 }
15138 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15139 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15140 && type == TREE_TYPE (op00type))
15141 {
15142 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
15143 const_op01))
15144 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15145 }
15146 /* ((foo *)&fooarray)[1] => fooarray[1] */
15147 else if (TREE_CODE (op00type) == ARRAY_TYPE
15148 && type == TREE_TYPE (op00type))
15149 {
15150 tree type_domain = TYPE_DOMAIN (op00type);
15151 tree min_val = size_zero_node;
15152 if (type_domain && TYPE_MIN_VALUE (type_domain))
15153 min_val = TYPE_MIN_VALUE (type_domain);
15154 poly_uint64 type_size, index;
15155 if (poly_int_tree_p (min_val)
15156 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
15157 && multiple_p (const_op01, type_size, &index))
15158 {
15159 poly_offset_int off = index + wi::to_poly_offset (min_val);
15160 op01 = wide_int_to_tree (sizetype, off);
15161 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15162 NULL_TREE, NULL_TREE);
15163 }
15164 }
15165 }
15166 }
15167
15168 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15169 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15170 && type == TREE_TYPE (TREE_TYPE (subtype))
15171 && (!in_gimple_form
15172 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15173 {
15174 tree type_domain;
15175 tree min_val = size_zero_node;
15176 sub = build_fold_indirect_ref_loc (loc, sub);
15177 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15178 if (type_domain && TYPE_MIN_VALUE (type_domain))
15179 min_val = TYPE_MIN_VALUE (type_domain);
15180 if (in_gimple_form
15181 && TREE_CODE (min_val) != INTEGER_CST)
15182 return NULL_TREE;
15183 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15184 NULL_TREE);
15185 }
15186
15187 return NULL_TREE;
15188 }
15189
15190 /* Builds an expression for an indirection through T, simplifying some
15191 cases. */
15192
15193 tree
15194 build_fold_indirect_ref_loc (location_t loc, tree t)
15195 {
15196 tree type = TREE_TYPE (TREE_TYPE (t));
15197 tree sub = fold_indirect_ref_1 (loc, type, t);
15198
15199 if (sub)
15200 return sub;
15201
15202 return build1_loc (loc, INDIRECT_REF, type, t);
15203 }
15204
15205 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15206
15207 tree
15208 fold_indirect_ref_loc (location_t loc, tree t)
15209 {
15210 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15211
15212 if (sub)
15213 return sub;
15214 else
15215 return t;
15216 }
15217
15218 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15219 whose result is ignored. The type of the returned tree need not be
15220 the same as the original expression. */
15221
15222 tree
15223 fold_ignored_result (tree t)
15224 {
15225 if (!TREE_SIDE_EFFECTS (t))
15226 return integer_zero_node;
15227
15228 for (;;)
15229 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15230 {
15231 case tcc_unary:
15232 t = TREE_OPERAND (t, 0);
15233 break;
15234
15235 case tcc_binary:
15236 case tcc_comparison:
15237 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15238 t = TREE_OPERAND (t, 0);
15239 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15240 t = TREE_OPERAND (t, 1);
15241 else
15242 return t;
15243 break;
15244
15245 case tcc_expression:
15246 switch (TREE_CODE (t))
15247 {
15248 case COMPOUND_EXPR:
15249 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15250 return t;
15251 t = TREE_OPERAND (t, 0);
15252 break;
15253
15254 case COND_EXPR:
15255 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15256 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15257 return t;
15258 t = TREE_OPERAND (t, 0);
15259 break;
15260
15261 default:
15262 return t;
15263 }
15264 break;
15265
15266 default:
15267 return t;
15268 }
15269 }
15270
15271 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15272
15273 tree
15274 round_up_loc (location_t loc, tree value, unsigned int divisor)
15275 {
15276 tree div = NULL_TREE;
15277
15278 if (divisor == 1)
15279 return value;
15280
15281 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15282 have to do anything. Only do this when we are not given a const,
15283 because in that case, this check is more expensive than just
15284 doing it. */
15285 if (TREE_CODE (value) != INTEGER_CST)
15286 {
15287 div = build_int_cst (TREE_TYPE (value), divisor);
15288
15289 if (multiple_of_p (TREE_TYPE (value), value, div))
15290 return value;
15291 }
15292
15293 /* If divisor is a power of two, simplify this to bit manipulation. */
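/* For example, rounding VALUE up to a multiple of 8 becomes
   (VALUE + 7) & -8.  */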
15294 if (pow2_or_zerop (divisor))
15295 {
15296 if (TREE_CODE (value) == INTEGER_CST)
15297 {
15298 wide_int val = wi::to_wide (value);
15299 bool overflow_p;
15300
15301 if ((val & (divisor - 1)) == 0)
15302 return value;
15303
15304 overflow_p = TREE_OVERFLOW (value);
15305 val += divisor - 1;
15306 val &= (int) -divisor;
15307 if (val == 0)
15308 overflow_p = true;
15309
15310 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15311 }
15312 else
15313 {
15314 tree t;
15315
15316 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15317 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15318 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
15319 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15320 }
15321 }
15322 else
15323 {
15324 if (!div)
15325 div = build_int_cst (TREE_TYPE (value), divisor);
15326 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15327 value = size_binop_loc (loc, MULT_EXPR, value, div);
15328 }
15329
15330 return value;
15331 }
15332
15333 /* Likewise, but round down. */
15334
15335 tree
15336 round_down_loc (location_t loc, tree value, int divisor)
15337 {
15338 tree div = NULL_TREE;
15339
15340 gcc_assert (divisor > 0);
15341 if (divisor == 1)
15342 return value;
15343
15344 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15345 have to do anything. Only do this when we are not given a const,
15346 because in that case, this check is more expensive than just
15347 doing it. */
15348 if (TREE_CODE (value) != INTEGER_CST)
15349 {
15350 div = build_int_cst (TREE_TYPE (value), divisor);
15351
15352 if (multiple_of_p (TREE_TYPE (value), value, div))
15353 return value;
15354 }
15355
15356 /* If divisor is a power of two, simplify this to bit manipulation. */
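/* For example, rounding VALUE down to a multiple of 8 becomes
   VALUE & -8.  */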
15357 if (pow2_or_zerop (divisor))
15358 {
15359 tree t;
15360
15361 t = build_int_cst (TREE_TYPE (value), -divisor);
15362 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15363 }
15364 else
15365 {
15366 if (!div)
15367 div = build_int_cst (TREE_TYPE (value), divisor);
15368 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15369 value = size_binop_loc (loc, MULT_EXPR, value, div);
15370 }
15371
15372 return value;
15373 }
15374
15375 /* Returns the pointer to the base of the object addressed by EXP and
15376 extracts the information about the offset of the access, storing it
15377 to PBITPOS and POFFSET. */
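
/* For example, for &s.f where F is a field at a constant position, the
   returned core is &s, *PBITPOS is the bit offset of F and *POFFSET is
   NULL_TREE; any variable part of the offset ends up in *POFFSET.  */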
15378
15379 static tree
15380 split_address_to_core_and_offset (tree exp,
15381 poly_int64_pod *pbitpos, tree *poffset)
15382 {
15383 tree core;
15384 machine_mode mode;
15385 int unsignedp, reversep, volatilep;
15386 poly_int64 bitsize;
15387 location_t loc = EXPR_LOCATION (exp);
15388
15389 if (TREE_CODE (exp) == ADDR_EXPR)
15390 {
15391 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15392 poffset, &mode, &unsignedp, &reversep,
15393 &volatilep);
15394 core = build_fold_addr_expr_loc (loc, core);
15395 }
15396 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
15397 {
15398 core = TREE_OPERAND (exp, 0);
15399 STRIP_NOPS (core);
15400 *pbitpos = 0;
15401 *poffset = TREE_OPERAND (exp, 1);
15402 if (poly_int_tree_p (*poffset))
15403 {
15404 poly_offset_int tem
15405 = wi::sext (wi::to_poly_offset (*poffset),
15406 TYPE_PRECISION (TREE_TYPE (*poffset)));
15407 tem <<= LOG2_BITS_PER_UNIT;
15408 if (tem.to_shwi (pbitpos))
15409 *poffset = NULL_TREE;
15410 }
15411 }
15412 else
15413 {
15414 core = exp;
15415 *pbitpos = 0;
15416 *poffset = NULL_TREE;
15417 }
15418
15419 return core;
15420 }
15421
15422 /* Returns true if addresses of E1 and E2 differ by a constant, false
15423 otherwise. If they do, E1 - E2 is stored in *DIFF. */
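
/* For example, for E1 = &a[3] and E2 = &a[1] the function returns true
   and stores 2 * sizeof (a[0]) bytes in *DIFF.  */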
15424
15425 bool
15426 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
15427 {
15428 tree core1, core2;
15429 poly_int64 bitpos1, bitpos2;
15430 tree toffset1, toffset2, tdiff, type;
15431
15432 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15433 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15434
15435 poly_int64 bytepos1, bytepos2;
15436 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
15437 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
15438 || !operand_equal_p (core1, core2, 0))
15439 return false;
15440
15441 if (toffset1 && toffset2)
15442 {
15443 type = TREE_TYPE (toffset1);
15444 if (type != TREE_TYPE (toffset2))
15445 toffset2 = fold_convert (type, toffset2);
15446
15447 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15448 if (!cst_and_fits_in_hwi (tdiff))
15449 return false;
15450
15451 *diff = int_cst_value (tdiff);
15452 }
15453 else if (toffset1 || toffset2)
15454 {
15455 /* If only one of the offsets is non-constant, the difference cannot
15456 be a constant. */
15457 return false;
15458 }
15459 else
15460 *diff = 0;
15461
15462 *diff += bytepos1 - bytepos2;
15463 return true;
15464 }
15465
15466 /* Return OFF converted to a pointer offset type suitable as offset for
15467 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15468 tree
15469 convert_to_ptrofftype_loc (location_t loc, tree off)
15470 {
15471 return fold_convert_loc (loc, sizetype, off);
15472 }
15473
15474 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15475 tree
15476 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
15477 {
15478 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15479 ptr, convert_to_ptrofftype_loc (loc, off));
15480 }
15481
15482 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15483 tree
15484 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
15485 {
15486 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15487 ptr, size_int (off));
15488 }
15489
15490 /* Return a pointer P to a NUL-terminated string containing the sequence
15491 of bytes corresponding to the representation of the object referred to
15492 by SRC (or a subsequence of such bytes within it if SRC is a reference
15493 to an initialized constant array plus some constant offset).
15494 If STRSIZE is non-null, store the number of bytes in the constant
15495 sequence including the terminating NUL byte. *STRSIZE is equal to
15496 sizeof(A) - OFFSET where A is the array that stores the constant
15497 sequence that SRC points to and OFFSET is the byte offset of SRC from
15498 the beginning of A. SRC need not point to a string or even an array
15499 of characters but may point to an object of any type. */
15500
15501 const char *
15502 c_getstr (tree src, unsigned HOST_WIDE_INT *strsize /* = NULL */)
15503 {
15504 /* The offset into the array A storing the string, and A's byte size. */
15505 tree offset_node;
15506 tree mem_size;
15507
15508 if (strsize)
15509 *strsize = 0;
15510
15511 src = string_constant (src, &offset_node, &mem_size, NULL);
15512 if (!src)
15513 return NULL;
15514
15515 unsigned HOST_WIDE_INT offset = 0;
15516 if (offset_node != NULL_TREE)
15517 {
15518 if (!tree_fits_uhwi_p (offset_node))
15519 return NULL;
15520 else
15521 offset = tree_to_uhwi (offset_node);
15522 }
15523
15524 if (!tree_fits_uhwi_p (mem_size))
15525 return NULL;
15526
15527 /* ARRAY_SIZE is the byte size of the array the constant sequence
15528 is stored in and equal to sizeof A. INIT_BYTES is the number
15529 of bytes in the constant sequence used to initialize the array,
15530 including any embedded NULs as well as the terminating NUL (for
15531 strings), but not including any trailing zeros/NULs past
15532 the terminating one appended implicitly to a string literal to
15533 zero out the remainder of the array it's stored in. For example,
15534 given:
15535 const char a[7] = "abc\0d";
15536 n = strlen (a + 1);
15537 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
15538 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
15539 is equal to strlen (A) + 1. */
15540 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
15541 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
15542
15543 /* Ideally this would turn into a gcc_checking_assert over time. */
15544 if (init_bytes > array_size)
15545 init_bytes = array_size;
15546
15547 const char *string = TREE_STRING_POINTER (src);
15548
15553 if (init_bytes == 0 || offset >= array_size)
15554 return NULL;
15555
15556 if (strsize)
15557 {
15558 /* Compute and store the number of characters from the beginning
15559 of the substring at OFFSET to the end, including the terminating
15560 NUL. Offsets past the initialized length refer to the empty string. */
15561 if (offset < init_bytes)
15562 *strsize = init_bytes - offset;
15563 else
15564 *strsize = 1;
15565 }
15566 else
15567 {
15568 tree eltype = TREE_TYPE (TREE_TYPE (src));
15569 /* Support only properly NUL-terminated single-byte strings. */
15570 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
15571 return NULL;
15572 if (string[init_bytes - 1] != '\0')
15573 return NULL;
15574 }
15575
15576 return offset < init_bytes ? string + offset : "";
15577 }
15578
15579 /* Given a tree T, compute which bits in T may be nonzero. */
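/* Editorial examples of the intent (not exhaustive): for an INTEGER_CST
   the result is the constant's own bit pattern; for (x & 0xf0) at most
   bits 4-7 can be set in the result; for expressions the function does
   not understand, it conservatively returns all-ones in the precision
   of T's type. */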
15580
15581 wide_int
15582 tree_nonzero_bits (const_tree t)
15583 {
15584 switch (TREE_CODE (t))
15585 {
15586 case INTEGER_CST:
15587 return wi::to_wide (t);
15588 case SSA_NAME:
15589 return get_nonzero_bits (t);
15590 case NON_LVALUE_EXPR:
15591 case SAVE_EXPR:
15592 return tree_nonzero_bits (TREE_OPERAND (t, 0));
15593 case BIT_AND_EXPR:
15594 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15595 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15596 case BIT_IOR_EXPR:
15597 case BIT_XOR_EXPR:
15598 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15599 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15600 case COND_EXPR:
15601 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
15602 tree_nonzero_bits (TREE_OPERAND (t, 2)));
15603 CASE_CONVERT:
15604 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15605 TYPE_PRECISION (TREE_TYPE (t)),
15606 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
15607 case PLUS_EXPR:
15608 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
15609 {
15610 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
15611 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
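/* If the addends share no possibly-nonzero bits, no carry can be
   generated at any bit position, so the sum's nonzero bits are
   simply the union of the operands'. */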
15612 if (wi::bit_and (nzbits1, nzbits2) == 0)
15613 return wi::bit_or (nzbits1, nzbits2);
15614 }
15615 break;
15616 case LSHIFT_EXPR:
15617 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15618 {
15619 tree type = TREE_TYPE (t);
15620 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15621 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15622 TYPE_PRECISION (type));
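/* A negative shift count means a shift in the opposite direction,
   so the possibly-nonzero bits move right rather than left. */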
15623 return wi::neg_p (arg1)
15624 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
15625 : wi::lshift (nzbits, arg1);
15626 }
15627 break;
15628 case RSHIFT_EXPR:
15629 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15630 {
15631 tree type = TREE_TYPE (t);
15632 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15633 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15634 TYPE_PRECISION (type));
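/* Likewise, a negative count here moves the possibly-nonzero bits
   left rather than right. */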
15635 return wi::neg_p (arg1)
15636 ? wi::lshift (nzbits, -arg1)
15637 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
15638 }
15639 break;
15640 default:
15641 break;
15642 }
15643
15644 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
15645 }
15646
15647 #if CHECKING_P
15648
15649 namespace selftest {
15650
15651 /* Helper functions for writing tests of folding trees. */
15652
15653 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
15654
15655 static void
15656 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
15657 tree constant)
15658 {
15659 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
15660 }
15661
15662 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
15663 wrapping WRAPPED_EXPR. */
15664
15665 static void
15666 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
15667 tree wrapped_expr)
15668 {
15669 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
15670 ASSERT_NE (wrapped_expr, result);
15671 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
15672 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
15673 }
15674
15675 /* Verify that various arithmetic binary operations are folded
15676 correctly. */
15677
15678 static void
15679 test_arithmetic_folding ()
15680 {
15681 tree type = integer_type_node;
15682 tree x = create_tmp_var_raw (type, "x");
15683 tree zero = build_zero_cst (type);
15684 tree one = build_int_cst (type, 1);
15685
15686 /* Addition. */
15687 /* 1 <-- (0 + 1) */
15688 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
15689 one);
15690 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
15691 one);
15692
15693 /* (nonlvalue)x <-- (x + 0) */
15694 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
15695 x);
15696
15697 /* Subtraction. */
15698 /* 0 <-- (x - x) */
15699 assert_binop_folds_to_const (x, MINUS_EXPR, x,
15700 zero);
15701 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
15702 x);
15703
15704 /* Multiplication. */
15705 /* 0 <-- (x * 0) */
15706 assert_binop_folds_to_const (x, MULT_EXPR, zero,
15707 zero);
15708
15709 /* (nonlvalue)x <-- (x * 1) */
15710 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
15711 x);
15712 }
15713
15714 /* Verify that various binary operations on vectors are folded
15715 correctly. */
15716
15717 static void
15718 test_vector_folding ()
15719 {
15720 tree inner_type = integer_type_node;
15721 tree type = build_vector_type (inner_type, 4);
15722 tree zero = build_zero_cst (type);
15723 tree one = build_one_cst (type);
15724 tree index = build_index_vector (type, 0, 1);
15725
15726 /* Verify equality tests that return a scalar boolean result. */
15727 tree res_type = boolean_type_node;
15728 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
15729 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
15730 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
15731 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
15732 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
15733 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15734 index, one)));
15735 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
15736 index, index)));
15737 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15738 index, index)));
15739 }
15740
15741 /* Verify folding of VEC_DUPLICATE_EXPRs. */
15742
15743 static void
15744 test_vec_duplicate_folding ()
15745 {
15746 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
15747 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
15748 /* This will be 1 if VEC_MODE isn't a vector mode. */
15749 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
15750
15751 tree type = build_vector_type (ssizetype, nunits);
15752 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
15753 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
15754 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
15755 }
15756
15757 /* Run all of the selftests within this file. */
15758
15759 void
15760 fold_const_c_tests ()
15761 {
15762 test_arithmetic_folding ();
15763 test_vector_folding ();
15764 test_vec_duplicate_folding ();
15765 }
15766
15767 } // namespace selftest
15768
15769 #endif /* CHECKING_P */