Adjust expr_not_equal_to to use irange API.
gcc/fold-const.c (gcc.git)
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision etc. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-vrp.h"
79 #include "tree-ssanames.h"
80 #include "selftest.h"
81 #include "stringpool.h"
82 #include "attribs.h"
83 #include "tree-vector-builder.h"
84 #include "vec-perm-indices.h"
85
86 /* Nonzero if we are folding constants inside an initializer; zero
87 otherwise. */
88 int folding_initializer = 0;
89
90 /* The following constants represent a bit-based encoding of GCC's
91 comparison operators. This encoding simplifies transformations
92 on relational comparison operators, such as AND and OR. */
93 enum comparison_code {
94 COMPCODE_FALSE = 0,
95 COMPCODE_LT = 1,
96 COMPCODE_EQ = 2,
97 COMPCODE_LE = 3,
98 COMPCODE_GT = 4,
99 COMPCODE_LTGT = 5,
100 COMPCODE_GE = 6,
101 COMPCODE_ORD = 7,
102 COMPCODE_UNORD = 8,
103 COMPCODE_UNLT = 9,
104 COMPCODE_UNEQ = 10,
105 COMPCODE_UNLE = 11,
106 COMPCODE_UNGT = 12,
107 COMPCODE_NE = 13,
108 COMPCODE_UNGE = 14,
109 COMPCODE_TRUE = 15
110 };
111
112 static bool negate_expr_p (tree);
113 static tree negate_expr (tree);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static bool twoval_comparison_p (tree, tree *, tree *);
118 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
119 static tree optimize_bit_field_compare (location_t, enum tree_code,
120 tree, tree, tree);
121 static bool simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
130 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
131 static tree fold_binary_op_with_conditional_arg (location_t,
132 enum tree_code, tree,
133 tree, tree,
134 tree, tree, int);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (const_tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_convert_const (enum tree_code, tree, tree);
139 static tree fold_view_convert_expr (tree, tree);
140 static tree fold_negate_expr (location_t, tree);
141
142
143 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
144 Otherwise, return LOC. */
145
146 static location_t
147 expr_location_or (tree t, location_t loc)
148 {
149 location_t tloc = EXPR_LOCATION (t);
150 return tloc == UNKNOWN_LOCATION ? loc : tloc;
151 }
152
153 /* Similar to protected_set_expr_location, but never modifies X in place;
154    if the location can and needs to be set, X is unshared.  */
155
156 static inline tree
157 protected_set_expr_location_unshare (tree x, location_t loc)
158 {
159 if (CAN_HAVE_LOCATION_P (x)
160 && EXPR_LOCATION (x) != loc
161 && !(TREE_CODE (x) == SAVE_EXPR
162 || TREE_CODE (x) == TARGET_EXPR
163 || TREE_CODE (x) == BIND_EXPR))
164 {
165 x = copy_node (x);
166 SET_EXPR_LOCATION (x, loc);
167 }
168 return x;
169 }
170 \f
171 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
172 division and returns the quotient. Otherwise returns
173 NULL_TREE. */
174
175 tree
176 div_if_zero_remainder (const_tree arg1, const_tree arg2)
177 {
178 widest_int quo;
179
180 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
181 SIGNED, &quo))
182 return wide_int_to_tree (TREE_TYPE (arg1), quo);
183
184 return NULL_TREE;
185 }
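
/* A minimal usage sketch, assuming INTEGER_CST operands of
   integer_type_node; the helper name is illustrative.  12 / 4
   divides exactly and folds, 10 / 4 does not.  */

static void
example_div_if_zero_remainder (void)
{
  tree twelve = build_int_cst (integer_type_node, 12);
  tree ten = build_int_cst (integer_type_node, 10);
  tree four = build_int_cst (integer_type_node, 4);

  /* 12 % 4 == 0, so this yields an INTEGER_CST with value 3.  */
  gcc_assert (tree_to_shwi (div_if_zero_remainder (twelve, four)) == 3);

  /* 10 % 4 != 0, so the division is inexact and NULL_TREE results.  */
  gcc_assert (div_if_zero_remainder (ten, four) == NULL_TREE);
}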
186 \f
187 /* This is nonzero if we should defer warnings about undefined
188 overflow. This facility exists because these warnings are a
189 special case. The code to estimate loop iterations does not want
190 to issue any warnings, since it works with expressions which do not
191 occur in user code. Various bits of cleanup code call fold(), but
192 only use the result if it has certain characteristics (e.g., is a
193 constant); that code only wants to issue a warning if the result is
194 used. */
195
196 static int fold_deferring_overflow_warnings;
197
198 /* If a warning about undefined overflow is deferred, this is the
199 warning. Note that this may cause us to turn two warnings into
200 one, but that is fine since it is sufficient to only give one
201 warning per expression. */
202
203 static const char* fold_deferred_overflow_warning;
204
205 /* If a warning about undefined overflow is deferred, this is the
206 level at which the warning should be emitted. */
207
208 static enum warn_strict_overflow_code fold_deferred_overflow_code;
209
210 /* Start deferring overflow warnings. We could use a stack here to
211 permit nested calls, but at present it is not necessary. */
212
213 void
214 fold_defer_overflow_warnings (void)
215 {
216 ++fold_deferring_overflow_warnings;
217 }
218
219 /* Stop deferring overflow warnings. If there is a pending warning,
220 and ISSUE is true, then issue the warning if appropriate. STMT is
221 the statement with which the warning should be associated (used for
222 location information); STMT may be NULL. CODE is the level of the
223 warning--a warn_strict_overflow_code value. This function will use
224 the smaller of CODE and the deferred code when deciding whether to
225 issue the warning. CODE may be zero to mean to always use the
226 deferred code. */
227
228 void
229 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
230 {
231 const char *warnmsg;
232 location_t locus;
233
234 gcc_assert (fold_deferring_overflow_warnings > 0);
235 --fold_deferring_overflow_warnings;
236 if (fold_deferring_overflow_warnings > 0)
237 {
238 if (fold_deferred_overflow_warning != NULL
239 && code != 0
240 && code < (int) fold_deferred_overflow_code)
241 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
242 return;
243 }
244
245 warnmsg = fold_deferred_overflow_warning;
246 fold_deferred_overflow_warning = NULL;
247
248 if (!issue || warnmsg == NULL)
249 return;
250
251 if (gimple_no_warning_p (stmt))
252 return;
253
254 /* Use the smallest code level when deciding to issue the
255 warning. */
256 if (code == 0 || code > (int) fold_deferred_overflow_code)
257 code = fold_deferred_overflow_code;
258
259 if (!issue_strict_overflow_warning (code))
260 return;
261
262 if (stmt == NULL)
263 locus = input_location;
264 else
265 locus = gimple_location (stmt);
266 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
267 }
268
269 /* Stop deferring overflow warnings, ignoring any deferred
270 warnings. */
271
272 void
273 fold_undefer_and_ignore_overflow_warnings (void)
274 {
275 fold_undefer_overflow_warnings (false, NULL, 0);
276 }
277
278 /* Whether we are deferring overflow warnings. */
279
280 bool
281 fold_deferring_overflow_warnings_p (void)
282 {
283 return fold_deferring_overflow_warnings > 0;
284 }
285
286 /* This is called when we fold something based on the fact that signed
287 overflow is undefined. */
288
289 void
290 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
291 {
292 if (fold_deferring_overflow_warnings > 0)
293 {
294 if (fold_deferred_overflow_warning == NULL
295 || wc < fold_deferred_overflow_code)
296 {
297 fold_deferred_overflow_warning = gmsgid;
298 fold_deferred_overflow_code = wc;
299 }
300 }
301 else if (issue_strict_overflow_warning (wc))
302 warning (OPT_Wstrict_overflow, gmsgid);
303 }
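
/* A minimal sketch of the intended defer/undefer protocol, assuming
   the caller has a statement STMT to attach any warning to; the
   helper name is illustrative.  Warnings raised while folding are
   queued and only emitted if the folded result is actually kept.  */

static tree
example_fold_deferred (tree expr, gimple *stmt)
{
  fold_defer_overflow_warnings ();
  tree folded = fold (expr);
  /* Issue the queued -Wstrict-overflow warning only when the fold
     produced something we will use (here: a constant).  */
  fold_undefer_overflow_warnings (folded && TREE_CONSTANT (folded),
				  stmt, 0);
  return folded;
}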
304 \f
305 /* Return true if the built-in mathematical function specified by FN
306 is odd, i.e. -f(x) == f(-x). */
307
308 bool
309 negate_mathfn_p (combined_fn fn)
310 {
311 switch (fn)
312 {
313 CASE_CFN_ASIN:
314 CASE_CFN_ASINH:
315 CASE_CFN_ATAN:
316 CASE_CFN_ATANH:
317 CASE_CFN_CASIN:
318 CASE_CFN_CASINH:
319 CASE_CFN_CATAN:
320 CASE_CFN_CATANH:
321 CASE_CFN_CBRT:
322 CASE_CFN_CPROJ:
323 CASE_CFN_CSIN:
324 CASE_CFN_CSINH:
325 CASE_CFN_CTAN:
326 CASE_CFN_CTANH:
327 CASE_CFN_ERF:
328 CASE_CFN_LLROUND:
329 CASE_CFN_LROUND:
330 CASE_CFN_ROUND:
331 CASE_CFN_ROUNDEVEN:
332 CASE_CFN_ROUNDEVEN_FN:
333 CASE_CFN_SIN:
334 CASE_CFN_SINH:
335 CASE_CFN_TAN:
336 CASE_CFN_TANH:
337 CASE_CFN_TRUNC:
338 return true;
339
340 CASE_CFN_LLRINT:
341 CASE_CFN_LRINT:
342 CASE_CFN_NEARBYINT:
343 CASE_CFN_RINT:
344 return !flag_rounding_math;
345
346 default:
347 break;
348 }
349 return false;
350 }
351
352 /* Check whether we may negate an integer constant T without causing
353 overflow. */
354
355 bool
356 may_negate_without_overflow_p (const_tree t)
357 {
358 tree type;
359
360 gcc_assert (TREE_CODE (t) == INTEGER_CST);
361
362 type = TREE_TYPE (t);
363 if (TYPE_UNSIGNED (type))
364 return false;
365
366 return !wi::only_sign_bit_p (wi::to_wide (t));
367 }
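
/* An illustrative check, assuming integer_type_node: the only value
   of a signed type that cannot be negated is its minimum, since
   -INT_MIN is not representable.  */

static void
example_may_negate_without_overflow (void)
{
  tree min = TYPE_MIN_VALUE (integer_type_node);
  tree one = build_int_cst (integer_type_node, 1);
  gcc_assert (!may_negate_without_overflow_p (min));
  gcc_assert (may_negate_without_overflow_p (one));
}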
368
369 /* Determine whether an expression T can be cheaply negated using
370 the function negate_expr without introducing undefined overflow. */
371
372 static bool
373 negate_expr_p (tree t)
374 {
375 tree type;
376
377 if (t == 0)
378 return false;
379
380 type = TREE_TYPE (t);
381
382 STRIP_SIGN_NOPS (t);
383 switch (TREE_CODE (t))
384 {
385 case INTEGER_CST:
386 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
387 return true;
388
389 /* Check that -CST will not overflow type. */
390 return may_negate_without_overflow_p (t);
391 case BIT_NOT_EXPR:
392 return (INTEGRAL_TYPE_P (type)
393 && TYPE_OVERFLOW_WRAPS (type));
394
395 case FIXED_CST:
396 return true;
397
398 case NEGATE_EXPR:
399 return !TYPE_OVERFLOW_SANITIZED (type);
400
401 case REAL_CST:
402 /* We want to canonicalize to positive real constants. Pretend
403 that only negative ones can be easily negated. */
404 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
405
406 case COMPLEX_CST:
407 return negate_expr_p (TREE_REALPART (t))
408 && negate_expr_p (TREE_IMAGPART (t));
409
410 case VECTOR_CST:
411 {
412 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
413 return true;
414
415 /* Steps don't prevent negation. */
416 unsigned int count = vector_cst_encoded_nelts (t);
417 for (unsigned int i = 0; i < count; ++i)
418 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
419 return false;
420
421 return true;
422 }
423
424 case COMPLEX_EXPR:
425 return negate_expr_p (TREE_OPERAND (t, 0))
426 && negate_expr_p (TREE_OPERAND (t, 1));
427
428 case CONJ_EXPR:
429 return negate_expr_p (TREE_OPERAND (t, 0));
430
431 case PLUS_EXPR:
432 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
433 || HONOR_SIGNED_ZEROS (element_mode (type))
434 || (ANY_INTEGRAL_TYPE_P (type)
435 && ! TYPE_OVERFLOW_WRAPS (type)))
436 return false;
437 /* -(A + B) -> (-B) - A. */
438 if (negate_expr_p (TREE_OPERAND (t, 1)))
439 return true;
440 /* -(A + B) -> (-A) - B. */
441 return negate_expr_p (TREE_OPERAND (t, 0));
442
443 case MINUS_EXPR:
444 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
445 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
446 && !HONOR_SIGNED_ZEROS (element_mode (type))
447 && (! ANY_INTEGRAL_TYPE_P (type)
448 || TYPE_OVERFLOW_WRAPS (type));
449
450 case MULT_EXPR:
451 if (TYPE_UNSIGNED (type))
452 break;
453       /* INT_MIN/n * n doesn't overflow as a whole, but negating one of
454 	 its operands does if n is a (negative) power of two.  */
455 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
456 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
457 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
458 && (wi::popcount
459 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
460 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
461 && (wi::popcount
462 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
463 break;
464
465 /* Fall through. */
466
467 case RDIV_EXPR:
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
469 return negate_expr_p (TREE_OPERAND (t, 1))
470 || negate_expr_p (TREE_OPERAND (t, 0));
471 break;
472
473 case TRUNC_DIV_EXPR:
474 case ROUND_DIV_EXPR:
475 case EXACT_DIV_EXPR:
476 if (TYPE_UNSIGNED (type))
477 break;
478 /* In general we can't negate A in A / B, because if A is INT_MIN and
479 B is not 1 we change the sign of the result. */
480 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
481 && negate_expr_p (TREE_OPERAND (t, 0)))
482 return true;
483 /* In general we can't negate B in A / B, because if A is INT_MIN and
484 B is 1, we may turn this into INT_MIN / -1 which is undefined
485 and actually traps on some architectures. */
486 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
487 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
488 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
489 && ! integer_onep (TREE_OPERAND (t, 1))))
490 return negate_expr_p (TREE_OPERAND (t, 1));
491 break;
492
493 case NOP_EXPR:
494 /* Negate -((double)float) as (double)(-float). */
495 if (TREE_CODE (type) == REAL_TYPE)
496 {
497 tree tem = strip_float_extensions (t);
498 if (tem != t)
499 return negate_expr_p (tem);
500 }
501 break;
502
503 case CALL_EXPR:
504 /* Negate -f(x) as f(-x). */
505 if (negate_mathfn_p (get_call_combined_fn (t)))
506 return negate_expr_p (CALL_EXPR_ARG (t, 0));
507 break;
508
509 case RSHIFT_EXPR:
510 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
511 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
512 {
513 tree op1 = TREE_OPERAND (t, 1);
514 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
515 return true;
516 }
517 break;
518
519 default:
520 break;
521 }
522 return false;
523 }
524
525 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
526 simplification is possible.
527 If negate_expr_p would return true for T, NULL_TREE will never be
528 returned. */
529
530 static tree
531 fold_negate_expr_1 (location_t loc, tree t)
532 {
533 tree type = TREE_TYPE (t);
534 tree tem;
535
536 switch (TREE_CODE (t))
537 {
538 /* Convert - (~A) to A + 1. */
539 case BIT_NOT_EXPR:
540 if (INTEGRAL_TYPE_P (type))
541 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
542 build_one_cst (type));
543 break;
544
545 case INTEGER_CST:
546 tem = fold_negate_const (t, type);
547 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
548 || (ANY_INTEGRAL_TYPE_P (type)
549 && !TYPE_OVERFLOW_TRAPS (type)
550 && TYPE_OVERFLOW_WRAPS (type))
551 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
552 return tem;
553 break;
554
555 case POLY_INT_CST:
556 case REAL_CST:
557 case FIXED_CST:
558 tem = fold_negate_const (t, type);
559 return tem;
560
561 case COMPLEX_CST:
562 {
563 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
564 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
565 if (rpart && ipart)
566 return build_complex (type, rpart, ipart);
567 }
568 break;
569
570 case VECTOR_CST:
571 {
572 tree_vector_builder elts;
573 elts.new_unary_operation (type, t, true);
574 unsigned int count = elts.encoded_nelts ();
575 for (unsigned int i = 0; i < count; ++i)
576 {
577 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
578 if (elt == NULL_TREE)
579 return NULL_TREE;
580 elts.quick_push (elt);
581 }
582
583 return elts.build ();
584 }
585
586 case COMPLEX_EXPR:
587 if (negate_expr_p (t))
588 return fold_build2_loc (loc, COMPLEX_EXPR, type,
589 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
590 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
591 break;
592
593 case CONJ_EXPR:
594 if (negate_expr_p (t))
595 return fold_build1_loc (loc, CONJ_EXPR, type,
596 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
597 break;
598
599 case NEGATE_EXPR:
600 if (!TYPE_OVERFLOW_SANITIZED (type))
601 return TREE_OPERAND (t, 0);
602 break;
603
604 case PLUS_EXPR:
605 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
606 && !HONOR_SIGNED_ZEROS (element_mode (type)))
607 {
608 /* -(A + B) -> (-B) - A. */
609 if (negate_expr_p (TREE_OPERAND (t, 1)))
610 {
611 tem = negate_expr (TREE_OPERAND (t, 1));
612 return fold_build2_loc (loc, MINUS_EXPR, type,
613 tem, TREE_OPERAND (t, 0));
614 }
615
616 /* -(A + B) -> (-A) - B. */
617 if (negate_expr_p (TREE_OPERAND (t, 0)))
618 {
619 tem = negate_expr (TREE_OPERAND (t, 0));
620 return fold_build2_loc (loc, MINUS_EXPR, type,
621 tem, TREE_OPERAND (t, 1));
622 }
623 }
624 break;
625
626 case MINUS_EXPR:
627 /* - (A - B) -> B - A */
628 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
629 && !HONOR_SIGNED_ZEROS (element_mode (type)))
630 return fold_build2_loc (loc, MINUS_EXPR, type,
631 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
632 break;
633
634 case MULT_EXPR:
635 if (TYPE_UNSIGNED (type))
636 break;
637
638 /* Fall through. */
639
640 case RDIV_EXPR:
641 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
642 {
643 tem = TREE_OPERAND (t, 1);
644 if (negate_expr_p (tem))
645 return fold_build2_loc (loc, TREE_CODE (t), type,
646 TREE_OPERAND (t, 0), negate_expr (tem));
647 tem = TREE_OPERAND (t, 0);
648 if (negate_expr_p (tem))
649 return fold_build2_loc (loc, TREE_CODE (t), type,
650 negate_expr (tem), TREE_OPERAND (t, 1));
651 }
652 break;
653
654 case TRUNC_DIV_EXPR:
655 case ROUND_DIV_EXPR:
656 case EXACT_DIV_EXPR:
657 if (TYPE_UNSIGNED (type))
658 break;
659 /* In general we can't negate A in A / B, because if A is INT_MIN and
660 B is not 1 we change the sign of the result. */
661 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
662 && negate_expr_p (TREE_OPERAND (t, 0)))
663 return fold_build2_loc (loc, TREE_CODE (t), type,
664 negate_expr (TREE_OPERAND (t, 0)),
665 TREE_OPERAND (t, 1));
666 /* In general we can't negate B in A / B, because if A is INT_MIN and
667 B is 1, we may turn this into INT_MIN / -1 which is undefined
668 and actually traps on some architectures. */
669 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
670 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
671 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
672 && ! integer_onep (TREE_OPERAND (t, 1))))
673 && negate_expr_p (TREE_OPERAND (t, 1)))
674 return fold_build2_loc (loc, TREE_CODE (t), type,
675 TREE_OPERAND (t, 0),
676 negate_expr (TREE_OPERAND (t, 1)));
677 break;
678
679 case NOP_EXPR:
680 /* Convert -((double)float) into (double)(-float). */
681 if (TREE_CODE (type) == REAL_TYPE)
682 {
683 tem = strip_float_extensions (t);
684 if (tem != t && negate_expr_p (tem))
685 return fold_convert_loc (loc, type, negate_expr (tem));
686 }
687 break;
688
689 case CALL_EXPR:
690 /* Negate -f(x) as f(-x). */
691 if (negate_mathfn_p (get_call_combined_fn (t))
692 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
693 {
694 tree fndecl, arg;
695
696 fndecl = get_callee_fndecl (t);
697 arg = negate_expr (CALL_EXPR_ARG (t, 0));
698 return build_call_expr_loc (loc, fndecl, 1, arg);
699 }
700 break;
701
702 case RSHIFT_EXPR:
703 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
704 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
705 {
706 tree op1 = TREE_OPERAND (t, 1);
707 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
708 {
709 tree ntype = TYPE_UNSIGNED (type)
710 ? signed_type_for (type)
711 : unsigned_type_for (type);
712 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
713 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
714 return fold_convert_loc (loc, type, temp);
715 }
716 }
717 break;
718
719 default:
720 break;
721 }
722
723 return NULL_TREE;
724 }
725
726 /* A wrapper for fold_negate_expr_1. */
727
728 static tree
729 fold_negate_expr (location_t loc, tree t)
730 {
731 tree type = TREE_TYPE (t);
732 STRIP_SIGN_NOPS (t);
733 tree tem = fold_negate_expr_1 (loc, t);
734 if (tem == NULL_TREE)
735 return NULL_TREE;
736 return fold_convert_loc (loc, type, tem);
737 }
738
739 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
740    negated in a simpler way.  Also allow T to be NULL_TREE, in which
741    case NULL_TREE is returned.  */
742
743 static tree
744 negate_expr (tree t)
745 {
746 tree type, tem;
747 location_t loc;
748
749 if (t == NULL_TREE)
750 return NULL_TREE;
751
752 loc = EXPR_LOCATION (t);
753 type = TREE_TYPE (t);
754 STRIP_SIGN_NOPS (t);
755
756 tem = fold_negate_expr (loc, t);
757 if (!tem)
758 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
759 return fold_convert_loc (loc, type, tem);
760 }
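
/* A minimal sketch of how negate_expr behaves, assuming signed
   integer operands A and B built by the caller; the helper name is
   illustrative.  negate_expr never returns NULL_TREE for non-null
   input: it either applies a rewrite such as -(a - b) -> b - a or
   falls back to wrapping its argument in a NEGATE_EXPR.  */

static tree
example_negate_difference (tree a, tree b)
{
  tree diff = fold_build2 (MINUS_EXPR, integer_type_node, a, b);
  /* For integer types this folds to b - a.  */
  return negate_expr (diff);
}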
761 \f
762 /* Split a tree IN into constant, literal and variable parts that could be
763 combined with CODE to make IN. "constant" means an expression with
764 TREE_CONSTANT but that isn't an actual constant. CODE must be a
765 commutative arithmetic operation. Store the constant part into *CONP,
766 the literal in *LITP and return the variable part. If a part isn't
767 present, set it to null. If the tree does not decompose in this way,
768 return the entire tree as the variable part and the other parts as null.
769
770 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
771 case, we negate an operand that was subtracted. Except if it is a
772 literal for which we use *MINUS_LITP instead.
773
774 If NEGATE_P is true, we are negating all of IN, again except a literal
775 for which we use *MINUS_LITP instead. If a variable part is of pointer
776 type, it is negated after converting to TYPE. This prevents us from
777    generating an illegal MINUS pointer expression.  LOC is the location of
778 the converted variable part.
779
780 If IN is itself a literal or constant, return it as appropriate.
781
782 Note that we do not guarantee that any of the three values will be the
783 same type as IN, but they will have the same signedness and mode. */
784
785 static tree
786 split_tree (tree in, tree type, enum tree_code code,
787 tree *minus_varp, tree *conp, tree *minus_conp,
788 tree *litp, tree *minus_litp, int negate_p)
789 {
790 tree var = 0;
791 *minus_varp = 0;
792 *conp = 0;
793 *minus_conp = 0;
794 *litp = 0;
795 *minus_litp = 0;
796
797 /* Strip any conversions that don't change the machine mode or signedness. */
798 STRIP_SIGN_NOPS (in);
799
800 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
801 || TREE_CODE (in) == FIXED_CST)
802 *litp = in;
803 else if (TREE_CODE (in) == code
804 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
805 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
806 /* We can associate addition and subtraction together (even
807 though the C standard doesn't say so) for integers because
808 the value is not affected. For reals, the value might be
809 affected, so we can't. */
810 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
811 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
812 || (code == MINUS_EXPR
813 && (TREE_CODE (in) == PLUS_EXPR
814 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
815 {
816 tree op0 = TREE_OPERAND (in, 0);
817 tree op1 = TREE_OPERAND (in, 1);
818 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
819 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
820
821 /* First see if either of the operands is a literal, then a constant. */
822 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
823 || TREE_CODE (op0) == FIXED_CST)
824 *litp = op0, op0 = 0;
825 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
826 || TREE_CODE (op1) == FIXED_CST)
827 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
828
829 if (op0 != 0 && TREE_CONSTANT (op0))
830 *conp = op0, op0 = 0;
831 else if (op1 != 0 && TREE_CONSTANT (op1))
832 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
833
834 /* If we haven't dealt with either operand, this is not a case we can
835 decompose. Otherwise, VAR is either of the ones remaining, if any. */
836 if (op0 != 0 && op1 != 0)
837 var = in;
838 else if (op0 != 0)
839 var = op0;
840 else
841 var = op1, neg_var_p = neg1_p;
842
843 /* Now do any needed negations. */
844 if (neg_litp_p)
845 *minus_litp = *litp, *litp = 0;
846 if (neg_conp_p && *conp)
847 *minus_conp = *conp, *conp = 0;
848 if (neg_var_p && var)
849 *minus_varp = var, var = 0;
850 }
851 else if (TREE_CONSTANT (in))
852 *conp = in;
853 else if (TREE_CODE (in) == BIT_NOT_EXPR
854 && code == PLUS_EXPR)
855 {
856 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
857 when IN is constant. */
858 *litp = build_minus_one_cst (type);
859 *minus_varp = TREE_OPERAND (in, 0);
860 }
861 else
862 var = in;
863
864 if (negate_p)
865 {
866 if (*litp)
867 *minus_litp = *litp, *litp = 0;
868 else if (*minus_litp)
869 *litp = *minus_litp, *minus_litp = 0;
870 if (*conp)
871 *minus_conp = *conp, *conp = 0;
872 else if (*minus_conp)
873 *conp = *minus_conp, *minus_conp = 0;
874 if (var)
875 *minus_varp = var, var = 0;
876 else if (*minus_varp)
877 var = *minus_varp, *minus_varp = 0;
878 }
879
880 if (*litp
881 && TREE_OVERFLOW_P (*litp))
882 *litp = drop_tree_overflow (*litp);
883 if (*minus_litp
884 && TREE_OVERFLOW_P (*minus_litp))
885 *minus_litp = drop_tree_overflow (*minus_litp);
886
887 return var;
888 }
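
/* A minimal calling sketch, assuming IN and TYPE were built by the
   caller; the helper name is illustrative.  For IN = x + 5 split
   with PLUS_EXPR, VAR comes back as x, *LITP as 5, and the other
   output parts as null.  */

static tree
example_split_tree (tree in, tree type)
{
  tree minus_var, con, minus_con, lit, minus_lit;
  tree var = split_tree (in, type, PLUS_EXPR,
			 &minus_var, &con, &minus_con,
			 &lit, &minus_lit, /*negate_p=*/0);
  return var;
}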
889
890 /* Re-associate trees split by the above function. T1 and T2 are
891 either expressions to associate or null. Return the new
892 expression, if any. LOC is the location of the new expression. If
893 we build an operation, do it in TYPE and with CODE. */
894
895 static tree
896 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
897 {
898 if (t1 == 0)
899 {
900 gcc_assert (t2 == 0 || code != MINUS_EXPR);
901 return t2;
902 }
903 else if (t2 == 0)
904 return t1;
905
906 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
907 try to fold this since we will have infinite recursion. But do
908 deal with any NEGATE_EXPRs. */
909 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
910 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
911 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
912 {
913 if (code == PLUS_EXPR)
914 {
915 if (TREE_CODE (t1) == NEGATE_EXPR)
916 return build2_loc (loc, MINUS_EXPR, type,
917 fold_convert_loc (loc, type, t2),
918 fold_convert_loc (loc, type,
919 TREE_OPERAND (t1, 0)));
920 else if (TREE_CODE (t2) == NEGATE_EXPR)
921 return build2_loc (loc, MINUS_EXPR, type,
922 fold_convert_loc (loc, type, t1),
923 fold_convert_loc (loc, type,
924 TREE_OPERAND (t2, 0)));
925 else if (integer_zerop (t2))
926 return fold_convert_loc (loc, type, t1);
927 }
928 else if (code == MINUS_EXPR)
929 {
930 if (integer_zerop (t2))
931 return fold_convert_loc (loc, type, t1);
932 }
933
934 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
935 fold_convert_loc (loc, type, t2));
936 }
937
938 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
939 fold_convert_loc (loc, type, t2));
940 }
941 \f
942 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
943 for use in int_const_binop, size_binop and size_diffop. */
944
945 static bool
946 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
947 {
948 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
949 return false;
950 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
951 return false;
952
953 switch (code)
954 {
955 case LSHIFT_EXPR:
956 case RSHIFT_EXPR:
957 case LROTATE_EXPR:
958 case RROTATE_EXPR:
959 return true;
960
961 default:
962 break;
963 }
964
965 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
966 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
967 && TYPE_MODE (type1) == TYPE_MODE (type2);
968 }
969
970 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
971 a new constant in RES. Return FALSE if we don't know how to
972 evaluate CODE at compile-time. */
973
974 bool
975 wide_int_binop (wide_int &res,
976 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
977 signop sign, wi::overflow_type *overflow)
978 {
979 wide_int tmp;
980 *overflow = wi::OVF_NONE;
981 switch (code)
982 {
983 case BIT_IOR_EXPR:
984 res = wi::bit_or (arg1, arg2);
985 break;
986
987 case BIT_XOR_EXPR:
988 res = wi::bit_xor (arg1, arg2);
989 break;
990
991 case BIT_AND_EXPR:
992 res = wi::bit_and (arg1, arg2);
993 break;
994
995 case RSHIFT_EXPR:
996 case LSHIFT_EXPR:
997 if (wi::neg_p (arg2))
998 {
999 tmp = -arg2;
1000 if (code == RSHIFT_EXPR)
1001 code = LSHIFT_EXPR;
1002 else
1003 code = RSHIFT_EXPR;
1004 }
1005 else
1006 tmp = arg2;
1007
1008 if (code == RSHIFT_EXPR)
1009 /* It's unclear from the C standard whether shifts can overflow.
1010 The following code ignores overflow; perhaps a C standard
1011 interpretation ruling is needed. */
1012 res = wi::rshift (arg1, tmp, sign);
1013 else
1014 res = wi::lshift (arg1, tmp);
1015 break;
1016
1017 case RROTATE_EXPR:
1018 case LROTATE_EXPR:
1019 if (wi::neg_p (arg2))
1020 {
1021 tmp = -arg2;
1022 if (code == RROTATE_EXPR)
1023 code = LROTATE_EXPR;
1024 else
1025 code = RROTATE_EXPR;
1026 }
1027 else
1028 tmp = arg2;
1029
1030 if (code == RROTATE_EXPR)
1031 res = wi::rrotate (arg1, tmp);
1032 else
1033 res = wi::lrotate (arg1, tmp);
1034 break;
1035
1036 case PLUS_EXPR:
1037 res = wi::add (arg1, arg2, sign, overflow);
1038 break;
1039
1040 case MINUS_EXPR:
1041 res = wi::sub (arg1, arg2, sign, overflow);
1042 break;
1043
1044 case MULT_EXPR:
1045 res = wi::mul (arg1, arg2, sign, overflow);
1046 break;
1047
1048 case MULT_HIGHPART_EXPR:
1049 res = wi::mul_high (arg1, arg2, sign);
1050 break;
1051
1052 case TRUNC_DIV_EXPR:
1053 case EXACT_DIV_EXPR:
1054 if (arg2 == 0)
1055 return false;
1056 res = wi::div_trunc (arg1, arg2, sign, overflow);
1057 break;
1058
1059 case FLOOR_DIV_EXPR:
1060 if (arg2 == 0)
1061 return false;
1062 res = wi::div_floor (arg1, arg2, sign, overflow);
1063 break;
1064
1065 case CEIL_DIV_EXPR:
1066 if (arg2 == 0)
1067 return false;
1068 res = wi::div_ceil (arg1, arg2, sign, overflow);
1069 break;
1070
1071 case ROUND_DIV_EXPR:
1072 if (arg2 == 0)
1073 return false;
1074 res = wi::div_round (arg1, arg2, sign, overflow);
1075 break;
1076
1077 case TRUNC_MOD_EXPR:
1078 if (arg2 == 0)
1079 return false;
1080 res = wi::mod_trunc (arg1, arg2, sign, overflow);
1081 break;
1082
1083 case FLOOR_MOD_EXPR:
1084 if (arg2 == 0)
1085 return false;
1086 res = wi::mod_floor (arg1, arg2, sign, overflow);
1087 break;
1088
1089 case CEIL_MOD_EXPR:
1090 if (arg2 == 0)
1091 return false;
1092 res = wi::mod_ceil (arg1, arg2, sign, overflow);
1093 break;
1094
1095 case ROUND_MOD_EXPR:
1096 if (arg2 == 0)
1097 return false;
1098 res = wi::mod_round (arg1, arg2, sign, overflow);
1099 break;
1100
1101 case MIN_EXPR:
1102 res = wi::min (arg1, arg2, sign);
1103 break;
1104
1105 case MAX_EXPR:
1106 res = wi::max (arg1, arg2, sign);
1107 break;
1108
1109 default:
1110 return false;
1111 }
1112 return true;
1113 }
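
/* A minimal usage sketch, assuming the precision of integer_type_node;
   the helper name is illustrative.  Fold 6 * 7 on wide ints and check
   that no overflow was recorded.  */

static bool
example_wide_int_binop (void)
{
  unsigned int prec = TYPE_PRECISION (integer_type_node);
  wide_int a = wi::shwi (6, prec);
  wide_int b = wi::shwi (7, prec);
  wide_int res;
  wi::overflow_type ovf;
  if (!wide_int_binop (res, MULT_EXPR, a, b, SIGNED, &ovf))
    return false;
  return ovf == wi::OVF_NONE && wi::eq_p (res, 42);
}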
1114
1115 /* Combine two poly_ints ARG1 and ARG2 under operation CODE to
1116 produce a new constant in RES. Return FALSE if we don't know how
1117 to evaluate CODE at compile-time. */
1118
1119 static bool
1120 poly_int_binop (poly_wide_int &res, enum tree_code code,
1121 const_tree arg1, const_tree arg2,
1122 signop sign, wi::overflow_type *overflow)
1123 {
1124 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1125 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1126 switch (code)
1127 {
1128 case PLUS_EXPR:
1129 res = wi::add (wi::to_poly_wide (arg1),
1130 wi::to_poly_wide (arg2), sign, overflow);
1131 break;
1132
1133 case MINUS_EXPR:
1134 res = wi::sub (wi::to_poly_wide (arg1),
1135 wi::to_poly_wide (arg2), sign, overflow);
1136 break;
1137
1138 case MULT_EXPR:
1139 if (TREE_CODE (arg2) == INTEGER_CST)
1140 res = wi::mul (wi::to_poly_wide (arg1),
1141 wi::to_wide (arg2), sign, overflow);
1142 else if (TREE_CODE (arg1) == INTEGER_CST)
1143 res = wi::mul (wi::to_poly_wide (arg2),
1144 wi::to_wide (arg1), sign, overflow);
1145 else
1146 	return false;
1147 break;
1148
1149 case LSHIFT_EXPR:
1150 if (TREE_CODE (arg2) == INTEGER_CST)
1151 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1152 else
1153 return false;
1154 break;
1155
1156 case BIT_IOR_EXPR:
1157 if (TREE_CODE (arg2) != INTEGER_CST
1158 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1159 &res))
1160 return false;
1161 break;
1162
1163 default:
1164 return false;
1165 }
1166 return true;
1167 }
1168
1169 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1170 produce a new constant. Return NULL_TREE if we don't know how to
1171 evaluate CODE at compile-time. */
1172
1173 tree
1174 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1175 int overflowable)
1176 {
1177 poly_wide_int poly_res;
1178 tree type = TREE_TYPE (arg1);
1179 signop sign = TYPE_SIGN (type);
1180 wi::overflow_type overflow = wi::OVF_NONE;
1181
1182 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1183 {
1184 wide_int warg1 = wi::to_wide (arg1), res;
1185 wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1186 if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1187 return NULL_TREE;
1188 poly_res = res;
1189 }
1190 else if (!poly_int_tree_p (arg1)
1191 || !poly_int_tree_p (arg2)
1192 || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1193 return NULL_TREE;
1194 return force_fit_type (type, poly_res, overflowable,
1195 (((sign == SIGNED || overflowable == -1)
1196 && overflow)
1197 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1198 }
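
/* A minimal usage sketch, assuming integer_type_node operands; the
   helper name is illustrative.  The result type is taken from the
   first operand.  */

static void
example_int_const_binop (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  tree sum = int_const_binop (PLUS_EXPR, two, three);
  gcc_assert (sum != NULL_TREE && tree_to_shwi (sum) == 5);
}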
1199
1200 /* Return true if binary operation OP distributes over addition in operand
1201 OPNO, with the other operand being held constant. OPNO counts from 1. */
1202
1203 static bool
1204 distributes_over_addition_p (tree_code op, int opno)
1205 {
1206 switch (op)
1207 {
1208 case PLUS_EXPR:
1209 case MINUS_EXPR:
1210 case MULT_EXPR:
1211 return true;
1212
1213 case LSHIFT_EXPR:
1214 return opno == 1;
1215
1216 default:
1217 return false;
1218 }
1219 }
1220
1221 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1222 constant. We assume ARG1 and ARG2 have the same data type, or at least
1223 are the same kind of constant and the same machine mode. Return zero if
1224 combining the constants is not allowed in the current operating mode. */
1225
1226 static tree
1227 const_binop (enum tree_code code, tree arg1, tree arg2)
1228 {
1229 /* Sanity check for the recursive cases. */
1230 if (!arg1 || !arg2)
1231 return NULL_TREE;
1232
1233 STRIP_NOPS (arg1);
1234 STRIP_NOPS (arg2);
1235
1236 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1237 {
1238 if (code == POINTER_PLUS_EXPR)
1239 return int_const_binop (PLUS_EXPR,
1240 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1241
1242 return int_const_binop (code, arg1, arg2);
1243 }
1244
1245 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1246 {
1247 machine_mode mode;
1248 REAL_VALUE_TYPE d1;
1249 REAL_VALUE_TYPE d2;
1250 REAL_VALUE_TYPE value;
1251 REAL_VALUE_TYPE result;
1252 bool inexact;
1253 tree t, type;
1254
1255 /* The following codes are handled by real_arithmetic. */
1256 switch (code)
1257 {
1258 case PLUS_EXPR:
1259 case MINUS_EXPR:
1260 case MULT_EXPR:
1261 case RDIV_EXPR:
1262 case MIN_EXPR:
1263 case MAX_EXPR:
1264 break;
1265
1266 default:
1267 return NULL_TREE;
1268 }
1269
1270 d1 = TREE_REAL_CST (arg1);
1271 d2 = TREE_REAL_CST (arg2);
1272
1273 type = TREE_TYPE (arg1);
1274 mode = TYPE_MODE (type);
1275
1276 /* Don't perform operation if we honor signaling NaNs and
1277 either operand is a signaling NaN. */
1278 if (HONOR_SNANS (mode)
1279 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1280 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1281 return NULL_TREE;
1282
1283 /* Don't perform operation if it would raise a division
1284 by zero exception. */
1285 if (code == RDIV_EXPR
1286 && real_equal (&d2, &dconst0)
1287 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1288 return NULL_TREE;
1289
1290 /* If either operand is a NaN, just return it. Otherwise, set up
1291 for floating-point trap; we return an overflow. */
1292 if (REAL_VALUE_ISNAN (d1))
1293 {
1294 	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
1295 	     is off.  */
1296 d1.signalling = 0;
1297 t = build_real (type, d1);
1298 return t;
1299 }
1300 else if (REAL_VALUE_ISNAN (d2))
1301 {
1302 	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
1303 	     is off.  */
1304 d2.signalling = 0;
1305 t = build_real (type, d2);
1306 return t;
1307 }
1308
1309 inexact = real_arithmetic (&value, code, &d1, &d2);
1310 real_convert (&result, mode, &value);
1311
1312 /* Don't constant fold this floating point operation if
1313 	 the result has overflowed and flag_trapping_math is set.  */
1314 if (flag_trapping_math
1315 && MODE_HAS_INFINITIES (mode)
1316 && REAL_VALUE_ISINF (result)
1317 && !REAL_VALUE_ISINF (d1)
1318 && !REAL_VALUE_ISINF (d2))
1319 return NULL_TREE;
1320
1321 /* Don't constant fold this floating point operation if the
1322 	 result may depend upon the run-time rounding mode and
1323 flag_rounding_math is set, or if GCC's software emulation
1324 is unable to accurately represent the result. */
1325 if ((flag_rounding_math
1326 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1327 && (inexact || !real_identical (&result, &value)))
1328 return NULL_TREE;
1329
1330 t = build_real (type, result);
1331
1332 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1333 return t;
1334 }
1335
1336 if (TREE_CODE (arg1) == FIXED_CST)
1337 {
1338 FIXED_VALUE_TYPE f1;
1339 FIXED_VALUE_TYPE f2;
1340 FIXED_VALUE_TYPE result;
1341 tree t, type;
1342 int sat_p;
1343 bool overflow_p;
1344
1345 /* The following codes are handled by fixed_arithmetic. */
1346 switch (code)
1347 {
1348 case PLUS_EXPR:
1349 case MINUS_EXPR:
1350 case MULT_EXPR:
1351 case TRUNC_DIV_EXPR:
1352 if (TREE_CODE (arg2) != FIXED_CST)
1353 return NULL_TREE;
1354 f2 = TREE_FIXED_CST (arg2);
1355 break;
1356
1357 case LSHIFT_EXPR:
1358 case RSHIFT_EXPR:
1359 {
1360 if (TREE_CODE (arg2) != INTEGER_CST)
1361 return NULL_TREE;
1362 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1363 f2.data.high = w2.elt (1);
1364 f2.data.low = w2.ulow ();
1365 f2.mode = SImode;
1366 }
1367 break;
1368
1369 default:
1370 return NULL_TREE;
1371 }
1372
1373 f1 = TREE_FIXED_CST (arg1);
1374 type = TREE_TYPE (arg1);
1375 sat_p = TYPE_SATURATING (type);
1376 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1377 t = build_fixed (type, result);
1378 /* Propagate overflow flags. */
1379 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1380 TREE_OVERFLOW (t) = 1;
1381 return t;
1382 }
1383
1384 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1385 {
1386 tree type = TREE_TYPE (arg1);
1387 tree r1 = TREE_REALPART (arg1);
1388 tree i1 = TREE_IMAGPART (arg1);
1389 tree r2 = TREE_REALPART (arg2);
1390 tree i2 = TREE_IMAGPART (arg2);
1391 tree real, imag;
1392
1393 switch (code)
1394 {
1395 case PLUS_EXPR:
1396 case MINUS_EXPR:
1397 real = const_binop (code, r1, r2);
1398 imag = const_binop (code, i1, i2);
1399 break;
1400
1401 case MULT_EXPR:
1402 if (COMPLEX_FLOAT_TYPE_P (type))
1403 return do_mpc_arg2 (arg1, arg2, type,
1404 /* do_nonfinite= */ folding_initializer,
1405 mpc_mul);
1406
1407 real = const_binop (MINUS_EXPR,
1408 const_binop (MULT_EXPR, r1, r2),
1409 const_binop (MULT_EXPR, i1, i2));
1410 imag = const_binop (PLUS_EXPR,
1411 const_binop (MULT_EXPR, r1, i2),
1412 const_binop (MULT_EXPR, i1, r2));
1413 break;
1414
1415 case RDIV_EXPR:
1416 if (COMPLEX_FLOAT_TYPE_P (type))
1417 return do_mpc_arg2 (arg1, arg2, type,
1418 /* do_nonfinite= */ folding_initializer,
1419 mpc_div);
1420 /* Fallthru. */
1421 case TRUNC_DIV_EXPR:
1422 case CEIL_DIV_EXPR:
1423 case FLOOR_DIV_EXPR:
1424 case ROUND_DIV_EXPR:
1425 if (flag_complex_method == 0)
1426 {
1427 /* Keep this algorithm in sync with
1428 tree-complex.c:expand_complex_div_straight().
1429
1430 Expand complex division to scalars, straightforward algorithm.
1431 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1432 t = br*br + bi*bi
1433 */
1434 tree magsquared
1435 = const_binop (PLUS_EXPR,
1436 const_binop (MULT_EXPR, r2, r2),
1437 const_binop (MULT_EXPR, i2, i2));
1438 tree t1
1439 = const_binop (PLUS_EXPR,
1440 const_binop (MULT_EXPR, r1, r2),
1441 const_binop (MULT_EXPR, i1, i2));
1442 tree t2
1443 = const_binop (MINUS_EXPR,
1444 const_binop (MULT_EXPR, i1, r2),
1445 const_binop (MULT_EXPR, r1, i2));
1446
1447 real = const_binop (code, t1, magsquared);
1448 imag = const_binop (code, t2, magsquared);
1449 }
1450 else
1451 {
1452 /* Keep this algorithm in sync with
1453 tree-complex.c:expand_complex_div_wide().
1454
1455 Expand complex division to scalars, modified algorithm to minimize
1456 overflow with wide input ranges. */
1457 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1458 fold_abs_const (r2, TREE_TYPE (type)),
1459 fold_abs_const (i2, TREE_TYPE (type)));
1460
1461 if (integer_nonzerop (compare))
1462 {
1463 /* In the TRUE branch, we compute
1464 ratio = br/bi;
1465 div = (br * ratio) + bi;
1466 tr = (ar * ratio) + ai;
1467 ti = (ai * ratio) - ar;
1468 tr = tr / div;
1469 ti = ti / div; */
1470 tree ratio = const_binop (code, r2, i2);
1471 tree div = const_binop (PLUS_EXPR, i2,
1472 const_binop (MULT_EXPR, r2, ratio));
1473 real = const_binop (MULT_EXPR, r1, ratio);
1474 real = const_binop (PLUS_EXPR, real, i1);
1475 real = const_binop (code, real, div);
1476
1477 imag = const_binop (MULT_EXPR, i1, ratio);
1478 imag = const_binop (MINUS_EXPR, imag, r1);
1479 imag = const_binop (code, imag, div);
1480 }
1481 else
1482 {
1483 	      /* In the FALSE branch, we compute
1484 		 ratio = bi/br;
1485 		 div = (bi * ratio) + br;
1486 		 tr = (ai * ratio) + ar;
1487 		 ti = ai - (ar * ratio);
1488 		 tr = tr / div;
1489 		 ti = ti / div;  */
1490 tree ratio = const_binop (code, i2, r2);
1491 tree div = const_binop (PLUS_EXPR, r2,
1492 const_binop (MULT_EXPR, i2, ratio));
1493
1494 real = const_binop (MULT_EXPR, i1, ratio);
1495 real = const_binop (PLUS_EXPR, real, r1);
1496 real = const_binop (code, real, div);
1497
1498 imag = const_binop (MULT_EXPR, r1, ratio);
1499 imag = const_binop (MINUS_EXPR, i1, imag);
1500 imag = const_binop (code, imag, div);
1501 }
1502 }
1503 break;
1504
1505 default:
1506 return NULL_TREE;
1507 }
1508
1509 if (real && imag)
1510 return build_complex (type, real, imag);
1511 }
1512
1513 if (TREE_CODE (arg1) == VECTOR_CST
1514 && TREE_CODE (arg2) == VECTOR_CST
1515 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1516 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1517 {
1518 tree type = TREE_TYPE (arg1);
1519 bool step_ok_p;
1520 if (VECTOR_CST_STEPPED_P (arg1)
1521 && VECTOR_CST_STEPPED_P (arg2))
1522 /* We can operate directly on the encoding if:
1523
1524 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1525 implies
1526 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1527
1528 Addition and subtraction are the supported operators
1529 for which this is true. */
1530 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1531 else if (VECTOR_CST_STEPPED_P (arg1))
1532 /* We can operate directly on stepped encodings if:
1533
1534 a3 - a2 == a2 - a1
1535 implies:
1536 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1537
1538 which is true if (x -> x op c) distributes over addition. */
1539 step_ok_p = distributes_over_addition_p (code, 1);
1540 else
1541 /* Similarly in reverse. */
1542 step_ok_p = distributes_over_addition_p (code, 2);
1543 tree_vector_builder elts;
1544 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1545 return NULL_TREE;
1546 unsigned int count = elts.encoded_nelts ();
1547 for (unsigned int i = 0; i < count; ++i)
1548 {
1549 tree elem1 = VECTOR_CST_ELT (arg1, i);
1550 tree elem2 = VECTOR_CST_ELT (arg2, i);
1551
1552 tree elt = const_binop (code, elem1, elem2);
1553
1554 /* It is possible that const_binop cannot handle the given
1555 	     code and returns NULL_TREE.  */
1556 if (elt == NULL_TREE)
1557 return NULL_TREE;
1558 elts.quick_push (elt);
1559 }
1560
1561 return elts.build ();
1562 }
1563
1564 /* Shifts allow a scalar offset for a vector. */
1565 if (TREE_CODE (arg1) == VECTOR_CST
1566 && TREE_CODE (arg2) == INTEGER_CST)
1567 {
1568 tree type = TREE_TYPE (arg1);
1569 bool step_ok_p = distributes_over_addition_p (code, 1);
1570 tree_vector_builder elts;
1571 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1572 return NULL_TREE;
1573 unsigned int count = elts.encoded_nelts ();
1574 for (unsigned int i = 0; i < count; ++i)
1575 {
1576 tree elem1 = VECTOR_CST_ELT (arg1, i);
1577
1578 tree elt = const_binop (code, elem1, arg2);
1579
1580 /* It is possible that const_binop cannot handle the given
1581 	     code and returns NULL_TREE.  */
1582 if (elt == NULL_TREE)
1583 return NULL_TREE;
1584 elts.quick_push (elt);
1585 }
1586
1587 return elts.build ();
1588 }
1589 return NULL_TREE;
1590 }
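
/* A minimal usage sketch, assuming TYPE is a scalar float type; the
   helper name is illustrative.  const_binop returns NULL_TREE when
   it declines to fold, e.g. for signaling NaNs or a division by
   zero that should trap at run time.  */

static tree
example_const_binop_real (tree type)
{
  tree one = build_real (type, dconst1);
  tree two = build_real (type, dconst2);
  /* Folds to 3.0 unless one of the bail-out conditions above hits.  */
  return const_binop (PLUS_EXPR, one, two);
}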
1591
1592 /* Overload that adds a TYPE parameter to be able to dispatch
1593 to fold_relational_const. */
1594
1595 tree
1596 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1597 {
1598 if (TREE_CODE_CLASS (code) == tcc_comparison)
1599 return fold_relational_const (code, type, arg1, arg2);
1600
1601 /* ??? Until we make the const_binop worker take the type of the
1602      result as argument, put those cases that need it here.  */
1603 switch (code)
1604 {
1605 case VEC_SERIES_EXPR:
1606 if (CONSTANT_CLASS_P (arg1)
1607 && CONSTANT_CLASS_P (arg2))
1608 return build_vec_series (type, arg1, arg2);
1609 return NULL_TREE;
1610
1611 case COMPLEX_EXPR:
1612 if ((TREE_CODE (arg1) == REAL_CST
1613 && TREE_CODE (arg2) == REAL_CST)
1614 || (TREE_CODE (arg1) == INTEGER_CST
1615 && TREE_CODE (arg2) == INTEGER_CST))
1616 return build_complex (type, arg1, arg2);
1617 return NULL_TREE;
1618
1619 case POINTER_DIFF_EXPR:
1620 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1621 {
1622 poly_offset_int res = (wi::to_poly_offset (arg1)
1623 - wi::to_poly_offset (arg2));
1624 return force_fit_type (type, res, 1,
1625 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1626 }
1627 return NULL_TREE;
1628
1629 case VEC_PACK_TRUNC_EXPR:
1630 case VEC_PACK_FIX_TRUNC_EXPR:
1631 case VEC_PACK_FLOAT_EXPR:
1632 {
1633 unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1634
1635 if (TREE_CODE (arg1) != VECTOR_CST
1636 || TREE_CODE (arg2) != VECTOR_CST)
1637 return NULL_TREE;
1638
1639 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1640 return NULL_TREE;
1641
1642 out_nelts = in_nelts * 2;
1643 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1644 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1645
1646 tree_vector_builder elts (type, out_nelts, 1);
1647 for (i = 0; i < out_nelts; i++)
1648 {
1649 tree elt = (i < in_nelts
1650 ? VECTOR_CST_ELT (arg1, i)
1651 : VECTOR_CST_ELT (arg2, i - in_nelts));
1652 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1653 ? NOP_EXPR
1654 : code == VEC_PACK_FLOAT_EXPR
1655 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1656 TREE_TYPE (type), elt);
1657 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1658 return NULL_TREE;
1659 elts.quick_push (elt);
1660 }
1661
1662 return elts.build ();
1663 }
1664
1665 case VEC_WIDEN_MULT_LO_EXPR:
1666 case VEC_WIDEN_MULT_HI_EXPR:
1667 case VEC_WIDEN_MULT_EVEN_EXPR:
1668 case VEC_WIDEN_MULT_ODD_EXPR:
1669 {
1670 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1671
1672 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1673 return NULL_TREE;
1674
1675 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1676 return NULL_TREE;
1677 out_nelts = in_nelts / 2;
1678 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1679 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1680
1681 if (code == VEC_WIDEN_MULT_LO_EXPR)
1682 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1683 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1684 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1685 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1686 scale = 1, ofs = 0;
1687 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1688 scale = 1, ofs = 1;
1689
1690 tree_vector_builder elts (type, out_nelts, 1);
1691 for (out = 0; out < out_nelts; out++)
1692 {
1693 unsigned int in = (out << scale) + ofs;
1694 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1695 VECTOR_CST_ELT (arg1, in));
1696 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1697 VECTOR_CST_ELT (arg2, in));
1698
1699 if (t1 == NULL_TREE || t2 == NULL_TREE)
1700 return NULL_TREE;
1701 tree elt = const_binop (MULT_EXPR, t1, t2);
1702 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1703 return NULL_TREE;
1704 elts.quick_push (elt);
1705 }
1706
1707 return elts.build ();
1708 }
1709
1710 default:;
1711 }
1712
1713 if (TREE_CODE_CLASS (code) != tcc_binary)
1714 return NULL_TREE;
1715
1716 /* Make sure type and arg0 have the same saturating flag. */
1717 gcc_checking_assert (TYPE_SATURATING (type)
1718 == TYPE_SATURATING (TREE_TYPE (arg1)));
1719
1720 return const_binop (code, arg1, arg2);
1721 }
1722
1723 /* Compute CODE ARG0 with resulting type TYPE, with ARG0 being constant.
1724    Return zero if computing the constant is not possible.  */
1725
1726 tree
1727 const_unop (enum tree_code code, tree type, tree arg0)
1728 {
1729 /* Don't perform the operation, other than NEGATE and ABS, if
1730 flag_signaling_nans is on and the operand is a signaling NaN. */
1731 if (TREE_CODE (arg0) == REAL_CST
1732 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1733 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1734 && code != NEGATE_EXPR
1735 && code != ABS_EXPR
1736 && code != ABSU_EXPR)
1737 return NULL_TREE;
1738
1739 switch (code)
1740 {
1741 CASE_CONVERT:
1742 case FLOAT_EXPR:
1743 case FIX_TRUNC_EXPR:
1744 case FIXED_CONVERT_EXPR:
1745 return fold_convert_const (code, type, arg0);
1746
1747 case ADDR_SPACE_CONVERT_EXPR:
1748 /* If the source address is 0, and the source address space
1749 cannot have a valid object at 0, fold to dest type null. */
1750 if (integer_zerop (arg0)
1751 && !(targetm.addr_space.zero_address_valid
1752 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1753 return fold_convert_const (code, type, arg0);
1754 break;
1755
1756 case VIEW_CONVERT_EXPR:
1757 return fold_view_convert_expr (type, arg0);
1758
1759 case NEGATE_EXPR:
1760 {
1761 /* Can't call fold_negate_const directly here as that doesn't
1762 handle all cases and we might not be able to negate some
1763 constants. */
1764 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1765 if (tem && CONSTANT_CLASS_P (tem))
1766 return tem;
1767 break;
1768 }
1769
1770 case ABS_EXPR:
1771 case ABSU_EXPR:
1772 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1773 return fold_abs_const (arg0, type);
1774 break;
1775
1776 case CONJ_EXPR:
1777 if (TREE_CODE (arg0) == COMPLEX_CST)
1778 {
1779 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1780 TREE_TYPE (type));
1781 return build_complex (type, TREE_REALPART (arg0), ipart);
1782 }
1783 break;
1784
1785 case BIT_NOT_EXPR:
1786 if (TREE_CODE (arg0) == INTEGER_CST)
1787 return fold_not_const (arg0, type);
1788 else if (POLY_INT_CST_P (arg0))
1789 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1790 /* Perform BIT_NOT_EXPR on each element individually. */
1791 else if (TREE_CODE (arg0) == VECTOR_CST)
1792 {
1793 tree elem;
1794
1795 /* This can cope with stepped encodings because ~x == -1 - x. */
1796 tree_vector_builder elements;
1797 elements.new_unary_operation (type, arg0, true);
1798 unsigned int i, count = elements.encoded_nelts ();
1799 for (i = 0; i < count; ++i)
1800 {
1801 elem = VECTOR_CST_ELT (arg0, i);
1802 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1803 if (elem == NULL_TREE)
1804 break;
1805 elements.quick_push (elem);
1806 }
1807 if (i == count)
1808 return elements.build ();
1809 }
1810 break;
1811
1812 case TRUTH_NOT_EXPR:
1813 if (TREE_CODE (arg0) == INTEGER_CST)
1814 return constant_boolean_node (integer_zerop (arg0), type);
1815 break;
1816
1817 case REALPART_EXPR:
1818 if (TREE_CODE (arg0) == COMPLEX_CST)
1819 return fold_convert (type, TREE_REALPART (arg0));
1820 break;
1821
1822 case IMAGPART_EXPR:
1823 if (TREE_CODE (arg0) == COMPLEX_CST)
1824 return fold_convert (type, TREE_IMAGPART (arg0));
1825 break;
1826
1827 case VEC_UNPACK_LO_EXPR:
1828 case VEC_UNPACK_HI_EXPR:
1829 case VEC_UNPACK_FLOAT_LO_EXPR:
1830 case VEC_UNPACK_FLOAT_HI_EXPR:
1831 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1832 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1833 {
1834 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1835 enum tree_code subcode;
1836
1837 if (TREE_CODE (arg0) != VECTOR_CST)
1838 return NULL_TREE;
1839
1840 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1841 return NULL_TREE;
1842 out_nelts = in_nelts / 2;
1843 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1844
1845 unsigned int offset = 0;
1846 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1847 || code == VEC_UNPACK_FLOAT_LO_EXPR
1848 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1849 offset = out_nelts;
1850
1851 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1852 subcode = NOP_EXPR;
1853 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1854 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1855 subcode = FLOAT_EXPR;
1856 else
1857 subcode = FIX_TRUNC_EXPR;
1858
1859 tree_vector_builder elts (type, out_nelts, 1);
1860 for (i = 0; i < out_nelts; i++)
1861 {
1862 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1863 VECTOR_CST_ELT (arg0, i + offset));
1864 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1865 return NULL_TREE;
1866 elts.quick_push (elt);
1867 }
1868
1869 return elts.build ();
1870 }
1871
1872 case VEC_DUPLICATE_EXPR:
1873 if (CONSTANT_CLASS_P (arg0))
1874 return build_vector_from_val (type, arg0);
1875 return NULL_TREE;
1876
1877 default:
1878 break;
1879 }
1880
1881 return NULL_TREE;
1882 }
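
/* A minimal usage sketch, assuming integer_type_node; the helper
   name is illustrative.  BIT_NOT_EXPR on an INTEGER_CST folds via
   fold_not_const, so ~5 becomes -6 in a signed type.  */

static void
example_const_unop_not (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  tree notv = const_unop (BIT_NOT_EXPR, integer_type_node, five);
  gcc_assert (notv != NULL_TREE && tree_to_shwi (notv) == -6);
}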
1883
1884 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1885 indicates which particular sizetype to create. */
1886
1887 tree
1888 size_int_kind (poly_int64 number, enum size_type_kind kind)
1889 {
1890 return build_int_cst (sizetype_tab[(int) kind], number);
1891 }
1892 \f
1893 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1894 is a tree code. The type of the result is taken from the operands.
1895    Both must be equivalent integer types, in the sense of int_binop_types_match_p.
1896 If the operands are constant, so is the result. */
1897
1898 tree
1899 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1900 {
1901 tree type = TREE_TYPE (arg0);
1902
1903 if (arg0 == error_mark_node || arg1 == error_mark_node)
1904 return error_mark_node;
1905
1906 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1907 TREE_TYPE (arg1)));
1908
1909 /* Handle the special case of two poly_int constants faster. */
1910 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1911 {
1912 /* And some specific cases even faster than that. */
1913 if (code == PLUS_EXPR)
1914 {
1915 if (integer_zerop (arg0)
1916 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1917 return arg1;
1918 if (integer_zerop (arg1)
1919 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1920 return arg0;
1921 }
1922 else if (code == MINUS_EXPR)
1923 {
1924 if (integer_zerop (arg1)
1925 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1926 return arg0;
1927 }
1928 else if (code == MULT_EXPR)
1929 {
1930 if (integer_onep (arg0)
1931 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1932 return arg1;
1933 }
1934
1935 /* Handle general case of two integer constants. For sizetype
1936 constant calculations we always want to know about overflow,
1937 even in the unsigned case. */
1938 tree res = int_const_binop (code, arg0, arg1, -1);
1939 if (res != NULL_TREE)
1940 return res;
1941 }
1942
1943 return fold_build2_loc (loc, code, type, arg0, arg1);
1944 }
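/* A hedged usage sketch (illustrative only; N stands for any sizetype
   tree): with two constant operands the size_binop macro folds
   immediately, tracking overflow, while the zero/one special cases
   above hand back an operand unchanged:

     tree p = size_binop (MULT_EXPR, size_int (3), size_int (5));
     // p is the sizetype constant 15
     tree q = size_binop (PLUS_EXPR, size_int (0), n);
     // q is N itself; no new node is built  */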
1945
1946 /* Given two values, either both of sizetype or both of bitsizetype,
1947 compute the difference between the two values. Return the value
1948 in signed type corresponding to the type of the operands. */
1949
1950 tree
1951 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1952 {
1953 tree type = TREE_TYPE (arg0);
1954 tree ctype;
1955
1956 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1957 TREE_TYPE (arg1)));
1958
1959 /* If the type is already signed, just do the simple thing. */
1960 if (!TYPE_UNSIGNED (type))
1961 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1962
1963 if (type == sizetype)
1964 ctype = ssizetype;
1965 else if (type == bitsizetype)
1966 ctype = sbitsizetype;
1967 else
1968 ctype = signed_type_for (type);
1969
1970 /* If either operand is not a constant, do the conversions to the signed
1971 type and subtract. The hardware will do the right thing with any
1972 overflow in the subtraction. */
1973 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1974 return size_binop_loc (loc, MINUS_EXPR,
1975 fold_convert_loc (loc, ctype, arg0),
1976 fold_convert_loc (loc, ctype, arg1));
1977
1978 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1979 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1980 overflow) and negate (which can't either). Special-case a result
1981 of zero while we're here. */
1982 if (tree_int_cst_equal (arg0, arg1))
1983 return build_int_cst (ctype, 0);
1984 else if (tree_int_cst_lt (arg1, arg0))
1985 return fold_convert_loc (loc, ctype,
1986 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1987 else
1988 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1989 fold_convert_loc (loc, ctype,
1990 size_binop_loc (loc,
1991 MINUS_EXPR,
1992 arg1, arg0)));
1993 }
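/* Worked example (illustrative): with two sizetype constants the result
   is computed in ssizetype.  For 4 - 12, ARG1 is the larger value, so
   the final branch above subtracts the other way and negates:

     tree d = size_diffop (size_int (4), size_int (12));
     // d is the ssizetype constant -8, i.e. 0 - (ssizetype) (12 - 4)  */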
1994 \f
1995 /* A subroutine of fold_convert_const handling conversions of an
1996 INTEGER_CST to another integer type. */
1997
1998 static tree
1999 fold_convert_const_int_from_int (tree type, const_tree arg1)
2000 {
2001 /* Given an integer constant, make new constant with new type,
2002 appropriately sign-extended or truncated. Use widest_int
2003 so that any extension is done according to ARG1's type. */
2004 return force_fit_type (type, wi::to_widest (arg1),
2005 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2006 TREE_OVERFLOW (arg1));
2007 }
2008
2009 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2010 to an integer type. */
2011
2012 static tree
2013 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2014 {
2015 bool overflow = false;
2016 tree t;
2017
2018 /* The following code implements the floating point to integer
2019 conversion rules required by the Java Language Specification:
2020 IEEE NaNs are mapped to zero, and values that overflow
2021 the target precision saturate, i.e. values greater than
2022 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2023 are mapped to INT_MIN. These semantics are allowed by the
2024 C and C++ standards that simply state that the behavior of
2025 FP-to-integer conversion is unspecified upon overflow. */
2026
2027 wide_int val;
2028 REAL_VALUE_TYPE r;
2029 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2030
2031 switch (code)
2032 {
2033 case FIX_TRUNC_EXPR:
2034 real_trunc (&r, VOIDmode, &x);
2035 break;
2036
2037 default:
2038 gcc_unreachable ();
2039 }
2040
2041 /* If R is NaN, return zero and show we have an overflow. */
2042 if (REAL_VALUE_ISNAN (r))
2043 {
2044 overflow = true;
2045 val = wi::zero (TYPE_PRECISION (type));
2046 }
2047
2048 /* See if R is less than the lower bound or greater than the
2049 upper bound. */
2050
2051 if (! overflow)
2052 {
2053 tree lt = TYPE_MIN_VALUE (type);
2054 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2055 if (real_less (&r, &l))
2056 {
2057 overflow = true;
2058 val = wi::to_wide (lt);
2059 }
2060 }
2061
2062 if (! overflow)
2063 {
2064 tree ut = TYPE_MAX_VALUE (type);
2065 if (ut)
2066 {
2067 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2068 if (real_less (&u, &r))
2069 {
2070 overflow = true;
2071 val = wi::to_wide (ut);
2072 }
2073 }
2074 }
2075
2076 if (! overflow)
2077 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2078
2079 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2080 return t;
2081 }
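/* Worked example of the saturating semantics above (illustrative):
   converting the REAL_CST 1.0e19 to a 32-bit signed type exceeds
   TYPE_MAX_VALUE, so VAL saturates to INT_MAX (2147483647) and the
   result gets TREE_OVERFLOW set; converting a NaN yields 0, likewise
   flagged as an overflow.  */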
2082
2083 /* A subroutine of fold_convert_const handling conversions of a
2084 FIXED_CST to an integer type. */
2085
2086 static tree
2087 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2088 {
2089 tree t;
2090 double_int temp, temp_trunc;
2091 scalar_mode mode;
2092
2093 /* Right shift FIXED_CST to temp by fbit. */
2094 temp = TREE_FIXED_CST (arg1).data;
2095 mode = TREE_FIXED_CST (arg1).mode;
2096 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2097 {
2098 temp = temp.rshift (GET_MODE_FBIT (mode),
2099 HOST_BITS_PER_DOUBLE_INT,
2100 SIGNED_FIXED_POINT_MODE_P (mode));
2101
2102 /* Left shift temp to temp_trunc by fbit. */
2103 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2104 HOST_BITS_PER_DOUBLE_INT,
2105 SIGNED_FIXED_POINT_MODE_P (mode));
2106 }
2107 else
2108 {
2109 temp = double_int_zero;
2110 temp_trunc = double_int_zero;
2111 }
2112
2113 /* If FIXED_CST is negative, we need to round the value toward 0:
2114 add 1 to TEMP when the discarded fractional bits are nonzero. */
2115 if (SIGNED_FIXED_POINT_MODE_P (mode)
2116 && temp_trunc.is_negative ()
2117 && TREE_FIXED_CST (arg1).data != temp_trunc)
2118 temp += double_int_one;
2119
2120 /* Given a fixed-point constant, make new constant with new type,
2121 appropriately sign-extended or truncated. */
2122 t = force_fit_type (type, temp, -1,
2123 (temp.is_negative ()
2124 && (TYPE_UNSIGNED (type)
2125 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2126 | TREE_OVERFLOW (arg1));
2127
2128 return t;
2129 }
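/* Worked example (illustrative): for the signed fixed-point value
   -3.75, the arithmetic right shift by FBIT yields -4, i.e. it rounds
   toward -infinity.  TEMP_TRUNC (-4.0) is negative and differs from the
   original -3.75, so 1 is added back, giving the truncated result -3.  */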
2130
2131 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2132 to another floating point type. */
2133
2134 static tree
2135 fold_convert_const_real_from_real (tree type, const_tree arg1)
2136 {
2137 REAL_VALUE_TYPE value;
2138 tree t;
2139
2140 /* Don't perform the operation if flag_signaling_nans is on
2141 and the operand is a signaling NaN. */
2142 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2143 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2144 return NULL_TREE;
2145
2146 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2147 t = build_real (type, value);
2148
2149 /* If converting an infinity or NAN to a representation that doesn't
2150 have one, set the overflow bit so that we can produce some kind of
2151 error message at the appropriate point if necessary. It's not the
2152 most user-friendly message, but it's better than nothing. */
2153 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2154 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2155 TREE_OVERFLOW (t) = 1;
2156 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2157 && !MODE_HAS_NANS (TYPE_MODE (type)))
2158 TREE_OVERFLOW (t) = 1;
2159 /* Regular overflow: the conversion produced an infinity in a mode that
2160 can't represent one. */
2161 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2162 && REAL_VALUE_ISINF (value)
2163 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2164 TREE_OVERFLOW (t) = 1;
2165 else
2166 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2167 return t;
2168 }
2169
2170 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2171 to a floating point type. */
2172
2173 static tree
2174 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2175 {
2176 REAL_VALUE_TYPE value;
2177 tree t;
2178
2179 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2180 &TREE_FIXED_CST (arg1));
2181 t = build_real (type, value);
2182
2183 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2184 return t;
2185 }
2186
2187 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2188 to another fixed-point type. */
2189
2190 static tree
2191 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2192 {
2193 FIXED_VALUE_TYPE value;
2194 tree t;
2195 bool overflow_p;
2196
2197 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2198 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2199 t = build_fixed (type, value);
2200
2201 /* Propagate overflow flags. */
2202 if (overflow_p | TREE_OVERFLOW (arg1))
2203 TREE_OVERFLOW (t) = 1;
2204 return t;
2205 }
2206
2207 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2208 to a fixed-point type. */
2209
2210 static tree
2211 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2212 {
2213 FIXED_VALUE_TYPE value;
2214 tree t;
2215 bool overflow_p;
2216 double_int di;
2217
2218 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2219
2220 di.low = TREE_INT_CST_ELT (arg1, 0);
2221 if (TREE_INT_CST_NUNITS (arg1) == 1)
2222 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2223 else
2224 di.high = TREE_INT_CST_ELT (arg1, 1);
2225
2226 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2227 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2228 TYPE_SATURATING (type));
2229 t = build_fixed (type, value);
2230
2231 /* Propagate overflow flags. */
2232 if (overflow_p | TREE_OVERFLOW (arg1))
2233 TREE_OVERFLOW (t) = 1;
2234 return t;
2235 }
2236
2237 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2238 to a fixed-point type. */
2239
2240 static tree
2241 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2242 {
2243 FIXED_VALUE_TYPE value;
2244 tree t;
2245 bool overflow_p;
2246
2247 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2248 &TREE_REAL_CST (arg1),
2249 TYPE_SATURATING (type));
2250 t = build_fixed (type, value);
2251
2252 /* Propagate overflow flags. */
2253 if (overflow_p | TREE_OVERFLOW (arg1))
2254 TREE_OVERFLOW (t) = 1;
2255 return t;
2256 }
2257
2258 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2259 type TYPE. If no simplification can be done return NULL_TREE. */
2260
2261 static tree
2262 fold_convert_const (enum tree_code code, tree type, tree arg1)
2263 {
2264 tree arg_type = TREE_TYPE (arg1);
2265 if (arg_type == type)
2266 return arg1;
2267
2268 /* We can't widen types, since the runtime value could overflow the
2269 original type before being extended to the new type. */
2270 if (POLY_INT_CST_P (arg1)
2271 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2272 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2273 return build_poly_int_cst (type,
2274 poly_wide_int::from (poly_int_cst_value (arg1),
2275 TYPE_PRECISION (type),
2276 TYPE_SIGN (arg_type)));
2277
2278 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2279 || TREE_CODE (type) == OFFSET_TYPE)
2280 {
2281 if (TREE_CODE (arg1) == INTEGER_CST)
2282 return fold_convert_const_int_from_int (type, arg1);
2283 else if (TREE_CODE (arg1) == REAL_CST)
2284 return fold_convert_const_int_from_real (code, type, arg1);
2285 else if (TREE_CODE (arg1) == FIXED_CST)
2286 return fold_convert_const_int_from_fixed (type, arg1);
2287 }
2288 else if (TREE_CODE (type) == REAL_TYPE)
2289 {
2290 if (TREE_CODE (arg1) == INTEGER_CST)
2291 return build_real_from_int_cst (type, arg1);
2292 else if (TREE_CODE (arg1) == REAL_CST)
2293 return fold_convert_const_real_from_real (type, arg1);
2294 else if (TREE_CODE (arg1) == FIXED_CST)
2295 return fold_convert_const_real_from_fixed (type, arg1);
2296 }
2297 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2298 {
2299 if (TREE_CODE (arg1) == FIXED_CST)
2300 return fold_convert_const_fixed_from_fixed (type, arg1);
2301 else if (TREE_CODE (arg1) == INTEGER_CST)
2302 return fold_convert_const_fixed_from_int (type, arg1);
2303 else if (TREE_CODE (arg1) == REAL_CST)
2304 return fold_convert_const_fixed_from_real (type, arg1);
2305 }
2306 else if (TREE_CODE (type) == VECTOR_TYPE)
2307 {
2308 if (TREE_CODE (arg1) == VECTOR_CST
2309 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2310 {
2311 tree elttype = TREE_TYPE (type);
2312 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2313 /* We can't handle steps directly when extending, since the
2314 values need to wrap at the original precision first. */
2315 bool step_ok_p
2316 = (INTEGRAL_TYPE_P (elttype)
2317 && INTEGRAL_TYPE_P (arg1_elttype)
2318 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2319 tree_vector_builder v;
2320 if (!v.new_unary_operation (type, arg1, step_ok_p))
2321 return NULL_TREE;
2322 unsigned int len = v.encoded_nelts ();
2323 for (unsigned int i = 0; i < len; ++i)
2324 {
2325 tree elt = VECTOR_CST_ELT (arg1, i);
2326 tree cvt = fold_convert_const (code, elttype, elt);
2327 if (cvt == NULL_TREE)
2328 return NULL_TREE;
2329 v.quick_push (cvt);
2330 }
2331 return v.build ();
2332 }
2333 }
2334 return NULL_TREE;
2335 }
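/* A hedged usage sketch (illustrative only): the dispatch above routes
   each constant kind to its helper and returns NULL_TREE when nothing
   can be folded:

     tree t = fold_convert_const (FLOAT_EXPR, double_type_node,
                                  build_int_cst (integer_type_node, 3));
     // t is the REAL_CST 3.0, built via build_real_from_int_cst  */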
2336
2337 /* Construct a vector of zero elements of vector type TYPE. */
2338
2339 static tree
2340 build_zero_vector (tree type)
2341 {
2342 tree t;
2343
2344 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2345 return build_vector_from_val (type, t);
2346 }
2347
2348 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2349
2350 bool
2351 fold_convertible_p (const_tree type, const_tree arg)
2352 {
2353 tree orig = TREE_TYPE (arg);
2354
2355 if (type == orig)
2356 return true;
2357
2358 if (TREE_CODE (arg) == ERROR_MARK
2359 || TREE_CODE (type) == ERROR_MARK
2360 || TREE_CODE (orig) == ERROR_MARK)
2361 return false;
2362
2363 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2364 return true;
2365
2366 switch (TREE_CODE (type))
2367 {
2368 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2369 case POINTER_TYPE: case REFERENCE_TYPE:
2370 case OFFSET_TYPE:
2371 return (INTEGRAL_TYPE_P (orig)
2372 || (POINTER_TYPE_P (orig)
2373 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2374 || TREE_CODE (orig) == OFFSET_TYPE);
2375
2376 case REAL_TYPE:
2377 case FIXED_POINT_TYPE:
2378 case VOID_TYPE:
2379 return TREE_CODE (type) == TREE_CODE (orig);
2380
2381 case VECTOR_TYPE:
2382 return (VECTOR_TYPE_P (orig)
2383 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2384 TYPE_VECTOR_SUBPARTS (orig))
2385 && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2386
2387 default:
2388 return false;
2389 }
2390 }
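/* Examples (illustrative; INT_EXPR stands for any expression of type
   int): integral-to-integral conversions are NOP-convertible, but
   int-to-double is not, since the REAL_TYPE case requires the source
   to be a REAL_TYPE as well (that conversion needs a FLOAT_EXPR):

     fold_convertible_p (long_integer_type_node, int_expr);  // true
     fold_convertible_p (double_type_node, int_expr);        // false  */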
2391
2392 /* Convert expression ARG to type TYPE. Used by the middle-end for
2393 simple conversions in preference to calling the front-end's convert. */
2394
2395 tree
2396 fold_convert_loc (location_t loc, tree type, tree arg)
2397 {
2398 tree orig = TREE_TYPE (arg);
2399 tree tem;
2400
2401 if (type == orig)
2402 return arg;
2403
2404 if (TREE_CODE (arg) == ERROR_MARK
2405 || TREE_CODE (type) == ERROR_MARK
2406 || TREE_CODE (orig) == ERROR_MARK)
2407 return error_mark_node;
2408
2409 switch (TREE_CODE (type))
2410 {
2411 case POINTER_TYPE:
2412 case REFERENCE_TYPE:
2413 /* Handle conversions between pointers to different address spaces. */
2414 if (POINTER_TYPE_P (orig)
2415 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2416 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2417 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2418 /* fall through */
2419
2420 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2421 case OFFSET_TYPE:
2422 if (TREE_CODE (arg) == INTEGER_CST)
2423 {
2424 tem = fold_convert_const (NOP_EXPR, type, arg);
2425 if (tem != NULL_TREE)
2426 return tem;
2427 }
2428 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2429 || TREE_CODE (orig) == OFFSET_TYPE)
2430 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2431 if (TREE_CODE (orig) == COMPLEX_TYPE)
2432 return fold_convert_loc (loc, type,
2433 fold_build1_loc (loc, REALPART_EXPR,
2434 TREE_TYPE (orig), arg));
2435 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2436 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2437 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2438
2439 case REAL_TYPE:
2440 if (TREE_CODE (arg) == INTEGER_CST)
2441 {
2442 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2443 if (tem != NULL_TREE)
2444 return tem;
2445 }
2446 else if (TREE_CODE (arg) == REAL_CST)
2447 {
2448 tem = fold_convert_const (NOP_EXPR, type, arg);
2449 if (tem != NULL_TREE)
2450 return tem;
2451 }
2452 else if (TREE_CODE (arg) == FIXED_CST)
2453 {
2454 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2455 if (tem != NULL_TREE)
2456 return tem;
2457 }
2458
2459 switch (TREE_CODE (orig))
2460 {
2461 case INTEGER_TYPE:
2462 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2463 case POINTER_TYPE: case REFERENCE_TYPE:
2464 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2465
2466 case REAL_TYPE:
2467 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2468
2469 case FIXED_POINT_TYPE:
2470 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2471
2472 case COMPLEX_TYPE:
2473 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2474 return fold_convert_loc (loc, type, tem);
2475
2476 default:
2477 gcc_unreachable ();
2478 }
2479
2480 case FIXED_POINT_TYPE:
2481 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2482 || TREE_CODE (arg) == REAL_CST)
2483 {
2484 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2485 if (tem != NULL_TREE)
2486 goto fold_convert_exit;
2487 }
2488
2489 switch (TREE_CODE (orig))
2490 {
2491 case FIXED_POINT_TYPE:
2492 case INTEGER_TYPE:
2493 case ENUMERAL_TYPE:
2494 case BOOLEAN_TYPE:
2495 case REAL_TYPE:
2496 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2497
2498 case COMPLEX_TYPE:
2499 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2500 return fold_convert_loc (loc, type, tem);
2501
2502 default:
2503 gcc_unreachable ();
2504 }
2505
2506 case COMPLEX_TYPE:
2507 switch (TREE_CODE (orig))
2508 {
2509 case INTEGER_TYPE:
2510 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2511 case POINTER_TYPE: case REFERENCE_TYPE:
2512 case REAL_TYPE:
2513 case FIXED_POINT_TYPE:
2514 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2515 fold_convert_loc (loc, TREE_TYPE (type), arg),
2516 fold_convert_loc (loc, TREE_TYPE (type),
2517 integer_zero_node));
2518 case COMPLEX_TYPE:
2519 {
2520 tree rpart, ipart;
2521
2522 if (TREE_CODE (arg) == COMPLEX_EXPR)
2523 {
2524 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2525 TREE_OPERAND (arg, 0));
2526 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2527 TREE_OPERAND (arg, 1));
2528 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2529 }
2530
2531 arg = save_expr (arg);
2532 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2533 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2534 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2535 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2536 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2537 }
2538
2539 default:
2540 gcc_unreachable ();
2541 }
2542
2543 case VECTOR_TYPE:
2544 if (integer_zerop (arg))
2545 return build_zero_vector (type);
2546 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2547 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2548 || TREE_CODE (orig) == VECTOR_TYPE);
2549 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2550
2551 case VOID_TYPE:
2552 tem = fold_ignored_result (arg);
2553 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2554
2555 default:
2556 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2557 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2558 gcc_unreachable ();
2559 }
2560 fold_convert_exit:
2561 protected_set_expr_location_unshare (tem, loc);
2562 return tem;
2563 }
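/* A hedged usage sketch (illustrative; INT_EXPR stands for any
   non-constant expression of type int): constants are folded directly,
   other operands are wrapped in the appropriate conversion node:

     tree c = fold_convert (double_type_node,
                            build_int_cst (integer_type_node, 3));
     // c is the REAL_CST 3.0
     tree e = fold_convert (double_type_node, int_expr);
     // e is FLOAT_EXPR <int_expr>  */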
2564 \f
2565 /* Return false if expr can be assumed not to be an lvalue, true
2566 otherwise. */
2567
2568 static bool
2569 maybe_lvalue_p (const_tree x)
2570 {
2571 /* We only need to wrap lvalue tree codes. */
2572 switch (TREE_CODE (x))
2573 {
2574 case VAR_DECL:
2575 case PARM_DECL:
2576 case RESULT_DECL:
2577 case LABEL_DECL:
2578 case FUNCTION_DECL:
2579 case SSA_NAME:
2580
2581 case COMPONENT_REF:
2582 case MEM_REF:
2583 case INDIRECT_REF:
2584 case ARRAY_REF:
2585 case ARRAY_RANGE_REF:
2586 case BIT_FIELD_REF:
2587 case OBJ_TYPE_REF:
2588
2589 case REALPART_EXPR:
2590 case IMAGPART_EXPR:
2591 case PREINCREMENT_EXPR:
2592 case PREDECREMENT_EXPR:
2593 case SAVE_EXPR:
2594 case TRY_CATCH_EXPR:
2595 case WITH_CLEANUP_EXPR:
2596 case COMPOUND_EXPR:
2597 case MODIFY_EXPR:
2598 case TARGET_EXPR:
2599 case COND_EXPR:
2600 case BIND_EXPR:
2601 case VIEW_CONVERT_EXPR:
2602 break;
2603
2604 default:
2605 /* Assume the worst for front-end tree codes. */
2606 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2607 break;
2608 return false;
2609 }
2610
2611 return true;
2612 }
2613
2614 /* Return an expr equal to X but certainly not valid as an lvalue. */
2615
2616 tree
2617 non_lvalue_loc (location_t loc, tree x)
2618 {
2619 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2620 us. */
2621 if (in_gimple_form)
2622 return x;
2623
2624 if (! maybe_lvalue_p (x))
2625 return x;
2626 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2627 }
2628
2629 /* Historically, when pedantic, this returned an expr equal to X but not
2630 valid as a pedantic lvalue; now it simply returns X at location LOC. */
2631
2632 static tree
2633 pedantic_non_lvalue_loc (location_t loc, tree x)
2634 {
2635 return protected_set_expr_location_unshare (x, loc);
2636 }
2637 \f
2638 /* Given a tree comparison code, return the code that is the logical inverse.
2639 It is generally not safe to do this for floating-point comparisons, except
2640 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2641 ERROR_MARK in this case. */
2642
2643 enum tree_code
2644 invert_tree_comparison (enum tree_code code, bool honor_nans)
2645 {
2646 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2647 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2648 return ERROR_MARK;
2649
2650 switch (code)
2651 {
2652 case EQ_EXPR:
2653 return NE_EXPR;
2654 case NE_EXPR:
2655 return EQ_EXPR;
2656 case GT_EXPR:
2657 return honor_nans ? UNLE_EXPR : LE_EXPR;
2658 case GE_EXPR:
2659 return honor_nans ? UNLT_EXPR : LT_EXPR;
2660 case LT_EXPR:
2661 return honor_nans ? UNGE_EXPR : GE_EXPR;
2662 case LE_EXPR:
2663 return honor_nans ? UNGT_EXPR : GT_EXPR;
2664 case LTGT_EXPR:
2665 return UNEQ_EXPR;
2666 case UNEQ_EXPR:
2667 return LTGT_EXPR;
2668 case UNGT_EXPR:
2669 return LE_EXPR;
2670 case UNGE_EXPR:
2671 return LT_EXPR;
2672 case UNLT_EXPR:
2673 return GE_EXPR;
2674 case UNLE_EXPR:
2675 return GT_EXPR;
2676 case ORDERED_EXPR:
2677 return UNORDERED_EXPR;
2678 case UNORDERED_EXPR:
2679 return ORDERED_EXPR;
2680 default:
2681 gcc_unreachable ();
2682 }
2683 }
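/* Examples (illustrative): without NaNs the inverse of LT_EXPR is
   GE_EXPR; when NaNs are honored it is UNGE_EXPR instead, unless
   flag_trapping_math is set, in which case the inversion is refused:

     invert_tree_comparison (LT_EXPR, false);  // GE_EXPR
     invert_tree_comparison (LT_EXPR, true);   // UNGE_EXPR, or ERROR_MARK
                                               // under -ftrapping-math  */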
2684
2685 /* Similar, but return the comparison that results if the operands are
2686 swapped. This is safe for floating-point. */
2687
2688 enum tree_code
2689 swap_tree_comparison (enum tree_code code)
2690 {
2691 switch (code)
2692 {
2693 case EQ_EXPR:
2694 case NE_EXPR:
2695 case ORDERED_EXPR:
2696 case UNORDERED_EXPR:
2697 case LTGT_EXPR:
2698 case UNEQ_EXPR:
2699 return code;
2700 case GT_EXPR:
2701 return LT_EXPR;
2702 case GE_EXPR:
2703 return LE_EXPR;
2704 case LT_EXPR:
2705 return GT_EXPR;
2706 case LE_EXPR:
2707 return GE_EXPR;
2708 case UNGT_EXPR:
2709 return UNLT_EXPR;
2710 case UNGE_EXPR:
2711 return UNLE_EXPR;
2712 case UNLT_EXPR:
2713 return UNGT_EXPR;
2714 case UNLE_EXPR:
2715 return UNGE_EXPR;
2716 default:
2717 gcc_unreachable ();
2718 }
2719 }
2720
2721
2722 /* Convert a comparison tree code from an enum tree_code representation
2723 into a compcode bit-based encoding. This function is the inverse of
2724 compcode_to_comparison. */
2725
2726 static enum comparison_code
2727 comparison_to_compcode (enum tree_code code)
2728 {
2729 switch (code)
2730 {
2731 case LT_EXPR:
2732 return COMPCODE_LT;
2733 case EQ_EXPR:
2734 return COMPCODE_EQ;
2735 case LE_EXPR:
2736 return COMPCODE_LE;
2737 case GT_EXPR:
2738 return COMPCODE_GT;
2739 case NE_EXPR:
2740 return COMPCODE_NE;
2741 case GE_EXPR:
2742 return COMPCODE_GE;
2743 case ORDERED_EXPR:
2744 return COMPCODE_ORD;
2745 case UNORDERED_EXPR:
2746 return COMPCODE_UNORD;
2747 case UNLT_EXPR:
2748 return COMPCODE_UNLT;
2749 case UNEQ_EXPR:
2750 return COMPCODE_UNEQ;
2751 case UNLE_EXPR:
2752 return COMPCODE_UNLE;
2753 case UNGT_EXPR:
2754 return COMPCODE_UNGT;
2755 case LTGT_EXPR:
2756 return COMPCODE_LTGT;
2757 case UNGE_EXPR:
2758 return COMPCODE_UNGE;
2759 default:
2760 gcc_unreachable ();
2761 }
2762 }
2763
2764 /* Convert a compcode bit-based encoding of a comparison operator back
2765 to GCC's enum tree_code representation. This function is the
2766 inverse of comparison_to_compcode. */
2767
2768 static enum tree_code
2769 compcode_to_comparison (enum comparison_code code)
2770 {
2771 switch (code)
2772 {
2773 case COMPCODE_LT:
2774 return LT_EXPR;
2775 case COMPCODE_EQ:
2776 return EQ_EXPR;
2777 case COMPCODE_LE:
2778 return LE_EXPR;
2779 case COMPCODE_GT:
2780 return GT_EXPR;
2781 case COMPCODE_NE:
2782 return NE_EXPR;
2783 case COMPCODE_GE:
2784 return GE_EXPR;
2785 case COMPCODE_ORD:
2786 return ORDERED_EXPR;
2787 case COMPCODE_UNORD:
2788 return UNORDERED_EXPR;
2789 case COMPCODE_UNLT:
2790 return UNLT_EXPR;
2791 case COMPCODE_UNEQ:
2792 return UNEQ_EXPR;
2793 case COMPCODE_UNLE:
2794 return UNLE_EXPR;
2795 case COMPCODE_UNGT:
2796 return UNGT_EXPR;
2797 case COMPCODE_LTGT:
2798 return LTGT_EXPR;
2799 case COMPCODE_UNGE:
2800 return UNGE_EXPR;
2801 default:
2802 gcc_unreachable ();
2803 }
2804 }
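/* Worked examples of the bit encoding (values from enum
   comparison_code): COMPCODE_LT (1) | COMPCODE_EQ (2) == COMPCODE_LE (3)
   and COMPCODE_LE (3) & COMPCODE_GE (6) == COMPCODE_EQ (2).  This is
   what lets combine_comparisons below implement the AND/OR of two
   comparisons with plain bitwise operations.  */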
2805
2806 /* Return true if COND1 tests the opposite condition of COND2. */
2807
2808 bool
2809 inverse_conditions_p (const_tree cond1, const_tree cond2)
2810 {
2811 return (COMPARISON_CLASS_P (cond1)
2812 && COMPARISON_CLASS_P (cond2)
2813 && (invert_tree_comparison
2814 (TREE_CODE (cond1),
2815 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2816 && operand_equal_p (TREE_OPERAND (cond1, 0),
2817 TREE_OPERAND (cond2, 0), 0)
2818 && operand_equal_p (TREE_OPERAND (cond1, 1),
2819 TREE_OPERAND (cond2, 1), 0));
2820 }
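/* Example (illustrative): for integer operands A and B, the trees
   A < B and A >= B satisfy inverse_conditions_p, since GE_EXPR is the
   NaN-free inverse of LT_EXPR and the operands match pairwise.  */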
2821
2822 /* Return a tree for the comparison which is the combination of
2823 doing the AND or OR (depending on CODE) of the two operations LCODE
2824 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2825 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2826 if this makes the transformation invalid. */
2827
2828 tree
2829 combine_comparisons (location_t loc,
2830 enum tree_code code, enum tree_code lcode,
2831 enum tree_code rcode, tree truth_type,
2832 tree ll_arg, tree lr_arg)
2833 {
2834 bool honor_nans = HONOR_NANS (ll_arg);
2835 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2836 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2837 int compcode;
2838
2839 switch (code)
2840 {
2841 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2842 compcode = lcompcode & rcompcode;
2843 break;
2844
2845 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2846 compcode = lcompcode | rcompcode;
2847 break;
2848
2849 default:
2850 return NULL_TREE;
2851 }
2852
2853 if (!honor_nans)
2854 {
2855 /* Eliminate unordered comparisons, as well as LTGT and ORD
2856 which are not used unless the mode has NaNs. */
2857 compcode &= ~COMPCODE_UNORD;
2858 if (compcode == COMPCODE_LTGT)
2859 compcode = COMPCODE_NE;
2860 else if (compcode == COMPCODE_ORD)
2861 compcode = COMPCODE_TRUE;
2862 }
2863 else if (flag_trapping_math)
2864 {
2865 /* Check that the original operation and the optimized ones will trap
2866 under the same condition. */
2867 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2868 && (lcompcode != COMPCODE_EQ)
2869 && (lcompcode != COMPCODE_ORD);
2870 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2871 && (rcompcode != COMPCODE_EQ)
2872 && (rcompcode != COMPCODE_ORD);
2873 bool trap = (compcode & COMPCODE_UNORD) == 0
2874 && (compcode != COMPCODE_EQ)
2875 && (compcode != COMPCODE_ORD);
2876
2877 /* In a short-circuited boolean expression the LHS might be
2878 such that the RHS, if evaluated, will never trap. For
2879 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2880 if neither x nor y is NaN. (This is a mixed blessing: for
2881 example, the expression above will never trap, hence
2882 optimizing it to x < y would be invalid). */
2883 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2884 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2885 rtrap = false;
2886
2887 /* If the comparison was short-circuited, and only the RHS
2888 trapped, we may now generate a spurious trap. */
2889 if (rtrap && !ltrap
2890 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2891 return NULL_TREE;
2892
2893 /* If we changed the conditions that cause a trap, we lose. */
2894 if ((ltrap || rtrap) != trap)
2895 return NULL_TREE;
2896 }
2897
2898 if (compcode == COMPCODE_TRUE)
2899 return constant_boolean_node (true, truth_type);
2900 else if (compcode == COMPCODE_FALSE)
2901 return constant_boolean_node (false, truth_type);
2902 else
2903 {
2904 enum tree_code tcode;
2905
2906 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2907 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2908 }
2909 }
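/* Worked example (illustrative; X and Y are integer operands, so NaNs
   are not honored): for (x <= y) && (x >= y) the bitwise AND computes
   COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ, so

     combine_comparisons (loc, TRUTH_ANDIF_EXPR, LE_EXPR, GE_EXPR,
                          boolean_type_node, x, y);

   folds the conjunction down to the single comparison x == y.  */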
2910 \f
2911 /* Return nonzero if two operands (typically of the same tree node)
2912 are necessarily equal. FLAGS modifies behavior as follows:
2913
2914 If OEP_ONLY_CONST is set, only return nonzero for constants.
2915 This function tests whether the operands are indistinguishable;
2916 it does not test whether they are equal using C's == operation.
2917 The distinction is important for IEEE floating point, because
2918 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2919 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2920
2921 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2922 even though it may hold multiple values during a function.
2923 This is because a GCC tree node guarantees that nothing else is
2924 executed between the evaluation of its "operands" (which may often
2925 be evaluated in arbitrary order). Hence if the operands themselves
2926 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2927 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2928 unset means assuming isochronic (or instantaneous) tree equivalence.
2929 Unless comparing arbitrary expression trees, such as from different
2930 statements, this flag can usually be left unset.
2931
2932 If OEP_PURE_SAME is set, then pure functions with identical arguments
2933 are considered the same. It is used when the caller has other ways
2934 to ensure that global memory is unchanged in between.
2935
2936 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2937 not values of expressions.
2938
2939 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2940 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2941
2942 If OEP_BITWISE is set, then require the values to be bitwise identical
2943 rather than simply numerically equal. Do not take advantage of things
2944 like math-related flags or undefined behavior; only return true for
2945 values that are provably bitwise identical in all circumstances.
2946
2947 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2948 any operand with side effects. This is unnecessarily conservative in the
2949 case we know that arg0 and arg1 are in disjoint code paths (such as in
2950 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2951 addresses with TREE_CONSTANT flag set so we know that &var == &var
2952 even if var is volatile. */
2953
2954 bool
2955 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2956 unsigned int flags)
2957 {
2958 bool r;
2959 if (verify_hash_value (arg0, arg1, flags, &r))
2960 return r;
2961
2962 STRIP_ANY_LOCATION_WRAPPER (arg0);
2963 STRIP_ANY_LOCATION_WRAPPER (arg1);
2964
2965 /* If either is ERROR_MARK, they aren't equal. */
2966 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2967 || TREE_TYPE (arg0) == error_mark_node
2968 || TREE_TYPE (arg1) == error_mark_node)
2969 return false;
2970
2971 /* Similarly, if either does not have a type (like a template id),
2972 they aren't equal. */
2973 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2974 return false;
2975
2976 /* Bitwise identity makes no sense if the values have different layouts. */
2977 if ((flags & OEP_BITWISE)
2978 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2979 return false;
2980
2981 /* We cannot consider pointers to different address space equal. */
2982 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2983 && POINTER_TYPE_P (TREE_TYPE (arg1))
2984 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2985 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2986 return false;
2987
2988 /* Check equality of integer constants before bailing out due to
2989 precision differences. */
2990 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2991 {
2992 /* Address of INTEGER_CST is not defined; check that we did not forget
2993 to drop the OEP_ADDRESS_OF flags. */
2994 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2995 return tree_int_cst_equal (arg0, arg1);
2996 }
2997
2998 if (!(flags & OEP_ADDRESS_OF))
2999 {
3000 /* If the two types don't have the same signedness, then we can't consider
3001 them equal. We must check this before the STRIP_NOPS calls
3002 because they may change the signedness of the arguments. As pointers
3003 strictly don't have a signedness, require either two pointers or
3004 two non-pointers as well. */
3005 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3006 || POINTER_TYPE_P (TREE_TYPE (arg0))
3007 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3008 return false;
3009
3010 /* If the two types don't have the same precision, then it is not safe
3011 to strip NOPs. */
3012 if (element_precision (TREE_TYPE (arg0))
3013 != element_precision (TREE_TYPE (arg1)))
3014 return false;
3015
3016 STRIP_NOPS (arg0);
3017 STRIP_NOPS (arg1);
3018 }
3019 #if 0
3020 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3021 sanity check once the issue is solved. */
3022 else
3023 /* Addresses of conversions and SSA_NAMEs (and many other things)
3024 are not defined. Check that we did not forget to drop the
3025 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3026 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3027 && TREE_CODE (arg0) != SSA_NAME);
3028 #endif
3029
3030 /* In case both args are comparisons but with different comparison
3031 code, try to swap the comparison operands of one arg to produce
3032 a match and compare that variant. */
3033 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3034 && COMPARISON_CLASS_P (arg0)
3035 && COMPARISON_CLASS_P (arg1))
3036 {
3037 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3038
3039 if (TREE_CODE (arg0) == swap_code)
3040 return operand_equal_p (TREE_OPERAND (arg0, 0),
3041 TREE_OPERAND (arg1, 1), flags)
3042 && operand_equal_p (TREE_OPERAND (arg0, 1),
3043 TREE_OPERAND (arg1, 0), flags);
3044 }
3045
3046 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3047 {
3048 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3049 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3050 ;
3051 else if (flags & OEP_ADDRESS_OF)
3052 {
3053 /* If we are interested in comparing addresses ignore
3054 MEM_REF wrappings of the base that can appear just for
3055 TBAA reasons. */
3056 if (TREE_CODE (arg0) == MEM_REF
3057 && DECL_P (arg1)
3058 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3059 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3060 && integer_zerop (TREE_OPERAND (arg0, 1)))
3061 return true;
3062 else if (TREE_CODE (arg1) == MEM_REF
3063 && DECL_P (arg0)
3064 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3065 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3066 && integer_zerop (TREE_OPERAND (arg1, 1)))
3067 return true;
3068 return false;
3069 }
3070 else
3071 return false;
3072 }
3073
3074 /* When not checking addresses, this is needed for conversions and for
3075 COMPONENT_REF. Might as well play it safe and always test this. */
3076 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3077 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3078 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3079 && !(flags & OEP_ADDRESS_OF)))
3080 return false;
3081
3082 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3083 We don't care about side effects in that case because the SAVE_EXPR
3084 takes care of that for us. In all other cases, two expressions are
3085 equal if they have no side effects. If we have two identical
3086 expressions with side effects that should be treated the same due
3087 to the only side effects being identical SAVE_EXPR's, that will
3088 be detected in the recursive calls below.
3089 If we are taking an invariant address of two identical objects
3090 they are necessarily equal as well. */
3091 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3092 && (TREE_CODE (arg0) == SAVE_EXPR
3093 || (flags & OEP_MATCH_SIDE_EFFECTS)
3094 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3095 return true;
3096
3097 /* Next handle constant cases, those for which we can return true even
3098 if OEP_ONLY_CONST is set. */
3099 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3100 switch (TREE_CODE (arg0))
3101 {
3102 case INTEGER_CST:
3103 return tree_int_cst_equal (arg0, arg1);
3104
3105 case FIXED_CST:
3106 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3107 TREE_FIXED_CST (arg1));
3108
3109 case REAL_CST:
3110 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3111 return true;
3112
3113 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3114 {
3115 /* If we do not distinguish between signed and unsigned zero,
3116 consider them equal. */
3117 if (real_zerop (arg0) && real_zerop (arg1))
3118 return true;
3119 }
3120 return false;
3121
3122 case VECTOR_CST:
3123 {
3124 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3125 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3126 return false;
3127
3128 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3129 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3130 return false;
3131
3132 unsigned int count = vector_cst_encoded_nelts (arg0);
3133 for (unsigned int i = 0; i < count; ++i)
3134 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3135 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3136 return false;
3137 return true;
3138 }
3139
3140 case COMPLEX_CST:
3141 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3142 flags)
3143 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3144 flags));
3145
3146 case STRING_CST:
3147 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3148 && ! memcmp (TREE_STRING_POINTER (arg0),
3149 TREE_STRING_POINTER (arg1),
3150 TREE_STRING_LENGTH (arg0)));
3151
3152 case ADDR_EXPR:
3153 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3154 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3155 flags | OEP_ADDRESS_OF
3156 | OEP_MATCH_SIDE_EFFECTS);
3157 case CONSTRUCTOR:
3158 /* In GIMPLE empty constructors are allowed in initializers of
3159 aggregates. */
3160 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3161 default:
3162 break;
3163 }
3164
3165 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3166 two instances of undefined behavior will give identical results. */
3167 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3168 return false;
3169
3170 /* Define macros to test an operand from arg0 and arg1 for equality and a
3171 variant that allows null and views null as being different from any
3172 non-null value. In the latter case, if either is null, both
3173 must be; otherwise, do the normal comparison. */
3174 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3175 TREE_OPERAND (arg1, N), flags)
3176
3177 #define OP_SAME_WITH_NULL(N) \
3178 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3179 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3180
3181 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3182 {
3183 case tcc_unary:
3184 /* Two conversions are equal only if signedness and modes match. */
3185 switch (TREE_CODE (arg0))
3186 {
3187 CASE_CONVERT:
3188 case FIX_TRUNC_EXPR:
3189 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3190 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3191 return false;
3192 break;
3193 default:
3194 break;
3195 }
3196
3197 return OP_SAME (0);
3198
3199
3200 case tcc_comparison:
3201 case tcc_binary:
3202 if (OP_SAME (0) && OP_SAME (1))
3203 return true;
3204
3205 /* For commutative ops, allow the other order. */
3206 return (commutative_tree_code (TREE_CODE (arg0))
3207 && operand_equal_p (TREE_OPERAND (arg0, 0),
3208 TREE_OPERAND (arg1, 1), flags)
3209 && operand_equal_p (TREE_OPERAND (arg0, 1),
3210 TREE_OPERAND (arg1, 0), flags));
3211
3212 case tcc_reference:
3213 /* If either of the pointer (or reference) expressions we are
3214 dereferencing contain a side effect, these cannot be equal,
3215 but their addresses can be. */
3216 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3217 && (TREE_SIDE_EFFECTS (arg0)
3218 || TREE_SIDE_EFFECTS (arg1)))
3219 return false;
3220
3221 switch (TREE_CODE (arg0))
3222 {
3223 case INDIRECT_REF:
3224 if (!(flags & OEP_ADDRESS_OF))
3225 {
3226 if (TYPE_ALIGN (TREE_TYPE (arg0))
3227 != TYPE_ALIGN (TREE_TYPE (arg1)))
3228 return false;
3229 /* Verify that the access types are compatible. */
3230 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3231 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3232 return false;
3233 }
3234 flags &= ~OEP_ADDRESS_OF;
3235 return OP_SAME (0);
3236
3237 case IMAGPART_EXPR:
3238 /* Require the same offset. */
3239 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3240 TYPE_SIZE (TREE_TYPE (arg1)),
3241 flags & ~OEP_ADDRESS_OF))
3242 return false;
3243
3244 /* Fallthru. */
3245 case REALPART_EXPR:
3246 case VIEW_CONVERT_EXPR:
3247 return OP_SAME (0);
3248
3249 case TARGET_MEM_REF:
3250 case MEM_REF:
3251 if (!(flags & OEP_ADDRESS_OF))
3252 {
3253 /* Require equal access sizes. */
3254 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3255 && (!TYPE_SIZE (TREE_TYPE (arg0))
3256 || !TYPE_SIZE (TREE_TYPE (arg1))
3257 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3258 TYPE_SIZE (TREE_TYPE (arg1)),
3259 flags)))
3260 return false;
3261 /* Verify that access happens in similar types. */
3262 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3263 return false;
3264 /* Verify that accesses are TBAA compatible. */
3265 if (!alias_ptr_types_compatible_p
3266 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3267 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3268 || (MR_DEPENDENCE_CLIQUE (arg0)
3269 != MR_DEPENDENCE_CLIQUE (arg1))
3270 || (MR_DEPENDENCE_BASE (arg0)
3271 != MR_DEPENDENCE_BASE (arg1)))
3272 return false;
3273 /* Verify that alignment is compatible. */
3274 if (TYPE_ALIGN (TREE_TYPE (arg0))
3275 != TYPE_ALIGN (TREE_TYPE (arg1)))
3276 return false;
3277 }
3278 flags &= ~OEP_ADDRESS_OF;
3279 return (OP_SAME (0) && OP_SAME (1)
3280 /* TARGET_MEM_REFs require equal extra operands. */
3281 && (TREE_CODE (arg0) != TARGET_MEM_REF
3282 || (OP_SAME_WITH_NULL (2)
3283 && OP_SAME_WITH_NULL (3)
3284 && OP_SAME_WITH_NULL (4))));
3285
3286 case ARRAY_REF:
3287 case ARRAY_RANGE_REF:
3288 if (!OP_SAME (0))
3289 return false;
3290 flags &= ~OEP_ADDRESS_OF;
3291 /* Compare the array index by value if it is constant first as we
3292 may have different types but same value here. */
3293 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3294 TREE_OPERAND (arg1, 1))
3295 || OP_SAME (1))
3296 && OP_SAME_WITH_NULL (2)
3297 && OP_SAME_WITH_NULL (3)
3298 /* Compare low bound and element size as with OEP_ADDRESS_OF
3299 we have to account for the offset of the ref. */
3300 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3301 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3302 || (operand_equal_p (array_ref_low_bound
3303 (CONST_CAST_TREE (arg0)),
3304 array_ref_low_bound
3305 (CONST_CAST_TREE (arg1)), flags)
3306 && operand_equal_p (array_ref_element_size
3307 (CONST_CAST_TREE (arg0)),
3308 array_ref_element_size
3309 (CONST_CAST_TREE (arg1)),
3310 flags))));
3311
3312 case COMPONENT_REF:
3313 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3314 may be NULL when we're called to compare MEM_EXPRs. */
3315 if (!OP_SAME_WITH_NULL (0)
3316 || !OP_SAME (1))
3317 return false;
3318 flags &= ~OEP_ADDRESS_OF;
3319 return OP_SAME_WITH_NULL (2);
3320
3321 case BIT_FIELD_REF:
3322 if (!OP_SAME (0))
3323 return false;
3324 flags &= ~OEP_ADDRESS_OF;
3325 return OP_SAME (1) && OP_SAME (2);
3326
3327 /* Virtual table call. */
3328 case OBJ_TYPE_REF:
3329 {
3330 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3331 OBJ_TYPE_REF_EXPR (arg1), flags))
3332 return false;
3333 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3334 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3335 return false;
3336 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3337 OBJ_TYPE_REF_OBJECT (arg1), flags))
3338 return false;
3339 if (!types_same_for_odr (obj_type_ref_class (arg0),
3340 obj_type_ref_class (arg1)))
3341 return false;
3342 return true;
3343 }
3344
3345 default:
3346 return false;
3347 }
3348
3349 case tcc_expression:
3350 switch (TREE_CODE (arg0))
3351 {
3352 case ADDR_EXPR:
3353 /* Be sure we pass right ADDRESS_OF flag. */
3354 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3355 return operand_equal_p (TREE_OPERAND (arg0, 0),
3356 TREE_OPERAND (arg1, 0),
3357 flags | OEP_ADDRESS_OF);
3358
3359 case TRUTH_NOT_EXPR:
3360 return OP_SAME (0);
3361
3362 case TRUTH_ANDIF_EXPR:
3363 case TRUTH_ORIF_EXPR:
3364 return OP_SAME (0) && OP_SAME (1);
3365
3366 case WIDEN_MULT_PLUS_EXPR:
3367 case WIDEN_MULT_MINUS_EXPR:
3368 if (!OP_SAME (2))
3369 return false;
3370 /* The multiplication operands are commutative. */
3371 /* FALLTHRU */
3372
3373 case TRUTH_AND_EXPR:
3374 case TRUTH_OR_EXPR:
3375 case TRUTH_XOR_EXPR:
3376 if (OP_SAME (0) && OP_SAME (1))
3377 return true;
3378
3379 /* Otherwise take into account this is a commutative operation. */
3380 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3381 TREE_OPERAND (arg1, 1), flags)
3382 && operand_equal_p (TREE_OPERAND (arg0, 1),
3383 TREE_OPERAND (arg1, 0), flags));
3384
3385 case COND_EXPR:
3386 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3387 return false;
3388 flags &= ~OEP_ADDRESS_OF;
3389 return OP_SAME (0);
3390
3391 case BIT_INSERT_EXPR:
3392 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3393 of op1. Need to check to make sure they are the same. */
3394 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3395 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3396 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3397 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3398 return false;
3399 /* FALLTHRU */
3400
3401 case VEC_COND_EXPR:
3402 case DOT_PROD_EXPR:
3403 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3404
3405 case MODIFY_EXPR:
3406 case INIT_EXPR:
3407 case COMPOUND_EXPR:
3408 case PREDECREMENT_EXPR:
3409 case PREINCREMENT_EXPR:
3410 case POSTDECREMENT_EXPR:
3411 case POSTINCREMENT_EXPR:
3412 if (flags & OEP_LEXICOGRAPHIC)
3413 return OP_SAME (0) && OP_SAME (1);
3414 return false;
3415
3416 case CLEANUP_POINT_EXPR:
3417 case EXPR_STMT:
3418 case SAVE_EXPR:
3419 if (flags & OEP_LEXICOGRAPHIC)
3420 return OP_SAME (0);
3421 return false;
3422
3423 default:
3424 return false;
3425 }
3426
3427 case tcc_vl_exp:
3428 switch (TREE_CODE (arg0))
3429 {
3430 case CALL_EXPR:
3431 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3432 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3433 /* If the CALL_EXPRs are not both internal or both normal function
3434 calls, then they are not equal. */
3435 return false;
3436 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3437 {
3438 /* If the CALL_EXPRs call different internal functions, then they
3439 are not equal. */
3440 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3441 return false;
3442 }
3443 else
3444 {
3445 /* If the CALL_EXPRs call different functions, then they are not
3446 equal. */
3447 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3448 flags))
3449 return false;
3450 }
3451
3452 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3453 {
3454 unsigned int cef = call_expr_flags (arg0);
3455 if (flags & OEP_PURE_SAME)
3456 cef &= ECF_CONST | ECF_PURE;
3457 else
3458 cef &= ECF_CONST;
3459 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3460 return false;
3461 }
3462
3463 /* Now see if all the arguments are the same. */
3464 {
3465 const_call_expr_arg_iterator iter0, iter1;
3466 const_tree a0, a1;
3467 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3468 a1 = first_const_call_expr_arg (arg1, &iter1);
3469 a0 && a1;
3470 a0 = next_const_call_expr_arg (&iter0),
3471 a1 = next_const_call_expr_arg (&iter1))
3472 if (! operand_equal_p (a0, a1, flags))
3473 return false;
3474
3475 /* If we get here and both argument lists are exhausted
3476 then the CALL_EXPRs are equal. */
3477 return ! (a0 || a1);
3478 }
3479 default:
3480 return false;
3481 }
3482
3483 case tcc_declaration:
3484 /* Consider __builtin_sqrt equal to sqrt. */
3485 return (TREE_CODE (arg0) == FUNCTION_DECL
3486 && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3487 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3488 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3489 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3490
3491 case tcc_exceptional:
3492 if (TREE_CODE (arg0) == CONSTRUCTOR)
3493 {
3494 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3495 return false;
3496
3497 /* In GIMPLE constructors are used only to build vectors from
3498 elements. Individual elements in the constructor must be
3499 indexed in increasing order and form an initial sequence.
3500
3501 We make no effort to compare constructors in GENERIC.
3502 (see sem_variable::equals in ipa-icf which can do so for
3503 constants). */
3504 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3505 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3506 return false;
3507
3508 /* Be sure that the constructed vectors have the same representation.
3509 So far we have only checked that element precisions and modes match.
3510 Vectors may be BLKmode, so also check that the numbers of
3511 parts match. */
3512 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3513 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3514 return false;
3515
3516 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3517 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3518 unsigned int len = vec_safe_length (v0);
3519
3520 if (len != vec_safe_length (v1))
3521 return false;
3522
3523 for (unsigned int i = 0; i < len; i++)
3524 {
3525 constructor_elt *c0 = &(*v0)[i];
3526 constructor_elt *c1 = &(*v1)[i];
3527
3528 if (!operand_equal_p (c0->value, c1->value, flags)
3529 /* In GIMPLE the indexes can be either NULL or matching i.
3530 Double check this so we won't get false
3531 positives for GENERIC. */
3532 || (c0->index
3533 && (TREE_CODE (c0->index) != INTEGER_CST
3534 || compare_tree_int (c0->index, i)))
3535 || (c1->index
3536 && (TREE_CODE (c1->index) != INTEGER_CST
3537 || compare_tree_int (c1->index, i))))
3538 return false;
3539 }
3540 return true;
3541 }
3542 else if (TREE_CODE (arg0) == STATEMENT_LIST
3543 && (flags & OEP_LEXICOGRAPHIC))
3544 {
3545 /* Compare the STATEMENT_LISTs. */
3546 tree_stmt_iterator tsi1, tsi2;
3547 tree body1 = CONST_CAST_TREE (arg0);
3548 tree body2 = CONST_CAST_TREE (arg1);
3549 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3550 tsi_next (&tsi1), tsi_next (&tsi2))
3551 {
3552 /* The lists don't have the same number of statements. */
3553 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3554 return false;
3555 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3556 return true;
3557 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3558 flags & (OEP_LEXICOGRAPHIC
3559 | OEP_NO_HASH_CHECK)))
3560 return false;
3561 }
3562 }
3563 return false;
3564
3565 case tcc_statement:
3566 switch (TREE_CODE (arg0))
3567 {
3568 case RETURN_EXPR:
3569 if (flags & OEP_LEXICOGRAPHIC)
3570 return OP_SAME_WITH_NULL (0);
3571 return false;
3572 case DEBUG_BEGIN_STMT:
3573 if (flags & OEP_LEXICOGRAPHIC)
3574 return true;
3575 return false;
3576 default:
3577 return false;
3578 }
3579
3580 default:
3581 return false;
3582 }
3583
3584 #undef OP_SAME
3585 #undef OP_SAME_WITH_NULL
3586 }
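/* A hedged usage sketch (illustrative; dconst0 and real_value_negate
   are from real.h): signed zeros compare equal only when the target
   does not honor them, while OEP_BITWISE demands bit-for-bit identity:

     tree z = build_real (double_type_node, dconst0);
     tree nz = build_real (double_type_node, real_value_negate (&dconst0));
     operand_equal_p (z, nz, 0);            // true iff !HONOR_SIGNED_ZEROS
     operand_equal_p (z, nz, OEP_BITWISE);  // false  */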
3587
3588 /* Generate a hash value for an expression. This can be used iteratively
3589 by passing a previous result as the HSTATE argument. */
3590
3591 void
3592 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3593 unsigned int flags)
3594 {
3595 int i;
3596 enum tree_code code;
3597 enum tree_code_class tclass;
3598
3599 if (t == NULL_TREE || t == error_mark_node)
3600 {
3601 hstate.merge_hash (0);
3602 return;
3603 }
3604
3605 STRIP_ANY_LOCATION_WRAPPER (t);
3606
3607 if (!(flags & OEP_ADDRESS_OF))
3608 STRIP_NOPS (t);
3609
3610 code = TREE_CODE (t);
3611
3612 switch (code)
3613 {
3614 /* Alas, constants aren't shared, so we can't rely on pointer
3615 identity. */
3616 case VOID_CST:
3617 hstate.merge_hash (0);
3618 return;
3619 case INTEGER_CST:
3620 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3621 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3622 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3623 return;
3624 case REAL_CST:
3625 {
3626 unsigned int val2;
3627 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3628 val2 = rvc_zero;
3629 else
3630 val2 = real_hash (TREE_REAL_CST_PTR (t));
3631 hstate.merge_hash (val2);
3632 return;
3633 }
3634 case FIXED_CST:
3635 {
3636 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3637 hstate.merge_hash (val2);
3638 return;
3639 }
3640 case STRING_CST:
3641 hstate.add ((const void *) TREE_STRING_POINTER (t),
3642 TREE_STRING_LENGTH (t));
3643 return;
3644 case COMPLEX_CST:
3645 hash_operand (TREE_REALPART (t), hstate, flags);
3646 hash_operand (TREE_IMAGPART (t), hstate, flags);
3647 return;
3648 case VECTOR_CST:
3649 {
3650 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3651 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3652 unsigned int count = vector_cst_encoded_nelts (t);
3653 for (unsigned int i = 0; i < count; ++i)
3654 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3655 return;
3656 }
3657 case SSA_NAME:
3658 /* We can just compare by pointer. */
3659 hstate.add_hwi (SSA_NAME_VERSION (t));
3660 return;
3661 case PLACEHOLDER_EXPR:
3662 /* The node itself doesn't matter. */
3663 return;
3664 case BLOCK:
3665 case OMP_CLAUSE:
3666 /* Ignore. */
3667 return;
3668 case TREE_LIST:
3669 /* A list of expressions, for a CALL_EXPR or as the elements of a
3670 VECTOR_CST. */
3671 for (; t; t = TREE_CHAIN (t))
3672 hash_operand (TREE_VALUE (t), hstate, flags);
3673 return;
3674 case CONSTRUCTOR:
3675 {
3676 unsigned HOST_WIDE_INT idx;
3677 tree field, value;
3678 flags &= ~OEP_ADDRESS_OF;
3679 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3680 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3681 {
3682 /* In GIMPLE the indexes can be either NULL or matching i. */
3683 if (field == NULL_TREE)
3684 field = bitsize_int (idx);
3685 hash_operand (field, hstate, flags);
3686 hash_operand (value, hstate, flags);
3687 }
3688 return;
3689 }
3690 case STATEMENT_LIST:
3691 {
3692 tree_stmt_iterator i;
3693 for (i = tsi_start (CONST_CAST_TREE (t));
3694 !tsi_end_p (i); tsi_next (&i))
3695 hash_operand (tsi_stmt (i), hstate, flags);
3696 return;
3697 }
3698 case TREE_VEC:
3699 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3700 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3701 return;
3702 case IDENTIFIER_NODE:
3703 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3704 return;
3705 case FUNCTION_DECL:
3706 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3707 Otherwise nodes that compare equal according to operand_equal_p might
3708 get different hash codes. However, don't do this for machine specific
3709 or front end builtins, since the function code is overloaded in those
3710 cases. */
3711 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3712 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3713 {
3714 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3715 code = TREE_CODE (t);
3716 }
3717 /* FALL THROUGH */
3718 default:
3719 if (POLY_INT_CST_P (t))
3720 {
3721 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3722 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3723 return;
3724 }
3725 tclass = TREE_CODE_CLASS (code);
3726
3727 if (tclass == tcc_declaration)
3728 {
3729 	  /* DECLs have a unique ID.  */
3730 hstate.add_hwi (DECL_UID (t));
3731 }
3732 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3733 {
3734 /* For comparisons that can be swapped, use the lower
3735 tree code. */
3736 enum tree_code ccode = swap_tree_comparison (code);
3737 if (code < ccode)
3738 ccode = code;
3739 hstate.add_object (ccode);
3740 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3741 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3742 }
3743 else if (CONVERT_EXPR_CODE_P (code))
3744 {
3745 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3746 operand_equal_p. */
3747 enum tree_code ccode = NOP_EXPR;
3748 hstate.add_object (ccode);
3749
3750 /* Don't hash the type, that can lead to having nodes which
3751 compare equal according to operand_equal_p, but which
3752 have different hash codes. Make sure to include signedness
3753 in the hash computation. */
3754 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3755 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3756 }
3757 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3758 else if (code == MEM_REF
3759 && (flags & OEP_ADDRESS_OF) != 0
3760 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3761 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3762 && integer_zerop (TREE_OPERAND (t, 1)))
3763 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3764 hstate, flags);
3765 /* Don't ICE on FE specific trees, or their arguments etc.
3766 during operand_equal_p hash verification. */
3767 else if (!IS_EXPR_CODE_CLASS (tclass))
3768 gcc_assert (flags & OEP_HASH_CHECK);
3769 else
3770 {
3771 unsigned int sflags = flags;
3772
3773 hstate.add_object (code);
3774
3775 switch (code)
3776 {
3777 case ADDR_EXPR:
3778 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3779 flags |= OEP_ADDRESS_OF;
3780 sflags = flags;
3781 break;
3782
3783 case INDIRECT_REF:
3784 case MEM_REF:
3785 case TARGET_MEM_REF:
3786 flags &= ~OEP_ADDRESS_OF;
3787 sflags = flags;
3788 break;
3789
3790 case ARRAY_REF:
3791 case ARRAY_RANGE_REF:
3792 case COMPONENT_REF:
3793 case BIT_FIELD_REF:
3794 sflags &= ~OEP_ADDRESS_OF;
3795 break;
3796
3797 case COND_EXPR:
3798 flags &= ~OEP_ADDRESS_OF;
3799 break;
3800
3801 case WIDEN_MULT_PLUS_EXPR:
3802 case WIDEN_MULT_MINUS_EXPR:
3803 {
3804 /* The multiplication operands are commutative. */
3805 inchash::hash one, two;
3806 hash_operand (TREE_OPERAND (t, 0), one, flags);
3807 hash_operand (TREE_OPERAND (t, 1), two, flags);
3808 hstate.add_commutative (one, two);
3809 hash_operand (TREE_OPERAND (t, 2), two, flags);
3810 return;
3811 }
3812
3813 case CALL_EXPR:
3814 if (CALL_EXPR_FN (t) == NULL_TREE)
3815 hstate.add_int (CALL_EXPR_IFN (t));
3816 break;
3817
3818 case TARGET_EXPR:
3819 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3820 	       Usually different TARGET_EXPRs should just use
3821 different temporaries in their slots. */
3822 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3823 return;
3824
3825 /* Virtual table call. */
3826 case OBJ_TYPE_REF:
3827 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3828 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3829 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3830 return;
3831 default:
3832 break;
3833 }
3834
3835 /* Don't hash the type, that can lead to having nodes which
3836 compare equal according to operand_equal_p, but which
3837 have different hash codes. */
3838 if (code == NON_LVALUE_EXPR)
3839 {
3840 	      /* Make sure to include signedness in the hash computation.  */
3841 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3842 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3843 }
3844
3845 else if (commutative_tree_code (code))
3846 {
3847 /* It's a commutative expression. We want to hash it the same
3848 however it appears. We do this by first hashing both operands
3849 and then rehashing based on the order of their independent
3850 hashes. */
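	      /* E.g. A + B and B + A then receive identical hashes, since
		 the pair of operand hashes is combined in an
		 order-independent way.  */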
3851 inchash::hash one, two;
3852 hash_operand (TREE_OPERAND (t, 0), one, flags);
3853 hash_operand (TREE_OPERAND (t, 1), two, flags);
3854 hstate.add_commutative (one, two);
3855 }
3856 else
3857 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3858 hash_operand (TREE_OPERAND (t, i), hstate,
3859 i == 0 ? flags : sflags);
3860 }
3861 return;
3862 }
3863 }
3864
3865 bool
3866 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3867 unsigned int flags, bool *ret)
3868 {
3869 /* When checking, verify at the outermost operand_equal_p call that
3870      if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
3871 hash value. */
3872 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3873 {
3874 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3875 {
3876 if (arg0 != arg1)
3877 {
3878 inchash::hash hstate0 (0), hstate1 (0);
3879 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3880 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3881 hashval_t h0 = hstate0.end ();
3882 hashval_t h1 = hstate1.end ();
3883 gcc_assert (h0 == h1);
3884 }
3885 *ret = true;
3886 }
3887 else
3888 *ret = false;
3889
3890 return true;
3891 }
3892
3893 return false;
3894 }
3895
3896
3897 static operand_compare default_compare_instance;
3898
3899 /* Convenience wrapper around the operand_compare class, since usually we do
3900    not need to play with the valueizer.  */
3901
3902 bool
3903 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3904 {
3905 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3906 }
3907
3908 namespace inchash
3909 {
3910
3911 /* Generate a hash value for an expression. This can be used iteratively
3912 by passing a previous result as the HSTATE argument.
3913
3914 This function is intended to produce the same hash for expressions which
3915 would compare equal using operand_equal_p. */
3916 void
3917 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3918 {
3919 default_compare_instance.hash_operand (t, hstate, flags);
3920 }
3921
3922 }
3923 \f
3924 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3925 with a different signedness or a narrower precision. */
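/* For instance, with int X this accepts (unsigned int) X as a variant of X
   (same mode, different signedness), and with short S it accepts (int) S
   as ARG1 when ARG0 is S (a single widening conversion).  */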
3926
3927 static bool
3928 operand_equal_for_comparison_p (tree arg0, tree arg1)
3929 {
3930 if (operand_equal_p (arg0, arg1, 0))
3931 return true;
3932
3933 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3934 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3935 return false;
3936
3937 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3938 and see if the inner values are the same. This removes any
3939 signedness comparison, which doesn't matter here. */
3940 tree op0 = arg0;
3941 tree op1 = arg1;
3942 STRIP_NOPS (op0);
3943 STRIP_NOPS (op1);
3944 if (operand_equal_p (op0, op1, 0))
3945 return true;
3946
3947 /* Discard a single widening conversion from ARG1 and see if the inner
3948 value is the same as ARG0. */
3949 if (CONVERT_EXPR_P (arg1)
3950 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3951 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3952 < TYPE_PRECISION (TREE_TYPE (arg1))
3953 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3954 return true;
3955
3956 return false;
3957 }
3958 \f
3959 /* See if ARG is an expression that is either a comparison or is performing
3960 arithmetic on comparisons. The comparisons must only be comparing
3961 two different values, which will be stored in *CVAL1 and *CVAL2; if
3962 they are nonzero it means that some operands have already been found.
3963 No variables may be used anywhere else in the expression except in the
3964 comparisons.
3965
3966    If this is true, return true.  Otherwise, return false.  */
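/* For instance, a < b && a > b qualifies, setting *CVAL1 and *CVAL2 to
   a and b, while a < b && a > c is rejected because it would involve
   three distinct values.  */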
3967
3968 static bool
3969 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3970 {
3971 enum tree_code code = TREE_CODE (arg);
3972 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3973
3974 /* We can handle some of the tcc_expression cases here. */
3975 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3976 tclass = tcc_unary;
3977 else if (tclass == tcc_expression
3978 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3979 || code == COMPOUND_EXPR))
3980 tclass = tcc_binary;
3981
3982 switch (tclass)
3983 {
3984 case tcc_unary:
3985 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3986
3987 case tcc_binary:
3988 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3989 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3990
3991 case tcc_constant:
3992 return true;
3993
3994 case tcc_expression:
3995 if (code == COND_EXPR)
3996 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3997 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3998 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3999 return false;
4000
4001 case tcc_comparison:
4002 /* First see if we can handle the first operand, then the second. For
4003 the second operand, we know *CVAL1 can't be zero. It must be that
4004 one side of the comparison is each of the values; test for the
4005 case where this isn't true by failing if the two operands
4006 are the same. */
4007
4008 if (operand_equal_p (TREE_OPERAND (arg, 0),
4009 TREE_OPERAND (arg, 1), 0))
4010 return false;
4011
4012 if (*cval1 == 0)
4013 *cval1 = TREE_OPERAND (arg, 0);
4014 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4015 ;
4016 else if (*cval2 == 0)
4017 *cval2 = TREE_OPERAND (arg, 0);
4018 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4019 ;
4020 else
4021 return false;
4022
4023 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4024 ;
4025 else if (*cval2 == 0)
4026 *cval2 = TREE_OPERAND (arg, 1);
4027 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4028 ;
4029 else
4030 return false;
4031
4032 return true;
4033
4034 default:
4035 return false;
4036 }
4037 }
4038 \f
4039 /* ARG is a tree that is known to contain just arithmetic operations and
4040 comparisons. Evaluate the operations in the tree substituting NEW0 for
4041 any occurrence of OLD0 as an operand of a comparison and likewise for
4042 NEW1 and OLD1. */
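/* For instance (with hypothetical operands), applying OLD0 -> NEW0 and
   OLD1 -> NEW1 to (OLD0 < 10) && (OLD1 > OLD0) rebuilds it as
   (NEW0 < 10) && (NEW1 > NEW0); only operands of comparisons change.  */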
4043
4044 static tree
4045 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4046 tree old1, tree new1)
4047 {
4048 tree type = TREE_TYPE (arg);
4049 enum tree_code code = TREE_CODE (arg);
4050 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4051
4052 /* We can handle some of the tcc_expression cases here. */
4053 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4054 tclass = tcc_unary;
4055 else if (tclass == tcc_expression
4056 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4057 tclass = tcc_binary;
4058
4059 switch (tclass)
4060 {
4061 case tcc_unary:
4062 return fold_build1_loc (loc, code, type,
4063 eval_subst (loc, TREE_OPERAND (arg, 0),
4064 old0, new0, old1, new1));
4065
4066 case tcc_binary:
4067 return fold_build2_loc (loc, code, type,
4068 eval_subst (loc, TREE_OPERAND (arg, 0),
4069 old0, new0, old1, new1),
4070 eval_subst (loc, TREE_OPERAND (arg, 1),
4071 old0, new0, old1, new1));
4072
4073 case tcc_expression:
4074 switch (code)
4075 {
4076 case SAVE_EXPR:
4077 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4078 old1, new1);
4079
4080 case COMPOUND_EXPR:
4081 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4082 old1, new1);
4083
4084 case COND_EXPR:
4085 return fold_build3_loc (loc, code, type,
4086 eval_subst (loc, TREE_OPERAND (arg, 0),
4087 old0, new0, old1, new1),
4088 eval_subst (loc, TREE_OPERAND (arg, 1),
4089 old0, new0, old1, new1),
4090 eval_subst (loc, TREE_OPERAND (arg, 2),
4091 old0, new0, old1, new1));
4092 default:
4093 break;
4094 }
4095 /* Fall through - ??? */
4096
4097 case tcc_comparison:
4098 {
4099 tree arg0 = TREE_OPERAND (arg, 0);
4100 tree arg1 = TREE_OPERAND (arg, 1);
4101
4102 /* We need to check both for exact equality and tree equality. The
4103 former will be true if the operand has a side-effect. In that
4104 case, we know the operand occurred exactly once. */
4105
4106 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4107 arg0 = new0;
4108 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4109 arg0 = new1;
4110
4111 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4112 arg1 = new0;
4113 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4114 arg1 = new1;
4115
4116 return fold_build2_loc (loc, code, type, arg0, arg1);
4117 }
4118
4119 default:
4120 return arg;
4121 }
4122 }
4123 \f
4124 /* Return a tree for the case when the result of an expression is RESULT
4125 converted to TYPE and OMITTED was previously an operand of the expression
4126 but is now not needed (e.g., we folded OMITTED * 0).
4127
4128 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4129 the conversion of RESULT to TYPE. */
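/* For instance, when OMITTED is a call f () whose product with 0 was
   folded away, the result is roughly (f (), RESULT): a COMPOUND_EXPR
   that still evaluates f () for its side effects.  */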
4130
4131 tree
4132 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4133 {
4134 tree t = fold_convert_loc (loc, type, result);
4135
4136 /* If the resulting operand is an empty statement, just return the omitted
4137      statement cast to void.  */
4138 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4139 return build1_loc (loc, NOP_EXPR, void_type_node,
4140 fold_ignored_result (omitted));
4141
4142 if (TREE_SIDE_EFFECTS (omitted))
4143 return build2_loc (loc, COMPOUND_EXPR, type,
4144 fold_ignored_result (omitted), t);
4145
4146 return non_lvalue_loc (loc, t);
4147 }
4148
4149 /* Return a tree for the case when the result of an expression is RESULT
4150 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4151 of the expression but are now not needed.
4152
4153 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4154 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4155 evaluated before OMITTED2. Otherwise, if neither has side effects,
4156 just do the conversion of RESULT to TYPE. */
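/* For instance, if OMITTED1 and OMITTED2 are calls f () and g (), the
   result is roughly (f (), (g (), RESULT)), preserving both calls and
   their original evaluation order.  */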
4157
4158 tree
4159 omit_two_operands_loc (location_t loc, tree type, tree result,
4160 tree omitted1, tree omitted2)
4161 {
4162 tree t = fold_convert_loc (loc, type, result);
4163
4164 if (TREE_SIDE_EFFECTS (omitted2))
4165 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4166 if (TREE_SIDE_EFFECTS (omitted1))
4167 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4168
4169 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4170 }
4171
4172 \f
4173 /* Return a simplified tree node for the truth-negation of ARG. This
4174 never alters ARG itself. We assume that ARG is an operation that
4175 returns a truth value (0 or 1).
4176
4177 FIXME: one would think we would fold the result, but it causes
4178 problems with the dominator optimizer. */
4179
4180 static tree
4181 fold_truth_not_expr (location_t loc, tree arg)
4182 {
4183 tree type = TREE_TYPE (arg);
4184 enum tree_code code = TREE_CODE (arg);
4185 location_t loc1, loc2;
4186
4187 /* If this is a comparison, we can simply invert it, except for
4188 floating-point non-equality comparisons, in which case we just
4189 enclose a TRUTH_NOT_EXPR around what we have. */
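/* For example, ! (x < y) can become x >= y for integers, but not for IEEE
   floats, where x < y and x >= y are both false on unordered operands
   (NaNs); there the inverse is an unordered variant such as UNGE_EXPR,
   if one can be used at all.  */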
4190
4191 if (TREE_CODE_CLASS (code) == tcc_comparison)
4192 {
4193 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4194 if (FLOAT_TYPE_P (op_type)
4195 && flag_trapping_math
4196 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4197 && code != NE_EXPR && code != EQ_EXPR)
4198 return NULL_TREE;
4199
4200 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4201 if (code == ERROR_MARK)
4202 return NULL_TREE;
4203
4204 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4205 TREE_OPERAND (arg, 1));
4206 if (TREE_NO_WARNING (arg))
4207 TREE_NO_WARNING (ret) = 1;
4208 return ret;
4209 }
4210
4211 switch (code)
4212 {
4213 case INTEGER_CST:
4214 return constant_boolean_node (integer_zerop (arg), type);
4215
4216 case TRUTH_AND_EXPR:
4217 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4218 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4219 return build2_loc (loc, TRUTH_OR_EXPR, type,
4220 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4221 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4222
4223 case TRUTH_OR_EXPR:
4224 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4225 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4226 return build2_loc (loc, TRUTH_AND_EXPR, type,
4227 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4228 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4229
4230 case TRUTH_XOR_EXPR:
4231 /* Here we can invert either operand. We invert the first operand
4232 unless the second operand is a TRUTH_NOT_EXPR in which case our
4233 result is the XOR of the first operand with the inside of the
4234 negation of the second operand. */
4235
4236 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4237 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4238 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4239 else
4240 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4241 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4242 TREE_OPERAND (arg, 1));
4243
4244 case TRUTH_ANDIF_EXPR:
4245 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4246 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4247 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4248 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4249 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4250
4251 case TRUTH_ORIF_EXPR:
4252 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4253 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4254 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4255 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4256 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4257
4258 case TRUTH_NOT_EXPR:
4259 return TREE_OPERAND (arg, 0);
4260
4261 case COND_EXPR:
4262 {
4263 tree arg1 = TREE_OPERAND (arg, 1);
4264 tree arg2 = TREE_OPERAND (arg, 2);
4265
4266 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4267 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4268
4269 /* A COND_EXPR may have a throw as one operand, which
4270 then has void type. Just leave void operands
4271 as they are. */
4272 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4273 VOID_TYPE_P (TREE_TYPE (arg1))
4274 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4275 VOID_TYPE_P (TREE_TYPE (arg2))
4276 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4277 }
4278
4279 case COMPOUND_EXPR:
4280 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4281 return build2_loc (loc, COMPOUND_EXPR, type,
4282 TREE_OPERAND (arg, 0),
4283 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4284
4285 case NON_LVALUE_EXPR:
4286 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4287 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4288
4289 CASE_CONVERT:
4290 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4291 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4292
4293 /* fall through */
4294
4295 case FLOAT_EXPR:
4296 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4297 return build1_loc (loc, TREE_CODE (arg), type,
4298 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4299
4300 case BIT_AND_EXPR:
4301 if (!integer_onep (TREE_OPERAND (arg, 1)))
4302 return NULL_TREE;
4303 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4304
4305 case SAVE_EXPR:
4306 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4307
4308 case CLEANUP_POINT_EXPR:
4309 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4310 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4311 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4312
4313 default:
4314 return NULL_TREE;
4315 }
4316 }
4317
4318 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4319 assume that ARG is an operation that returns a truth value (0 or 1
4320 for scalars, 0 or -1 for vectors). Return the folded expression if
4321 folding is successful. Otherwise, return NULL_TREE. */
4322
4323 static tree
4324 fold_invert_truthvalue (location_t loc, tree arg)
4325 {
4326 tree type = TREE_TYPE (arg);
4327 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4328 ? BIT_NOT_EXPR
4329 : TRUTH_NOT_EXPR,
4330 type, arg);
4331 }
4332
4333 /* Return a simplified tree node for the truth-negation of ARG. This
4334 never alters ARG itself. We assume that ARG is an operation that
4335 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4336
4337 tree
4338 invert_truthvalue_loc (location_t loc, tree arg)
4339 {
4340 if (TREE_CODE (arg) == ERROR_MARK)
4341 return arg;
4342
4343 tree type = TREE_TYPE (arg);
4344 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4345 ? BIT_NOT_EXPR
4346 : TRUTH_NOT_EXPR,
4347 type, arg);
4348 }
4349 \f
4350 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4351 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4352 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4353 is the original memory reference used to preserve the alias set of
4354 the access. */
4355
4356 static tree
4357 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4358 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4359 int unsignedp, int reversep)
4360 {
4361 tree result, bftype;
4362
4363 /* Attempt not to lose the access path if possible. */
4364 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4365 {
4366 tree ninner = TREE_OPERAND (orig_inner, 0);
4367 machine_mode nmode;
4368 poly_int64 nbitsize, nbitpos;
4369 tree noffset;
4370 int nunsignedp, nreversep, nvolatilep = 0;
4371 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4372 &noffset, &nmode, &nunsignedp,
4373 &nreversep, &nvolatilep);
4374 if (base == inner
4375 && noffset == NULL_TREE
4376 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4377 && !reversep
4378 && !nreversep
4379 && !nvolatilep)
4380 {
4381 inner = ninner;
4382 bitpos -= nbitpos;
4383 }
4384 }
4385
4386 alias_set_type iset = get_alias_set (orig_inner);
4387 if (iset == 0 && get_alias_set (inner) != iset)
4388 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4389 build_fold_addr_expr (inner),
4390 build_int_cst (ptr_type_node, 0));
4391
4392 if (known_eq (bitpos, 0) && !reversep)
4393 {
4394 tree size = TYPE_SIZE (TREE_TYPE (inner));
4395 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4396 || POINTER_TYPE_P (TREE_TYPE (inner)))
4397 && tree_fits_shwi_p (size)
4398 && tree_to_shwi (size) == bitsize)
4399 return fold_convert_loc (loc, type, inner);
4400 }
4401
4402 bftype = type;
4403 if (TYPE_PRECISION (bftype) != bitsize
4404 || TYPE_UNSIGNED (bftype) == !unsignedp)
4405 bftype = build_nonstandard_integer_type (bitsize, 0);
4406
4407 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4408 bitsize_int (bitsize), bitsize_int (bitpos));
4409 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4410
4411 if (bftype != type)
4412 result = fold_convert_loc (loc, type, result);
4413
4414 return result;
4415 }
4416
4417 /* Optimize a bit-field compare.
4418
4419 There are two cases: First is a compare against a constant and the
4420 second is a comparison of two items where the fields are at the same
4421 bit position relative to the start of a chunk (byte, halfword, word)
4422 large enough to contain it. In these cases we can avoid the shift
4423 implicit in bitfield extractions.
4424
4425 For constants, we emit a compare of the shifted constant with the
4426 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4427    compared.  For two fields at the same position, we do the ANDs with a
4428    similar mask and compare the results of the ANDs.
4429
4430 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4431 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4432 are the left and right operands of the comparison, respectively.
4433
4434 If the optimization described above can be done, we return the resulting
4435 tree. Otherwise we return zero. */
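/* As an illustrative sketch, given
     struct { unsigned b : 3; } s;
   a test like s.b == 5 is rewritten here along the lines of
     (WORD & MASK) == (5 << SHIFT)
   where WORD is a mode-sized load covering the bit-field, MASK selects
   its three bits and SHIFT is their position within WORD.  */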
4436
4437 static tree
4438 optimize_bit_field_compare (location_t loc, enum tree_code code,
4439 tree compare_type, tree lhs, tree rhs)
4440 {
4441 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4442 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4443 tree type = TREE_TYPE (lhs);
4444 tree unsigned_type;
4445 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4446 machine_mode lmode, rmode;
4447 scalar_int_mode nmode;
4448 int lunsignedp, runsignedp;
4449 int lreversep, rreversep;
4450 int lvolatilep = 0, rvolatilep = 0;
4451 tree linner, rinner = NULL_TREE;
4452 tree mask;
4453 tree offset;
4454
4455 /* Get all the information about the extractions being done. If the bit size
4456 is the same as the size of the underlying object, we aren't doing an
4457 extraction at all and so can do nothing. We also don't want to
4458 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4459 then will no longer be able to replace it. */
4460 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4461 &lunsignedp, &lreversep, &lvolatilep);
4462 if (linner == lhs
4463 || !known_size_p (plbitsize)
4464 || !plbitsize.is_constant (&lbitsize)
4465 || !plbitpos.is_constant (&lbitpos)
4466 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4467 || offset != 0
4468 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4469 || lvolatilep)
4470 return 0;
4471
4472 if (const_p)
4473 rreversep = lreversep;
4474 else
4475 {
4476 /* If this is not a constant, we can only do something if bit positions,
4477 sizes, signedness and storage order are the same. */
4478 rinner
4479 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4480 &runsignedp, &rreversep, &rvolatilep);
4481
4482 if (rinner == rhs
4483 || maybe_ne (lbitpos, rbitpos)
4484 || maybe_ne (lbitsize, rbitsize)
4485 || lunsignedp != runsignedp
4486 || lreversep != rreversep
4487 || offset != 0
4488 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4489 || rvolatilep)
4490 return 0;
4491 }
4492
4493 /* Honor the C++ memory model and mimic what RTL expansion does. */
4494 poly_uint64 bitstart = 0;
4495 poly_uint64 bitend = 0;
4496 if (TREE_CODE (lhs) == COMPONENT_REF)
4497 {
4498 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4499 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4500 return 0;
4501 }
4502
4503 /* See if we can find a mode to refer to this field. We should be able to,
4504 but fail if we can't. */
4505 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4506 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4507 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4508 TYPE_ALIGN (TREE_TYPE (rinner))),
4509 BITS_PER_WORD, false, &nmode))
4510 return 0;
4511
4512 /* Set signed and unsigned types of the precision of this mode for the
4513 shifts below. */
4514 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4515
4516 /* Compute the bit position and size for the new reference and our offset
4517 within it. If the new reference is the same size as the original, we
4518 won't optimize anything, so return zero. */
4519 nbitsize = GET_MODE_BITSIZE (nmode);
4520 nbitpos = lbitpos & ~ (nbitsize - 1);
4521 lbitpos -= nbitpos;
4522 if (nbitsize == lbitsize)
4523 return 0;
4524
4525 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4526 lbitpos = nbitsize - lbitsize - lbitpos;
4527
4528 /* Make the mask to be used against the extracted field. */
4529 mask = build_int_cst_type (unsigned_type, -1);
4530 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4531 mask = const_binop (RSHIFT_EXPR, mask,
4532 size_int (nbitsize - lbitsize - lbitpos));
4533
4534 if (! const_p)
4535 {
4536 if (nbitpos < 0)
4537 return 0;
4538
4539 /* If not comparing with constant, just rework the comparison
4540 and return. */
4541 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4542 nbitsize, nbitpos, 1, lreversep);
4543 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4544 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4545 nbitsize, nbitpos, 1, rreversep);
4546 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4547 return fold_build2_loc (loc, code, compare_type, t1, t2);
4548 }
4549
4550 /* Otherwise, we are handling the constant case. See if the constant is too
4551 big for the field. Warn and return a tree for 0 (false) if so. We do
4552 this not only for its own sake, but to avoid having to test for this
4553 error case below. If we didn't, we might generate wrong code.
4554
4555 For unsigned fields, the constant shifted right by the field length should
4556 be all zero. For signed fields, the high-order bits should agree with
4557 the sign bit. */
4558
4559 if (lunsignedp)
4560 {
4561 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4562 {
4563 warning (0, "comparison is always %d due to width of bit-field",
4564 code == NE_EXPR);
4565 return constant_boolean_node (code == NE_EXPR, compare_type);
4566 }
4567 }
4568 else
4569 {
4570 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4571 if (tem != 0 && tem != -1)
4572 {
4573 warning (0, "comparison is always %d due to width of bit-field",
4574 code == NE_EXPR);
4575 return constant_boolean_node (code == NE_EXPR, compare_type);
4576 }
4577 }
4578
4579 if (nbitpos < 0)
4580 return 0;
4581
4582 /* Single-bit compares should always be against zero. */
4583 if (lbitsize == 1 && ! integer_zerop (rhs))
4584 {
4585 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4586 rhs = build_int_cst (type, 0);
4587 }
4588
4589 /* Make a new bitfield reference, shift the constant over the
4590 appropriate number of bits and mask it with the computed mask
4591 (in case this was a signed field). If we changed it, make a new one. */
4592 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4593 nbitsize, nbitpos, 1, lreversep);
4594
4595 rhs = const_binop (BIT_AND_EXPR,
4596 const_binop (LSHIFT_EXPR,
4597 fold_convert_loc (loc, unsigned_type, rhs),
4598 size_int (lbitpos)),
4599 mask);
4600
4601 lhs = build2_loc (loc, code, compare_type,
4602 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4603 return lhs;
4604 }
4605 \f
4606 /* Subroutine for fold_truth_andor_1: decode a field reference.
4607
4608 If EXP is a comparison reference, we return the innermost reference.
4609
4610 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4611 set to the starting bit number.
4612
4613 If the innermost field can be completely contained in a mode-sized
4614 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4615
4616    *PVOLATILEP is set to 1 if any expression encountered is volatile;
4617 otherwise it is not changed.
4618
4619 *PUNSIGNEDP is set to the signedness of the field.
4620
4621 *PREVERSEP is set to the storage order of the field.
4622
4623 *PMASK is set to the mask used. This is either contained in a
4624 BIT_AND_EXPR or derived from the width of the field.
4625
4626 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4627
4628 Return 0 if this is not a component reference or is one that we can't
4629 do anything with. */
4630
4631 static tree
4632 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4633 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4634 int *punsignedp, int *preversep, int *pvolatilep,
4635 tree *pmask, tree *pand_mask)
4636 {
4637 tree exp = *exp_;
4638 tree outer_type = 0;
4639 tree and_mask = 0;
4640 tree mask, inner, offset;
4641 tree unsigned_type;
4642 unsigned int precision;
4643
4644 /* All the optimizations using this function assume integer fields.
4645 There are problems with FP fields since the type_for_size call
4646 below can fail for, e.g., XFmode. */
4647 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4648 return NULL_TREE;
4649
4650 /* We are interested in the bare arrangement of bits, so strip everything
4651 that doesn't affect the machine mode. However, record the type of the
4652 outermost expression if it may matter below. */
4653 if (CONVERT_EXPR_P (exp)
4654 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4655 outer_type = TREE_TYPE (exp);
4656 STRIP_NOPS (exp);
4657
4658 if (TREE_CODE (exp) == BIT_AND_EXPR)
4659 {
4660 and_mask = TREE_OPERAND (exp, 1);
4661 exp = TREE_OPERAND (exp, 0);
4662 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4663 if (TREE_CODE (and_mask) != INTEGER_CST)
4664 return NULL_TREE;
4665 }
4666
4667 poly_int64 poly_bitsize, poly_bitpos;
4668 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4669 pmode, punsignedp, preversep, pvolatilep);
4670 if ((inner == exp && and_mask == 0)
4671 || !poly_bitsize.is_constant (pbitsize)
4672 || !poly_bitpos.is_constant (pbitpos)
4673 || *pbitsize < 0
4674 || offset != 0
4675 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4676 /* Reject out-of-bound accesses (PR79731). */
4677 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4678 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4679 *pbitpos + *pbitsize) < 0))
4680 return NULL_TREE;
4681
4682 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4683 if (unsigned_type == NULL_TREE)
4684 return NULL_TREE;
4685
4686 *exp_ = exp;
4687
4688 /* If the number of bits in the reference is the same as the bitsize of
4689 the outer type, then the outer type gives the signedness. Otherwise
4690 (in case of a small bitfield) the signedness is unchanged. */
4691 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4692 *punsignedp = TYPE_UNSIGNED (outer_type);
4693
4694 /* Compute the mask to access the bitfield. */
4695 precision = TYPE_PRECISION (unsigned_type);
4696
4697 mask = build_int_cst_type (unsigned_type, -1);
4698
4699 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4700 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4701
4702 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4703 if (and_mask != 0)
4704 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4705 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4706
4707 *pmask = mask;
4708 *pand_mask = and_mask;
4709 return inner;
4710 }
4711
4712 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4713    bit positions and the type of MASK is signed.  */
4714
4715 static bool
4716 all_ones_mask_p (const_tree mask, unsigned int size)
4717 {
4718 tree type = TREE_TYPE (mask);
4719 unsigned int precision = TYPE_PRECISION (type);
4720
4721 /* If this function returns true when the type of the mask is
4722 UNSIGNED, then there will be errors. In particular see
4723 gcc.c-torture/execute/990326-1.c. There does not appear to be
4724 any documentation paper trail as to why this is so. But the pre
4725 wide-int worked with that restriction and it has been preserved
4726 here. */
4727 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4728 return false;
4729
4730 return wi::mask (size, false, precision) == wi::to_wide (mask);
4731 }
4732
4733 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4734 represents the sign bit of EXP's type. If EXP represents a sign
4735 or zero extension, also test VAL against the unextended type.
4736 The return value is the (sub)expression whose sign bit is VAL,
4737 or NULL_TREE otherwise. */
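/* E.g. for a 32-bit int EXP, only VAL == 0x80000000 matches; if EXP is
   (int) C for an 8-bit type, VAL == 0x80 also matches via the recursive
   test against the unextended type.  */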
4738
4739 tree
4740 sign_bit_p (tree exp, const_tree val)
4741 {
4742 int width;
4743 tree t;
4744
4745 /* Tree EXP must have an integral type. */
4746 t = TREE_TYPE (exp);
4747 if (! INTEGRAL_TYPE_P (t))
4748 return NULL_TREE;
4749
4750 /* Tree VAL must be an integer constant. */
4751 if (TREE_CODE (val) != INTEGER_CST
4752 || TREE_OVERFLOW (val))
4753 return NULL_TREE;
4754
4755 width = TYPE_PRECISION (t);
4756 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4757 return exp;
4758
4759 /* Handle extension from a narrower type. */
4760 if (TREE_CODE (exp) == NOP_EXPR
4761 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4762 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4763
4764 return NULL_TREE;
4765 }
4766
4767 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4768 to be evaluated unconditionally. */
4769
4770 static bool
4771 simple_operand_p (const_tree exp)
4772 {
4773 /* Strip any conversions that don't change the machine mode. */
4774 STRIP_NOPS (exp);
4775
4776 return (CONSTANT_CLASS_P (exp)
4777 || TREE_CODE (exp) == SSA_NAME
4778 || (DECL_P (exp)
4779 && ! TREE_ADDRESSABLE (exp)
4780 && ! TREE_THIS_VOLATILE (exp)
4781 && ! DECL_NONLOCAL (exp)
4782 /* Don't regard global variables as simple. They may be
4783 allocated in ways unknown to the compiler (shared memory,
4784 #pragma weak, etc). */
4785 && ! TREE_PUBLIC (exp)
4786 && ! DECL_EXTERNAL (exp)
4787 /* Weakrefs are not safe to be read, since they can be NULL.
4788 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4789 have DECL_WEAK flag set. */
4790 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4791 /* Loading a static variable is unduly expensive, but global
4792 registers aren't expensive. */
4793 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4794 }
4795
4796 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4797 to be evaluated unconditionally.
4798    In addition to simple_operand_p, we assume that comparisons, conversions,
4799    and logic-not operations are simple if their operands are simple, too.  */
4800
4801 static bool
4802 simple_operand_p_2 (tree exp)
4803 {
4804 enum tree_code code;
4805
4806 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4807 return false;
4808
4809 while (CONVERT_EXPR_P (exp))
4810 exp = TREE_OPERAND (exp, 0);
4811
4812 code = TREE_CODE (exp);
4813
4814 if (TREE_CODE_CLASS (code) == tcc_comparison)
4815 return (simple_operand_p (TREE_OPERAND (exp, 0))
4816 && simple_operand_p (TREE_OPERAND (exp, 1)));
4817
4818 if (code == TRUTH_NOT_EXPR)
4819 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4820
4821 return simple_operand_p (exp);
4822 }
4823
4824 \f
4825 /* The following functions are subroutines to fold_range_test and allow it to
4826 try to change a logical combination of comparisons into a range test.
4827
4828 For example, both
4829 X == 2 || X == 3 || X == 4 || X == 5
4830 and
4831 X >= 2 && X <= 5
4832 are converted to
4833 (unsigned) (X - 2) <= 3
4834
4835 We describe each set of comparisons as being either inside or outside
4836 a range, using a variable named like IN_P, and then describe the
4837 range with a lower and upper bound. If one of the bounds is omitted,
4838 it represents either the highest or lowest value of the type.
4839
4840 In the comments below, we represent a range by two numbers in brackets
4841 preceded by a "+" to designate being inside that range, or a "-" to
4842 designate being outside that range, so the condition can be inverted by
4843 flipping the prefix. An omitted bound is represented by a "-". For
4844 example, "- [-, 10]" means being outside the range starting at the lowest
4845 possible value and ending at 10, in other words, being greater than 10.
4846 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4847 always false.
4848
4849 We set up things so that the missing bounds are handled in a consistent
4850 manner so neither a missing bound nor "true" and "false" need to be
4851 handled using a special case. */
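/* In this notation, X == 2 || X == 3 || X == 4 || X == 5 merges into
   + [2, 5], its negation is - [2, 5], and X > 10 is - [-, 10].  */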
4852
4853 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4854 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4855 and UPPER1_P are nonzero if the respective argument is an upper bound
4856 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4857 must be specified for a comparison. ARG1 will be converted to ARG0's
4858 type if both are specified. */
4859
4860 static tree
4861 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4862 tree arg1, int upper1_p)
4863 {
4864 tree tem;
4865 int result;
4866 int sgn0, sgn1;
4867
4868 /* If neither arg represents infinity, do the normal operation.
4869 Else, if not a comparison, return infinity. Else handle the special
4870 comparison rules. Note that most of the cases below won't occur, but
4871 are handled for consistency. */
4872
4873 if (arg0 != 0 && arg1 != 0)
4874 {
4875 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4876 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4877 STRIP_NOPS (tem);
4878 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4879 }
4880
4881 if (TREE_CODE_CLASS (code) != tcc_comparison)
4882 return 0;
4883
4884 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4885 for neither. In real maths, we cannot assume open ended ranges are
4886 the same. But, this is computer arithmetic, where numbers are finite.
4887    We can therefore stand in for any missing bound with a single value Z,
4888    Z being greater than any representable number.  This permits us to
4889    treat unbounded ranges as equal.  */
4890 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4891 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4892 switch (code)
4893 {
4894 case EQ_EXPR:
4895 result = sgn0 == sgn1;
4896 break;
4897 case NE_EXPR:
4898 result = sgn0 != sgn1;
4899 break;
4900 case LT_EXPR:
4901 result = sgn0 < sgn1;
4902 break;
4903 case LE_EXPR:
4904 result = sgn0 <= sgn1;
4905 break;
4906 case GT_EXPR:
4907 result = sgn0 > sgn1;
4908 break;
4909 case GE_EXPR:
4910 result = sgn0 >= sgn1;
4911 break;
4912 default:
4913 gcc_unreachable ();
4914 }
4915
4916 return constant_boolean_node (result, type);
4917 }
4918 \f
4919 /* Helper routine for make_range. Perform one step for it, return
4920 new expression if the loop should continue or NULL_TREE if it should
4921 stop. */
4922
4923 tree
4924 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4925 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4926 bool *strict_overflow_p)
4927 {
4928 tree arg0_type = TREE_TYPE (arg0);
4929 tree n_low, n_high, low = *p_low, high = *p_high;
4930 int in_p = *p_in_p, n_in_p;
4931
4932 switch (code)
4933 {
4934 case TRUTH_NOT_EXPR:
4935 /* We can only do something if the range is testing for zero. */
4936 if (low == NULL_TREE || high == NULL_TREE
4937 || ! integer_zerop (low) || ! integer_zerop (high))
4938 return NULL_TREE;
4939 *p_in_p = ! in_p;
4940 return arg0;
4941
4942 case EQ_EXPR: case NE_EXPR:
4943 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4944 /* We can only do something if the range is testing for zero
4945 and if the second operand is an integer constant. Note that
4946 	 saying something is "in" the range we make is done by
4947 	 complementing IN_P, since IN_P initially encodes the case of
4948 	 being not equal to zero; "out" is leaving it alone.  */
4949 if (low == NULL_TREE || high == NULL_TREE
4950 || ! integer_zerop (low) || ! integer_zerop (high)
4951 || TREE_CODE (arg1) != INTEGER_CST)
4952 return NULL_TREE;
4953
4954 switch (code)
4955 {
4956 case NE_EXPR: /* - [c, c] */
4957 low = high = arg1;
4958 break;
4959 case EQ_EXPR: /* + [c, c] */
4960 in_p = ! in_p, low = high = arg1;
4961 break;
4962 case GT_EXPR: /* - [-, c] */
4963 low = 0, high = arg1;
4964 break;
4965 case GE_EXPR: /* + [c, -] */
4966 in_p = ! in_p, low = arg1, high = 0;
4967 break;
4968 case LT_EXPR: /* - [c, -] */
4969 low = arg1, high = 0;
4970 break;
4971 case LE_EXPR: /* + [-, c] */
4972 in_p = ! in_p, low = 0, high = arg1;
4973 break;
4974 default:
4975 gcc_unreachable ();
4976 }
4977
4978 /* If this is an unsigned comparison, we also know that EXP is
4979 greater than or equal to zero. We base the range tests we make
4980 on that fact, so we record it here so we can parse existing
4981 range tests. We test arg0_type since often the return type
4982 of, e.g. EQ_EXPR, is boolean. */
4983 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4984 {
4985 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4986 in_p, low, high, 1,
4987 build_int_cst (arg0_type, 0),
4988 NULL_TREE))
4989 return NULL_TREE;
4990
4991 in_p = n_in_p, low = n_low, high = n_high;
4992
4993 /* If the high bound is missing, but we have a nonzero low
4994 bound, reverse the range so it goes from zero to the low bound
4995 minus 1. */
4996 if (high == 0 && low && ! integer_zerop (low))
4997 {
4998 in_p = ! in_p;
4999 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5000 build_int_cst (TREE_TYPE (low), 1), 0);
5001 low = build_int_cst (arg0_type, 0);
5002 }
5003 }
5004
5005 *p_low = low;
5006 *p_high = high;
5007 *p_in_p = in_p;
5008 return arg0;
5009
5010 case NEGATE_EXPR:
5011 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5012 low and high are non-NULL, then normalize will DTRT. */
5013 if (!TYPE_UNSIGNED (arg0_type)
5014 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5015 {
5016 if (low == NULL_TREE)
5017 low = TYPE_MIN_VALUE (arg0_type);
5018 if (high == NULL_TREE)
5019 high = TYPE_MAX_VALUE (arg0_type);
5020 }
5021
5022 /* (-x) IN [a,b] -> x in [-b, -a] */
5023 n_low = range_binop (MINUS_EXPR, exp_type,
5024 build_int_cst (exp_type, 0),
5025 0, high, 1);
5026 n_high = range_binop (MINUS_EXPR, exp_type,
5027 build_int_cst (exp_type, 0),
5028 0, low, 0);
5029 if (n_high != 0 && TREE_OVERFLOW (n_high))
5030 return NULL_TREE;
5031 goto normalize;
5032
5033 case BIT_NOT_EXPR:
5034 /* ~ X -> -X - 1 */
5035 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5036 build_int_cst (exp_type, 1));
5037
5038 case PLUS_EXPR:
5039 case MINUS_EXPR:
5040 if (TREE_CODE (arg1) != INTEGER_CST)
5041 return NULL_TREE;
5042
5043 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5044 move a constant to the other side. */
5045 if (!TYPE_UNSIGNED (arg0_type)
5046 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5047 return NULL_TREE;
5048
5049 /* If EXP is signed, any overflow in the computation is undefined,
5050 so we don't worry about it so long as our computations on
5051 the bounds don't overflow. For unsigned, overflow is defined
5052 and this is exactly the right thing. */
5053 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5054 arg0_type, low, 0, arg1, 0);
5055 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5056 arg0_type, high, 1, arg1, 0);
5057 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5058 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5059 return NULL_TREE;
5060
5061 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5062 *strict_overflow_p = true;
5063
5064 normalize:
5065 /* Check for an unsigned range which has wrapped around the maximum
5066 value thus making n_high < n_low, and normalize it. */
5067 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5068 {
5069 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5070 build_int_cst (TREE_TYPE (n_high), 1), 0);
5071 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5072 build_int_cst (TREE_TYPE (n_low), 1), 0);
5073
5074 /* If the range is of the form +/- [ x+1, x ], we won't
5075 be able to normalize it. But then, it represents the
5076 whole range or the empty set, so make it
5077 +/- [ -, - ]. */
5078 if (tree_int_cst_equal (n_low, low)
5079 && tree_int_cst_equal (n_high, high))
5080 low = high = 0;
5081 else
5082 in_p = ! in_p;
5083 }
5084 else
5085 low = n_low, high = n_high;
5086
5087 *p_low = low;
5088 *p_high = high;
5089 *p_in_p = in_p;
5090 return arg0;
5091
5092 CASE_CONVERT:
5093 case NON_LVALUE_EXPR:
5094 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5095 return NULL_TREE;
5096
5097 if (! INTEGRAL_TYPE_P (arg0_type)
5098 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5099 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5100 return NULL_TREE;
5101
5102 n_low = low, n_high = high;
5103
5104 if (n_low != 0)
5105 n_low = fold_convert_loc (loc, arg0_type, n_low);
5106
5107 if (n_high != 0)
5108 n_high = fold_convert_loc (loc, arg0_type, n_high);
5109
5110       /* If we're converting arg0 from an unsigned type to the signed
5111 	 type of EXP, we will be doing the comparison as unsigned.
5112 The tests above have already verified that LOW and HIGH
5113 are both positive.
5114
5115 So we have to ensure that we will handle large unsigned
5116 values the same way that the current signed bounds treat
5117 negative values. */
5118
5119 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5120 {
5121 tree high_positive;
5122 tree equiv_type;
5123 /* For fixed-point modes, we need to pass the saturating flag
5124 as the 2nd parameter. */
5125 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5126 equiv_type
5127 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5128 TYPE_SATURATING (arg0_type));
5129 else
5130 equiv_type
5131 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5132
5133 /* A range without an upper bound is, naturally, unbounded.
5134 Since convert would have cropped a very large value, use
5135 the max value for the destination type. */
5136 high_positive
5137 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5138 : TYPE_MAX_VALUE (arg0_type);
5139
5140 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5141 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5142 fold_convert_loc (loc, arg0_type,
5143 high_positive),
5144 build_int_cst (arg0_type, 1));
5145
5146 /* If the low bound is specified, "and" the range with the
5147 range for which the original unsigned value will be
5148 positive. */
5149 if (low != 0)
5150 {
5151 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5152 1, fold_convert_loc (loc, arg0_type,
5153 integer_zero_node),
5154 high_positive))
5155 return NULL_TREE;
5156
5157 in_p = (n_in_p == in_p);
5158 }
5159 else
5160 {
5161 /* Otherwise, "or" the range with the range of the input
5162 that will be interpreted as negative. */
5163 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5164 1, fold_convert_loc (loc, arg0_type,
5165 integer_zero_node),
5166 high_positive))
5167 return NULL_TREE;
5168
5169 in_p = (in_p != n_in_p);
5170 }
5171 }
5172
5173 *p_low = n_low;
5174 *p_high = n_high;
5175 *p_in_p = in_p;
5176 return arg0;
5177
5178 default:
5179 return NULL_TREE;
5180 }
5181 }
5182
5183 /* Given EXP, a logical expression, set the range it is testing into
5184 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5185 actually being tested. *PLOW and *PHIGH will be made of the same
5186 type as the returned expression. If EXP is not a comparison, we
5187 will most likely not be returning a useful value and range. Set
5188 *STRICT_OVERFLOW_P to true if the return value is only valid
5189 because signed overflow is undefined; otherwise, do not change
5190 *STRICT_OVERFLOW_P. */
5191
5192 tree
5193 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5194 bool *strict_overflow_p)
5195 {
5196 enum tree_code code;
5197 tree arg0, arg1 = NULL_TREE;
5198 tree exp_type, nexp;
5199 int in_p;
5200 tree low, high;
5201 location_t loc = EXPR_LOCATION (exp);
5202
5203 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5204 and see if we can refine the range. Some of the cases below may not
5205 happen, but it doesn't seem worth worrying about this. We "continue"
5206 the outer loop when we've changed something; otherwise we "break"
5207 the switch, which will "break" the while. */
5208
5209 in_p = 0;
5210 low = high = build_int_cst (TREE_TYPE (exp), 0);
5211
5212 while (1)
5213 {
5214 code = TREE_CODE (exp);
5215 exp_type = TREE_TYPE (exp);
5216 arg0 = NULL_TREE;
5217
5218 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5219 {
5220 if (TREE_OPERAND_LENGTH (exp) > 0)
5221 arg0 = TREE_OPERAND (exp, 0);
5222 if (TREE_CODE_CLASS (code) == tcc_binary
5223 || TREE_CODE_CLASS (code) == tcc_comparison
5224 || (TREE_CODE_CLASS (code) == tcc_expression
5225 && TREE_OPERAND_LENGTH (exp) > 1))
5226 arg1 = TREE_OPERAND (exp, 1);
5227 }
5228 if (arg0 == NULL_TREE)
5229 break;
5230
5231 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5232 &high, &in_p, strict_overflow_p);
5233 if (nexp == NULL_TREE)
5234 break;
5235 exp = nexp;
5236 }
5237
5238 /* If EXP is a constant, we can evaluate whether this is true or false. */
5239 if (TREE_CODE (exp) == INTEGER_CST)
5240 {
5241 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5242 exp, 0, low, 0))
5243 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5244 exp, 1, high, 1)));
5245 low = high = 0;
5246 exp = 0;
5247 }
5248
5249 *pin_p = in_p, *plow = low, *phigh = high;
5250 return exp;
5251 }
5252
5253 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5254 a bitwise check i.e. when
5255 LOW == 0xXX...X00...0
5256 HIGH == 0xXX...X11...1
5257 Return corresponding mask in MASK and stem in VALUE. */
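/* For example, LOW == 0x50 and HIGH == 0x5F give end_mask == 0x0F, so
   MASK becomes 0xF0 and VALUE becomes 0x50: the range test
   X >= 0x50 && X <= 0x5F turns into (X & 0xF0) == 0x50.  */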
5258
5259 static bool
5260 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5261 tree *value)
5262 {
5263 if (TREE_CODE (low) != INTEGER_CST
5264 || TREE_CODE (high) != INTEGER_CST)
5265 return false;
5266
5267 unsigned prec = TYPE_PRECISION (type);
5268 wide_int lo = wi::to_wide (low, prec);
5269 wide_int hi = wi::to_wide (high, prec);
5270
5271 wide_int end_mask = lo ^ hi;
5272 if ((end_mask & (end_mask + 1)) != 0
5273 || (lo & end_mask) != 0)
5274 return false;
5275
5276 wide_int stem_mask = ~end_mask;
5277 wide_int stem = lo & stem_mask;
5278 if (stem != (hi & stem_mask))
5279 return false;
5280
5281 *mask = wide_int_to_tree (type, stem_mask);
5282 *value = wide_int_to_tree (type, stem);
5283
5284 return true;
5285 }
5286 \f
5287 /* Helper routine for build_range_check and match.pd. Return the type to
5288 perform the check or NULL if it shouldn't be optimized. */
5289
5290 tree
5291 range_check_type (tree etype)
5292 {
5293   /* First make sure that arithmetic in this type is valid, then make sure
5294 that it wraps around. */
5295 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5296 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5297
5298 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5299 {
5300 tree utype, minv, maxv;
5301
5302 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5303 for the type in question, as we rely on this here. */
5304 utype = unsigned_type_for (etype);
5305 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5306 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5307 build_int_cst (TREE_TYPE (maxv), 1), 1);
5308 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5309
5310 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5311 minv, 1, maxv, 1)))
5312 etype = utype;
5313 else
5314 return NULL_TREE;
5315 }
5316 else if (POINTER_TYPE_P (etype))
5317 etype = unsigned_type_for (etype);
5318 return etype;
5319 }
5320
5321 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5322 type, TYPE, return an expression to test if EXP is in (or out of, depending
5323 on IN_P) the range. Return 0 if the test couldn't be created. */
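/* For instance, for int EXP with IN_P set, LOW == 2 and HIGH == 5, this
   builds the equivalent of (unsigned) (EXP - 2) <= 3, the wrap-around
   form shown in the range test comment above.  */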
5324
5325 tree
5326 build_range_check (location_t loc, tree type, tree exp, int in_p,
5327 tree low, tree high)
5328 {
5329 tree etype = TREE_TYPE (exp), mask, value;
5330
5331 /* Disable this optimization for function pointer expressions
5332 on targets that require function pointer canonicalization. */
5333 if (targetm.have_canonicalize_funcptr_for_compare ()
5334 && POINTER_TYPE_P (etype)
5335 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5336 return NULL_TREE;
5337
5338 if (! in_p)
5339 {
5340 value = build_range_check (loc, type, exp, 1, low, high);
5341 if (value != 0)
5342 return invert_truthvalue_loc (loc, value);
5343
5344 return 0;
5345 }
5346
5347 if (low == 0 && high == 0)
5348 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5349
5350 if (low == 0)
5351 return fold_build2_loc (loc, LE_EXPR, type, exp,
5352 fold_convert_loc (loc, etype, high));
5353
5354 if (high == 0)
5355 return fold_build2_loc (loc, GE_EXPR, type, exp,
5356 fold_convert_loc (loc, etype, low));
5357
5358 if (operand_equal_p (low, high, 0))
5359 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5360 fold_convert_loc (loc, etype, low));
5361
5362 if (TREE_CODE (exp) == BIT_AND_EXPR
5363 && maskable_range_p (low, high, etype, &mask, &value))
5364 return fold_build2_loc (loc, EQ_EXPR, type,
5365 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5366 exp, mask),
5367 value);
5368
5369 if (integer_zerop (low))
5370 {
5371 if (! TYPE_UNSIGNED (etype))
5372 {
5373 etype = unsigned_type_for (etype);
5374 high = fold_convert_loc (loc, etype, high);
5375 exp = fold_convert_loc (loc, etype, exp);
5376 }
5377 return build_range_check (loc, type, exp, 1, 0, high);
5378 }
5379
5380 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5381 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5382 {
5383 int prec = TYPE_PRECISION (etype);
5384
5385 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5386 {
5387 if (TYPE_UNSIGNED (etype))
5388 {
5389 tree signed_etype = signed_type_for (etype);
5390 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5391 etype
5392 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5393 else
5394 etype = signed_etype;
5395 exp = fold_convert_loc (loc, etype, exp);
5396 }
5397 return fold_build2_loc (loc, GT_EXPR, type, exp,
5398 build_int_cst (etype, 0));
5399 }
5400 }
5401
5402 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5403      This requires wrap-around arithmetic for the type of the expression. */
5404 etype = range_check_type (etype);
5405 if (etype == NULL_TREE)
5406 return NULL_TREE;
5407
5408 high = fold_convert_loc (loc, etype, high);
5409 low = fold_convert_loc (loc, etype, low);
5410 exp = fold_convert_loc (loc, etype, exp);
5411
5412 value = const_binop (MINUS_EXPR, high, low);
5413
5414 if (value != 0 && !TREE_OVERFLOW (value))
5415 return build_range_check (loc, type,
5416 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5417 1, build_int_cst (etype, 0), value);
5418
5419 return 0;
5420 }
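/* Example of the general case above (illustrative only): for int C,
   C >= LOW && C <= HIGH becomes, in the unsigned type returned by
   range_check_type,
     (unsigned) C - (unsigned) LOW <= (unsigned) HIGH - (unsigned) LOW
   i.e. a single comparison of the shifted value against HIGH - LOW.  */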
5421 \f
5422 /* Return the predecessor of VAL in its type, handling the infinite case. */
5423
5424 static tree
5425 range_predecessor (tree val)
5426 {
5427 tree type = TREE_TYPE (val);
5428
5429 if (INTEGRAL_TYPE_P (type)
5430 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5431 return 0;
5432 else
5433 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5434 build_int_cst (TREE_TYPE (val), 1), 0);
5435 }
5436
5437 /* Return the successor of VAL in its type, handling the infinite case. */
5438
5439 static tree
5440 range_successor (tree val)
5441 {
5442 tree type = TREE_TYPE (val);
5443
5444 if (INTEGRAL_TYPE_P (type)
5445 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5446 return 0;
5447 else
5448 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5449 build_int_cst (TREE_TYPE (val), 1), 0);
5450 }
5451
5452 /* Given two ranges, see if we can merge them into one. Return 1 if we
5453 can, 0 if we can't. Set the output range into the specified parameters. */
5454
5455 bool
5456 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5457 tree high0, int in1_p, tree low1, tree high1)
5458 {
5459 int no_overlap;
5460 int subset;
5461 int temp;
5462 tree tem;
5463 int in_p;
5464 tree low, high;
5465 int lowequal = ((low0 == 0 && low1 == 0)
5466 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5467 low0, 0, low1, 0)));
5468 int highequal = ((high0 == 0 && high1 == 0)
5469 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5470 high0, 1, high1, 1)));
5471
5472 /* Make range 0 be the range that starts first, or ends last if they
5473      start at the same value.  Swap them if that is not already the case. */
5474 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5475 low0, 0, low1, 0))
5476 || (lowequal
5477 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5478 high1, 1, high0, 1))))
5479 {
5480 temp = in0_p, in0_p = in1_p, in1_p = temp;
5481 tem = low0, low0 = low1, low1 = tem;
5482 tem = high0, high0 = high1, high1 = tem;
5483 }
5484
5485   /* If the second range is != high1, where high1 is the maximum value
5486      of its type, try first merging with the < high1 range. */
5487 if (low1
5488 && high1
5489 && TREE_CODE (low1) == INTEGER_CST
5490 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5491 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5492 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5493 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5494 && operand_equal_p (low1, high1, 0))
5495 {
5496 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5497 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5498 !in1_p, NULL_TREE, range_predecessor (low1)))
5499 return true;
5500       /* Similarly, if the second range is != low1, where low1 is the minimum
5501	  value of its type, try first merging with the > low1 range. */
5502 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5503 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5504 !in1_p, range_successor (low1), NULL_TREE))
5505 return true;
5506 }
5507
5508 /* Now flag two cases, whether the ranges are disjoint or whether the
5509 second range is totally subsumed in the first. Note that the tests
5510 below are simplified by the ones above. */
5511 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5512 high0, 1, low1, 0));
5513 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5514 high1, 1, high0, 1));
5515
5516 /* We now have four cases, depending on whether we are including or
5517 excluding the two ranges. */
5518 if (in0_p && in1_p)
5519 {
5520 /* If they don't overlap, the result is false. If the second range
5521 is a subset it is the result. Otherwise, the range is from the start
5522 of the second to the end of the first. */
5523 if (no_overlap)
5524 in_p = 0, low = high = 0;
5525 else if (subset)
5526 in_p = 1, low = low1, high = high1;
5527 else
5528 in_p = 1, low = low1, high = high0;
5529 }
5530
5531 else if (in0_p && ! in1_p)
5532 {
5533 /* If they don't overlap, the result is the first range. If they are
5534 equal, the result is false. If the second range is a subset of the
5535 first, and the ranges begin at the same place, we go from just after
5536 the end of the second range to the end of the first. If the second
5537 range is not a subset of the first, or if it is a subset and both
5538 ranges end at the same place, the range starts at the start of the
5539 first range and ends just before the second range.
5540 Otherwise, we can't describe this as a single range. */
5541 if (no_overlap)
5542 in_p = 1, low = low0, high = high0;
5543 else if (lowequal && highequal)
5544 in_p = 0, low = high = 0;
5545 else if (subset && lowequal)
5546 {
5547 low = range_successor (high1);
5548 high = high0;
5549 in_p = 1;
5550 if (low == 0)
5551 {
5552 /* We are in the weird situation where high0 > high1 but
5553 high1 has no successor. Punt. */
5554 return 0;
5555 }
5556 }
5557 else if (! subset || highequal)
5558 {
5559 low = low0;
5560 high = range_predecessor (low1);
5561 in_p = 1;
5562 if (high == 0)
5563 {
5564 /* low0 < low1 but low1 has no predecessor. Punt. */
5565 return 0;
5566 }
5567 }
5568 else
5569 return 0;
5570 }
5571
5572 else if (! in0_p && in1_p)
5573 {
5574 /* If they don't overlap, the result is the second range. If the second
5575 is a subset of the first, the result is false. Otherwise,
5576 the range starts just after the first range and ends at the
5577 end of the second. */
5578 if (no_overlap)
5579 in_p = 1, low = low1, high = high1;
5580 else if (subset || highequal)
5581 in_p = 0, low = high = 0;
5582 else
5583 {
5584 low = range_successor (high0);
5585 high = high1;
5586 in_p = 1;
5587 if (low == 0)
5588 {
5589 /* high1 > high0 but high0 has no successor. Punt. */
5590 return 0;
5591 }
5592 }
5593 }
5594
5595 else
5596 {
5597 /* The case where we are excluding both ranges. Here the complex case
5598 is if they don't overlap. In that case, the only time we have a
5599 range is if they are adjacent. If the second is a subset of the
5600 first, the result is the first. Otherwise, the range to exclude
5601 starts at the beginning of the first range and ends at the end of the
5602 second. */
5603 if (no_overlap)
5604 {
5605 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5606 range_successor (high0),
5607 1, low1, 0)))
5608 in_p = 0, low = low0, high = high1;
5609 else
5610 {
5611 /* Canonicalize - [min, x] into - [-, x]. */
5612 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5613 switch (TREE_CODE (TREE_TYPE (low0)))
5614 {
5615 case ENUMERAL_TYPE:
5616 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5617 GET_MODE_BITSIZE
5618 (TYPE_MODE (TREE_TYPE (low0)))))
5619 break;
5620 /* FALLTHROUGH */
5621 case INTEGER_TYPE:
5622 if (tree_int_cst_equal (low0,
5623 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5624 low0 = 0;
5625 break;
5626 case POINTER_TYPE:
5627 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5628 && integer_zerop (low0))
5629 low0 = 0;
5630 break;
5631 default:
5632 break;
5633 }
5634
5635 /* Canonicalize - [x, max] into - [x, -]. */
5636 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5637 switch (TREE_CODE (TREE_TYPE (high1)))
5638 {
5639 case ENUMERAL_TYPE:
5640 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5641 GET_MODE_BITSIZE
5642 (TYPE_MODE (TREE_TYPE (high1)))))
5643 break;
5644 /* FALLTHROUGH */
5645 case INTEGER_TYPE:
5646 if (tree_int_cst_equal (high1,
5647 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5648 high1 = 0;
5649 break;
5650 case POINTER_TYPE:
5651 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5652 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5653 high1, 1,
5654 build_int_cst (TREE_TYPE (high1), 1),
5655 1)))
5656 high1 = 0;
5657 break;
5658 default:
5659 break;
5660 }
5661
5662 /* The ranges might be also adjacent between the maximum and
5663 minimum values of the given type. For
5664 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5665 return + [x + 1, y - 1]. */
5666 if (low0 == 0 && high1 == 0)
5667 {
5668 low = range_successor (high0);
5669 high = range_predecessor (low1);
5670 if (low == 0 || high == 0)
5671 return 0;
5672
5673 in_p = 1;
5674 }
5675 else
5676 return 0;
5677 }
5678 }
5679 else if (subset)
5680 in_p = 0, low = low0, high = high0;
5681 else
5682 in_p = 0, low = low0, high = high1;
5683 }
5684
5685 *pin_p = in_p, *plow = low, *phigh = high;
5686 return 1;
5687 }
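/* Example (an illustrative call, not from the original source): for the
   two "in" ranges [5, 20] and [0, 9], the code first swaps them so that
   range 0 is [0, 9] (it starts first).  Then no_overlap is false
   (9 >= 5) and subset is false (20 > 9), so the in0_p && in1_p case
   yields the intersection + [5, 9], i.e. from low1 to high0.  */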
5688 \f
5689
5690 /* Subroutine of fold, looking inside expressions of the form
5691 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5692    of the COND_EXPR.  This function is also used to optimize
5693 A op B ? C : A, by reversing the comparison first.
5694
5695 Return a folded expression whose code is not a COND_EXPR
5696 anymore, or NULL_TREE if no folding opportunity is found. */
5697
5698 static tree
5699 fold_cond_expr_with_comparison (location_t loc, tree type,
5700 tree arg0, tree arg1, tree arg2)
5701 {
5702 enum tree_code comp_code = TREE_CODE (arg0);
5703 tree arg00 = TREE_OPERAND (arg0, 0);
5704 tree arg01 = TREE_OPERAND (arg0, 1);
5705 tree arg1_type = TREE_TYPE (arg1);
5706 tree tem;
5707
5708 STRIP_NOPS (arg1);
5709 STRIP_NOPS (arg2);
5710
5711 /* If we have A op 0 ? A : -A, consider applying the following
5712 transformations:
5713
5714 A == 0? A : -A same as -A
5715 A != 0? A : -A same as A
5716 A >= 0? A : -A same as abs (A)
5717 A > 0? A : -A same as abs (A)
5718 A <= 0? A : -A same as -abs (A)
5719 A < 0? A : -A same as -abs (A)
5720
5721 None of these transformations work for modes with signed
5722 zeros. If A is +/-0, the first two transformations will
5723 change the sign of the result (from +0 to -0, or vice
5724 versa). The last four will fix the sign of the result,
5725 even though the original expressions could be positive or
5726 negative, depending on the sign of A.
5727
5728 Note that all these transformations are correct if A is
5729 NaN, since the two alternatives (A and -A) are also NaNs. */
5730 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5731 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5732 ? real_zerop (arg01)
5733 : integer_zerop (arg01))
5734 && ((TREE_CODE (arg2) == NEGATE_EXPR
5735 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5736 /* In the case that A is of the form X-Y, '-A' (arg2) may
5737	     have already been folded to Y-X; check for that. */
5738 || (TREE_CODE (arg1) == MINUS_EXPR
5739 && TREE_CODE (arg2) == MINUS_EXPR
5740 && operand_equal_p (TREE_OPERAND (arg1, 0),
5741 TREE_OPERAND (arg2, 1), 0)
5742 && operand_equal_p (TREE_OPERAND (arg1, 1),
5743 TREE_OPERAND (arg2, 0), 0))))
5744 switch (comp_code)
5745 {
5746 case EQ_EXPR:
5747 case UNEQ_EXPR:
5748 tem = fold_convert_loc (loc, arg1_type, arg1);
5749 return fold_convert_loc (loc, type, negate_expr (tem));
5750 case NE_EXPR:
5751 case LTGT_EXPR:
5752 return fold_convert_loc (loc, type, arg1);
5753 case UNGE_EXPR:
5754 case UNGT_EXPR:
5755 if (flag_trapping_math)
5756 break;
5757 /* Fall through. */
5758 case GE_EXPR:
5759 case GT_EXPR:
5760 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5761 break;
5762 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5763 return fold_convert_loc (loc, type, tem);
5764 case UNLE_EXPR:
5765 case UNLT_EXPR:
5766 if (flag_trapping_math)
5767 break;
5768 /* FALLTHRU */
5769 case LE_EXPR:
5770 case LT_EXPR:
5771 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5772 break;
5773 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5774 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5775 {
5776	  /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
5777	     is not: it invokes UB both in abs and in the negation of it.
5778 So, use ABSU_EXPR instead. */
5779 tree utype = unsigned_type_for (TREE_TYPE (arg1));
5780 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5781 tem = negate_expr (tem);
5782 return fold_convert_loc (loc, type, tem);
5783 }
5784 else
5785 {
5786 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5787 return negate_expr (fold_convert_loc (loc, type, tem));
5788 }
5789 default:
5790 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5791 break;
5792 }
5793
5794 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5795 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5796 both transformations are correct when A is NaN: A != 0
5797 is then true, and A == 0 is false. */
5798
5799 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5800 && integer_zerop (arg01) && integer_zerop (arg2))
5801 {
5802 if (comp_code == NE_EXPR)
5803 return fold_convert_loc (loc, type, arg1);
5804 else if (comp_code == EQ_EXPR)
5805 return build_zero_cst (type);
5806 }
5807
5808 /* Try some transformations of A op B ? A : B.
5809
5810 A == B? A : B same as B
5811 A != B? A : B same as A
5812 A >= B? A : B same as max (A, B)
5813 A > B? A : B same as max (B, A)
5814 A <= B? A : B same as min (A, B)
5815 A < B? A : B same as min (B, A)
5816
5817 As above, these transformations don't work in the presence
5818 of signed zeros. For example, if A and B are zeros of
5819 opposite sign, the first two transformations will change
5820 the sign of the result. In the last four, the original
5821 expressions give different results for (A=+0, B=-0) and
5822 (A=-0, B=+0), but the transformed expressions do not.
5823
5824 The first two transformations are correct if either A or B
5825 is a NaN. In the first transformation, the condition will
5826 be false, and B will indeed be chosen. In the case of the
5827 second transformation, the condition A != B will be true,
5828 and A will be chosen.
5829
5830 The conversions to max() and min() are not correct if B is
5831 a number and A is not. The conditions in the original
5832 expressions will be false, so all four give B. The min()
5833 and max() versions would give a NaN instead. */
5834 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5835 && operand_equal_for_comparison_p (arg01, arg2)
5836 /* Avoid these transformations if the COND_EXPR may be used
5837 as an lvalue in the C++ front-end. PR c++/19199. */
5838 && (in_gimple_form
5839 || VECTOR_TYPE_P (type)
5840 || (! lang_GNU_CXX ()
5841 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5842 || ! maybe_lvalue_p (arg1)
5843 || ! maybe_lvalue_p (arg2)))
5844 {
5845 tree comp_op0 = arg00;
5846 tree comp_op1 = arg01;
5847 tree comp_type = TREE_TYPE (comp_op0);
5848
5849 switch (comp_code)
5850 {
5851 case EQ_EXPR:
5852 return fold_convert_loc (loc, type, arg2);
5853 case NE_EXPR:
5854 return fold_convert_loc (loc, type, arg1);
5855 case LE_EXPR:
5856 case LT_EXPR:
5857 case UNLE_EXPR:
5858 case UNLT_EXPR:
5859 /* In C++ a ?: expression can be an lvalue, so put the
5860 operand which will be used if they are equal first
5861 so that we can convert this back to the
5862 corresponding COND_EXPR. */
5863 if (!HONOR_NANS (arg1))
5864 {
5865 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5866 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5867 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5868 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5869 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5870 comp_op1, comp_op0);
5871 return fold_convert_loc (loc, type, tem);
5872 }
5873 break;
5874 case GE_EXPR:
5875 case GT_EXPR:
5876 case UNGE_EXPR:
5877 case UNGT_EXPR:
5878 if (!HONOR_NANS (arg1))
5879 {
5880 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5881 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5882 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5883 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5884 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5885 comp_op1, comp_op0);
5886 return fold_convert_loc (loc, type, tem);
5887 }
5888 break;
5889 case UNEQ_EXPR:
5890 if (!HONOR_NANS (arg1))
5891 return fold_convert_loc (loc, type, arg2);
5892 break;
5893 case LTGT_EXPR:
5894 if (!HONOR_NANS (arg1))
5895 return fold_convert_loc (loc, type, arg1);
5896 break;
5897 default:
5898 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5899 break;
5900 }
5901 }
5902
5903 return NULL_TREE;
5904 }
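/* Examples of the folds above (illustrative): with signed zeros not
   honored,
     x > 0.0 ? x : -x   folds to ABS_EXPR <x>
     a >= b ? a : b     folds to MAX_EXPR <a, b>  (when NaNs are not honored)
     a != b ? a : b     folds to a.  */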
5905
5906
5907 \f
5908 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5909 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5910 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5911 false) >= 2)
5912 #endif
5913
5914 /* EXP is some logical combination of boolean tests. See if we can
5915 merge it into some range test. Return the new tree if so. */
5916
5917 static tree
5918 fold_range_test (location_t loc, enum tree_code code, tree type,
5919 tree op0, tree op1)
5920 {
5921 int or_op = (code == TRUTH_ORIF_EXPR
5922 || code == TRUTH_OR_EXPR);
5923 int in0_p, in1_p, in_p;
5924 tree low0, low1, low, high0, high1, high;
5925 bool strict_overflow_p = false;
5926 tree tem, lhs, rhs;
5927 const char * const warnmsg = G_("assuming signed overflow does not occur "
5928 "when simplifying range test");
5929
5930 if (!INTEGRAL_TYPE_P (type))
5931 return 0;
5932
5933 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5934 /* If op0 is known true or false and this is a short-circuiting
5935 operation we must not merge with op1 since that makes side-effects
5936 unconditional. So special-case this. */
5937 if (!lhs
5938 && ((code == TRUTH_ORIF_EXPR && in0_p)
5939 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
5940 return op0;
5941 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5942
5943 /* If this is an OR operation, invert both sides; we will invert
5944 again at the end. */
5945 if (or_op)
5946 in0_p = ! in0_p, in1_p = ! in1_p;
5947
5948 /* If both expressions are the same, if we can merge the ranges, and we
5949 can build the range test, return it or it inverted. If one of the
5950 ranges is always true or always false, consider it to be the same
5951 expression as the other. */
5952 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5953 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5954 in1_p, low1, high1)
5955 && (tem = (build_range_check (loc, type,
5956 lhs != 0 ? lhs
5957 : rhs != 0 ? rhs : integer_zero_node,
5958 in_p, low, high))) != 0)
5959 {
5960 if (strict_overflow_p)
5961 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5962 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5963 }
5964
5965 /* On machines where the branch cost is expensive, if this is a
5966 short-circuited branch and the underlying object on both sides
5967 is the same, make a non-short-circuit operation. */
5968 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
5969 if (param_logical_op_non_short_circuit != -1)
5970 logical_op_non_short_circuit
5971 = param_logical_op_non_short_circuit;
5972 if (logical_op_non_short_circuit
5973 && !flag_sanitize_coverage
5974 && lhs != 0 && rhs != 0
5975 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5976 && operand_equal_p (lhs, rhs, 0))
5977 {
5978 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5979 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5980	 which case we can't do this. */
5981 if (simple_operand_p (lhs))
5982 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5983 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5984 type, op0, op1);
5985
5986 else if (!lang_hooks.decls.global_bindings_p ()
5987 && !CONTAINS_PLACEHOLDER_P (lhs))
5988 {
5989 tree common = save_expr (lhs);
5990
5991 if ((lhs = build_range_check (loc, type, common,
5992 or_op ? ! in0_p : in0_p,
5993 low0, high0)) != 0
5994 && (rhs = build_range_check (loc, type, common,
5995 or_op ? ! in1_p : in1_p,
5996 low1, high1)) != 0)
5997 {
5998 if (strict_overflow_p)
5999 fold_overflow_warning (warnmsg,
6000 WARN_STRICT_OVERFLOW_COMPARISON);
6001 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6002 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6003 type, lhs, rhs);
6004 }
6005 }
6006 }
6007
6008 return 0;
6009 }
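/* Example (illustrative): ch >= '0' && ch <= '9' is turned by make_range
   into a single "in" range [48, 57] on ch, and build_range_check then
   emits the equivalent of (unsigned) (ch - '0') <= 9, replacing two
   branches with one.  */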
6010 \f
6011 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6012 bit value. Arrange things so the extra bits will be set to zero if and
6013    only if C is sign-extended to its full width.  If MASK is nonzero,
6014 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6015
6016 static tree
6017 unextend (tree c, int p, int unsignedp, tree mask)
6018 {
6019 tree type = TREE_TYPE (c);
6020 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6021 tree temp;
6022
6023 if (p == modesize || unsignedp)
6024 return c;
6025
6026 /* We work by getting just the sign bit into the low-order bit, then
6027 into the high-order bit, then sign-extend. We then XOR that value
6028 with C. */
6029 temp = build_int_cst (TREE_TYPE (c),
6030 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6031
6032 /* We must use a signed type in order to get an arithmetic right shift.
6033 However, we must also avoid introducing accidental overflows, so that
6034 a subsequent call to integer_zerop will work. Hence we must
6035 do the type conversion here. At this point, the constant is either
6036 zero or one, and the conversion to a signed type can never overflow.
6037 We could get an overflow if this conversion is done anywhere else. */
6038 if (TYPE_UNSIGNED (type))
6039 temp = fold_convert (signed_type_for (type), temp);
6040
6041 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6042 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6043 if (mask != 0)
6044 temp = const_binop (BIT_AND_EXPR, temp,
6045 fold_convert (TREE_TYPE (c), mask));
6046 /* If necessary, convert the type back to match the type of C. */
6047 if (TYPE_UNSIGNED (type))
6048 temp = fold_convert (type, temp);
6049
6050 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6051 }
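/* Worked example (illustrative): for an 8-bit mode with P == 4 and no
   MASK, C == 0xfa (the 4-bit value -6 already sign-extended) gives
     temp = sign bit 1 -> << 7 -> 0x80 -> arithmetic >> 3 -> 0xf0
     result = 0xfa ^ 0xf0 = 0x0a,
   whose extra high bits are zero.  For C == 0x0a (not sign-extended)
   the same steps give 0x0a ^ 0xf0 = 0xfa, with the extra bits set.  */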
6052 \f
6053 /* For an expression that has the form
6054 (A && B) || ~B
6055 or
6056 (A || B) && ~B,
6057 we can drop one of the inner expressions and simplify to
6058 A || ~B
6059 or
6060 A && ~B
6061 LOC is the location of the resulting expression. OP is the inner
6062 logical operation; the left-hand side in the examples above, while CMPOP
6063 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6064 removing a condition that guards another, as in
6065 (A != NULL && A->...) || A == NULL
6066 which we must not transform. If RHS_ONLY is true, only eliminate the
6067 right-most operand of the inner logical operation. */
6068
6069 static tree
6070 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6071 bool rhs_only)
6072 {
6073 tree type = TREE_TYPE (cmpop);
6074 enum tree_code code = TREE_CODE (cmpop);
6075 enum tree_code truthop_code = TREE_CODE (op);
6076 tree lhs = TREE_OPERAND (op, 0);
6077 tree rhs = TREE_OPERAND (op, 1);
6078 tree orig_lhs = lhs, orig_rhs = rhs;
6079 enum tree_code rhs_code = TREE_CODE (rhs);
6080 enum tree_code lhs_code = TREE_CODE (lhs);
6081 enum tree_code inv_code;
6082
6083 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6084 return NULL_TREE;
6085
6086 if (TREE_CODE_CLASS (code) != tcc_comparison)
6087 return NULL_TREE;
6088
6089 if (rhs_code == truthop_code)
6090 {
6091 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6092 if (newrhs != NULL_TREE)
6093 {
6094 rhs = newrhs;
6095 rhs_code = TREE_CODE (rhs);
6096 }
6097 }
6098 if (lhs_code == truthop_code && !rhs_only)
6099 {
6100 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6101 if (newlhs != NULL_TREE)
6102 {
6103 lhs = newlhs;
6104 lhs_code = TREE_CODE (lhs);
6105 }
6106 }
6107
6108 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6109 if (inv_code == rhs_code
6110 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6111 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6112 return lhs;
6113 if (!rhs_only && inv_code == lhs_code
6114 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6115 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6116 return rhs;
6117 if (rhs != orig_rhs || lhs != orig_lhs)
6118 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6119 lhs, rhs);
6120 return NULL_TREE;
6121 }
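/* Example (illustrative): for (a > 0 && b != 0) || b == 0, CMPOP is
   b == 0, whose inverse b != 0 matches the right arm of the inner
   TRUTH_ANDIF_EXPR, so that arm is dropped and the whole expression
   simplifies to a > 0 || b == 0.  Note the RHS_ONLY guard: in
   (a != NULL && a->x) || a == NULL the left arm must survive because
   it guards the dereference.  */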
6122
6123 /* Find ways of folding logical expressions of LHS and RHS:
6124 Try to merge two comparisons to the same innermost item.
6125 Look for range tests like "ch >= '0' && ch <= '9'".
6126 Look for combinations of simple terms on machines with expensive branches
6127 and evaluate the RHS unconditionally.
6128
6129 For example, if we have p->a == 2 && p->b == 4 and we can make an
6130 object large enough to span both A and B, we can do this with a comparison
6131 against the object ANDed with the a mask.
6132
6133 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6134 operations to do this with one comparison.
6135
6136    We check for both normal comparisons and the BIT_AND_EXPRs made by this
6137    function and the one above.
6138
6139 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6140 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6141
6142 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6143 two operands.
6144
6145 We return the simplified tree or 0 if no optimization is possible. */
6146
6147 static tree
6148 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6149 tree lhs, tree rhs)
6150 {
6151 /* If this is the "or" of two comparisons, we can do something if
6152 the comparisons are NE_EXPR. If this is the "and", we can do something
6153 if the comparisons are EQ_EXPR. I.e.,
6154 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6155
6156 WANTED_CODE is this operation code. For single bit fields, we can
6157 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6158 comparison for one-bit fields. */
6159
6160 enum tree_code wanted_code;
6161 enum tree_code lcode, rcode;
6162 tree ll_arg, lr_arg, rl_arg, rr_arg;
6163 tree ll_inner, lr_inner, rl_inner, rr_inner;
6164 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6165 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6166 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6167 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6168 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6169 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6170 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6171 scalar_int_mode lnmode, rnmode;
6172 tree ll_mask, lr_mask, rl_mask, rr_mask;
6173 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6174 tree l_const, r_const;
6175 tree lntype, rntype, result;
6176 HOST_WIDE_INT first_bit, end_bit;
6177 int volatilep;
6178
6179 /* Start by getting the comparison codes. Fail if anything is volatile.
6180 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6181 it were surrounded with a NE_EXPR. */
6182
6183 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6184 return 0;
6185
6186 lcode = TREE_CODE (lhs);
6187 rcode = TREE_CODE (rhs);
6188
6189 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6190 {
6191 lhs = build2 (NE_EXPR, truth_type, lhs,
6192 build_int_cst (TREE_TYPE (lhs), 0));
6193 lcode = NE_EXPR;
6194 }
6195
6196 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6197 {
6198 rhs = build2 (NE_EXPR, truth_type, rhs,
6199 build_int_cst (TREE_TYPE (rhs), 0));
6200 rcode = NE_EXPR;
6201 }
6202
6203 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6204 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6205 return 0;
6206
6207 ll_arg = TREE_OPERAND (lhs, 0);
6208 lr_arg = TREE_OPERAND (lhs, 1);
6209 rl_arg = TREE_OPERAND (rhs, 0);
6210 rr_arg = TREE_OPERAND (rhs, 1);
6211
6212 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6213 if (simple_operand_p (ll_arg)
6214 && simple_operand_p (lr_arg))
6215 {
6216 if (operand_equal_p (ll_arg, rl_arg, 0)
6217 && operand_equal_p (lr_arg, rr_arg, 0))
6218 {
6219 result = combine_comparisons (loc, code, lcode, rcode,
6220 truth_type, ll_arg, lr_arg);
6221 if (result)
6222 return result;
6223 }
6224 else if (operand_equal_p (ll_arg, rr_arg, 0)
6225 && operand_equal_p (lr_arg, rl_arg, 0))
6226 {
6227 result = combine_comparisons (loc, code, lcode,
6228 swap_tree_comparison (rcode),
6229 truth_type, ll_arg, lr_arg);
6230 if (result)
6231 return result;
6232 }
6233 }
6234
6235 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6236 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6237
6238 /* If the RHS can be evaluated unconditionally and its operands are
6239 simple, it wins to evaluate the RHS unconditionally on machines
6240 with expensive branches. In this case, this isn't a comparison
6241 that can be merged. */
6242
6243 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6244 false) >= 2
6245 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6246 && simple_operand_p (rl_arg)
6247 && simple_operand_p (rr_arg))
6248 {
6249 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6250 if (code == TRUTH_OR_EXPR
6251 && lcode == NE_EXPR && integer_zerop (lr_arg)
6252 && rcode == NE_EXPR && integer_zerop (rr_arg)
6253 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6254 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6255 return build2_loc (loc, NE_EXPR, truth_type,
6256 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6257 ll_arg, rl_arg),
6258 build_int_cst (TREE_TYPE (ll_arg), 0));
6259
6260 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6261 if (code == TRUTH_AND_EXPR
6262 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6263 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6264 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6265 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6266 return build2_loc (loc, EQ_EXPR, truth_type,
6267 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6268 ll_arg, rl_arg),
6269 build_int_cst (TREE_TYPE (ll_arg), 0));
6270 }
6271
6272 /* See if the comparisons can be merged. Then get all the parameters for
6273 each side. */
6274
6275 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6276 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6277 return 0;
6278
6279 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6280 volatilep = 0;
6281 ll_inner = decode_field_reference (loc, &ll_arg,
6282 &ll_bitsize, &ll_bitpos, &ll_mode,
6283 &ll_unsignedp, &ll_reversep, &volatilep,
6284 &ll_mask, &ll_and_mask);
6285 lr_inner = decode_field_reference (loc, &lr_arg,
6286 &lr_bitsize, &lr_bitpos, &lr_mode,
6287 &lr_unsignedp, &lr_reversep, &volatilep,
6288 &lr_mask, &lr_and_mask);
6289 rl_inner = decode_field_reference (loc, &rl_arg,
6290 &rl_bitsize, &rl_bitpos, &rl_mode,
6291 &rl_unsignedp, &rl_reversep, &volatilep,
6292 &rl_mask, &rl_and_mask);
6293 rr_inner = decode_field_reference (loc, &rr_arg,
6294 &rr_bitsize, &rr_bitpos, &rr_mode,
6295 &rr_unsignedp, &rr_reversep, &volatilep,
6296 &rr_mask, &rr_and_mask);
6297
6298   /* The inner operation on the lhs of each comparison must be the same
6299      if we are to be able to do anything.
6300 Then see if we have constants. If not, the same must be true for
6301 the rhs's. */
6302 if (volatilep
6303 || ll_reversep != rl_reversep
6304 || ll_inner == 0 || rl_inner == 0
6305 || ! operand_equal_p (ll_inner, rl_inner, 0))
6306 return 0;
6307
6308 if (TREE_CODE (lr_arg) == INTEGER_CST
6309 && TREE_CODE (rr_arg) == INTEGER_CST)
6310 {
6311 l_const = lr_arg, r_const = rr_arg;
6312 lr_reversep = ll_reversep;
6313 }
6314 else if (lr_reversep != rr_reversep
6315 || lr_inner == 0 || rr_inner == 0
6316 || ! operand_equal_p (lr_inner, rr_inner, 0))
6317 return 0;
6318 else
6319 l_const = r_const = 0;
6320
6321 /* If either comparison code is not correct for our logical operation,
6322 fail. However, we can convert a one-bit comparison against zero into
6323 the opposite comparison against that bit being set in the field. */
6324
6325 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6326 if (lcode != wanted_code)
6327 {
6328 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6329 {
6330 /* Make the left operand unsigned, since we are only interested
6331 in the value of one bit. Otherwise we are doing the wrong
6332 thing below. */
6333 ll_unsignedp = 1;
6334 l_const = ll_mask;
6335 }
6336 else
6337 return 0;
6338 }
6339
6340 /* This is analogous to the code for l_const above. */
6341 if (rcode != wanted_code)
6342 {
6343 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6344 {
6345 rl_unsignedp = 1;
6346 r_const = rl_mask;
6347 }
6348 else
6349 return 0;
6350 }
6351
6352 /* See if we can find a mode that contains both fields being compared on
6353 the left. If we can't, fail. Otherwise, update all constants and masks
6354 to be relative to a field of that size. */
6355 first_bit = MIN (ll_bitpos, rl_bitpos);
6356 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6357 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6358 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6359 volatilep, &lnmode))
6360 return 0;
6361
6362 lnbitsize = GET_MODE_BITSIZE (lnmode);
6363 lnbitpos = first_bit & ~ (lnbitsize - 1);
6364 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6365 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6366
6367 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6368 {
6369 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6370 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6371 }
6372
6373 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6374 size_int (xll_bitpos));
6375 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6376 size_int (xrl_bitpos));
6377
6378 if (l_const)
6379 {
6380 l_const = fold_convert_loc (loc, lntype, l_const);
6381 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6382 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6383 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6384 fold_build1_loc (loc, BIT_NOT_EXPR,
6385 lntype, ll_mask))))
6386 {
6387 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6388
6389 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6390 }
6391 }
6392 if (r_const)
6393 {
6394 r_const = fold_convert_loc (loc, lntype, r_const);
6395 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6396 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6397 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6398 fold_build1_loc (loc, BIT_NOT_EXPR,
6399 lntype, rl_mask))))
6400 {
6401 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6402
6403 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6404 }
6405 }
6406
6407   /* If the right sides are not constant, do the same for them.  Also,
6408 disallow this optimization if a size, signedness or storage order
6409 mismatch occurs between the left and right sides. */
6410 if (l_const == 0)
6411 {
6412 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6413 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6414 || ll_reversep != lr_reversep
6415 /* Make sure the two fields on the right
6416 correspond to the left without being swapped. */
6417 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6418 return 0;
6419
6420 first_bit = MIN (lr_bitpos, rr_bitpos);
6421 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6422 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6423 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6424 volatilep, &rnmode))
6425 return 0;
6426
6427 rnbitsize = GET_MODE_BITSIZE (rnmode);
6428 rnbitpos = first_bit & ~ (rnbitsize - 1);
6429 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6430 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6431
6432 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6433 {
6434 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6435 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6436 }
6437
6438 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6439 rntype, lr_mask),
6440 size_int (xlr_bitpos));
6441 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6442 rntype, rr_mask),
6443 size_int (xrr_bitpos));
6444
6445 /* Make a mask that corresponds to both fields being compared.
6446 Do this for both items being compared. If the operands are the
6447 same size and the bits being compared are in the same position
6448 then we can do this by masking both and comparing the masked
6449 results. */
6450 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6451 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6452 if (lnbitsize == rnbitsize
6453 && xll_bitpos == xlr_bitpos
6454 && lnbitpos >= 0
6455 && rnbitpos >= 0)
6456 {
6457 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6458 lntype, lnbitsize, lnbitpos,
6459 ll_unsignedp || rl_unsignedp, ll_reversep);
6460 if (! all_ones_mask_p (ll_mask, lnbitsize))
6461 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6462
6463 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6464 rntype, rnbitsize, rnbitpos,
6465 lr_unsignedp || rr_unsignedp, lr_reversep);
6466 if (! all_ones_mask_p (lr_mask, rnbitsize))
6467 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6468
6469 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6470 }
6471
6472 /* There is still another way we can do something: If both pairs of
6473 fields being compared are adjacent, we may be able to make a wider
6474 field containing them both.
6475
6476 Note that we still must mask the lhs/rhs expressions. Furthermore,
6477 the mask must be shifted to account for the shift done by
6478 make_bit_field_ref. */
6479 if (((ll_bitsize + ll_bitpos == rl_bitpos
6480 && lr_bitsize + lr_bitpos == rr_bitpos)
6481 || (ll_bitpos == rl_bitpos + rl_bitsize
6482 && lr_bitpos == rr_bitpos + rr_bitsize))
6483 && ll_bitpos >= 0
6484 && rl_bitpos >= 0
6485 && lr_bitpos >= 0
6486 && rr_bitpos >= 0)
6487 {
6488 tree type;
6489
6490 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6491 ll_bitsize + rl_bitsize,
6492 MIN (ll_bitpos, rl_bitpos),
6493 ll_unsignedp, ll_reversep);
6494 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6495 lr_bitsize + rr_bitsize,
6496 MIN (lr_bitpos, rr_bitpos),
6497 lr_unsignedp, lr_reversep);
6498
6499 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6500 size_int (MIN (xll_bitpos, xrl_bitpos)));
6501 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6502 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6503
6504 /* Convert to the smaller type before masking out unwanted bits. */
6505 type = lntype;
6506 if (lntype != rntype)
6507 {
6508 if (lnbitsize > rnbitsize)
6509 {
6510 lhs = fold_convert_loc (loc, rntype, lhs);
6511 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6512 type = rntype;
6513 }
6514 else if (lnbitsize < rnbitsize)
6515 {
6516 rhs = fold_convert_loc (loc, lntype, rhs);
6517 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6518 type = lntype;
6519 }
6520 }
6521
6522 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6523 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6524
6525 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6526 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6527
6528 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6529 }
6530
6531 return 0;
6532 }
6533
6534 /* Handle the case of comparisons with constants. If there is something in
6535 common between the masks, those bits of the constants must be the same.
6536 If not, the condition is always false. Test for this to avoid generating
6537 incorrect code below. */
6538 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6539 if (! integer_zerop (result)
6540 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6541 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6542 {
6543 if (wanted_code == NE_EXPR)
6544 {
6545 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6546 return constant_boolean_node (true, truth_type);
6547 }
6548 else
6549 {
6550 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6551 return constant_boolean_node (false, truth_type);
6552 }
6553 }
6554
6555 if (lnbitpos < 0)
6556 return 0;
6557
6558 /* Construct the expression we will return. First get the component
6559 reference we will make. Unless the mask is all ones the width of
6560 that field, perform the mask operation. Then compare with the
6561 merged constant. */
6562 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6563 lntype, lnbitsize, lnbitpos,
6564 ll_unsignedp || rl_unsignedp, ll_reversep);
6565
6566 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6567 if (! all_ones_mask_p (ll_mask, lnbitsize))
6568 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6569
6570 return build2_loc (loc, wanted_code, truth_type, result,
6571 const_binop (BIT_IOR_EXPR, l_const, r_const));
6572 }
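/* Example (illustrative, assuming adjacent bit-fields in one byte):
   for struct s { unsigned a : 4; unsigned b : 4; } *p, the test
   p->a == 2 && p->b == 4 can be folded into a single load and compare,
   roughly (*(unsigned char *) p) == 0x42 on a little-endian target,
   with the merged constant built by shifting and IORing the two
   original constants into place.  */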
6573 \f
6574 /* T is an integer expression that is being multiplied, divided, or reduced
6575    modulo a constant C (CODE says which operation and what kind of divide
6576    or modulus).  See if we can eliminate that operation by folding it with
6577 other operations already in T. WIDE_TYPE, if non-null, is a type that
6578 should be used for the computation if wider than our type.
6579
6580 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6581 (X * 2) + (Y * 4). We must, however, be assured that either the original
6582 expression would not overflow or that overflow is undefined for the type
6583 in the language in question.
6584
6585 If we return a non-null expression, it is an equivalent form of the
6586 original computation, but need not be in the original type.
6587
6588    We set *STRICT_OVERFLOW_P to true if the return value depends on
6589 signed overflow being undefined. Otherwise we do not change
6590 *STRICT_OVERFLOW_P. */
6591
6592 static tree
6593 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6594 bool *strict_overflow_p)
6595 {
6596 /* To avoid exponential search depth, refuse to allow recursion past
6597 three levels. Beyond that (1) it's highly unlikely that we'll find
6598 something interesting and (2) we've probably processed it before
6599 when we built the inner expression. */
6600
6601 static int depth;
6602 tree ret;
6603
6604 if (depth > 3)
6605 return NULL;
6606
6607 depth++;
6608 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6609 depth--;
6610
6611 return ret;
6612 }
6613
6614 static tree
6615 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6616 bool *strict_overflow_p)
6617 {
6618 tree type = TREE_TYPE (t);
6619 enum tree_code tcode = TREE_CODE (t);
6620 tree ctype = (wide_type != 0
6621 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6622 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6623 ? wide_type : type);
6624 tree t1, t2;
6625 int same_p = tcode == code;
6626 tree op0 = NULL_TREE, op1 = NULL_TREE;
6627 bool sub_strict_overflow_p;
6628
6629 /* Don't deal with constants of zero here; they confuse the code below. */
6630 if (integer_zerop (c))
6631 return NULL_TREE;
6632
6633 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6634 op0 = TREE_OPERAND (t, 0);
6635
6636 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6637 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6638
6639 /* Note that we need not handle conditional operations here since fold
6640 already handles those cases. So just do arithmetic here. */
6641 switch (tcode)
6642 {
6643 case INTEGER_CST:
6644 /* For a constant, we can always simplify if we are a multiply
6645 or (for divide and modulus) if it is a multiple of our constant. */
6646 if (code == MULT_EXPR
6647 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6648 TYPE_SIGN (type)))
6649 {
6650 tree tem = const_binop (code, fold_convert (ctype, t),
6651 fold_convert (ctype, c));
6652 /* If the multiplication overflowed, we lost information on it.
6653 See PR68142 and PR69845. */
6654 if (TREE_OVERFLOW (tem))
6655 return NULL_TREE;
6656 return tem;
6657 }
6658 break;
6659
6660 CASE_CONVERT: case NON_LVALUE_EXPR:
6661 /* If op0 is an expression ... */
6662 if ((COMPARISON_CLASS_P (op0)
6663 || UNARY_CLASS_P (op0)
6664 || BINARY_CLASS_P (op0)
6665 || VL_EXP_CLASS_P (op0)
6666 || EXPRESSION_CLASS_P (op0))
6667 /* ... and has wrapping overflow, and its type is smaller
6668 than ctype, then we cannot pass through as widening. */
6669 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6670 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6671 && (TYPE_PRECISION (ctype)
6672 > TYPE_PRECISION (TREE_TYPE (op0))))
6673 /* ... or this is a truncation (t is narrower than op0),
6674 then we cannot pass through this narrowing. */
6675 || (TYPE_PRECISION (type)
6676 < TYPE_PRECISION (TREE_TYPE (op0)))
6677 /* ... or signedness changes for division or modulus,
6678 then we cannot pass through this conversion. */
6679 || (code != MULT_EXPR
6680 && (TYPE_UNSIGNED (ctype)
6681 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6682 /* ... or has undefined overflow while the converted to
6683 type has not, we cannot do the operation in the inner type
6684 as that would introduce undefined overflow. */
6685 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6686 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6687 && !TYPE_OVERFLOW_UNDEFINED (type))))
6688 break;
6689
6690 /* Pass the constant down and see if we can make a simplification. If
6691 we can, replace this expression with the inner simplification for
6692 possible later conversion to our or some other type. */
6693 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6694 && TREE_CODE (t2) == INTEGER_CST
6695 && !TREE_OVERFLOW (t2)
6696 && (t1 = extract_muldiv (op0, t2, code,
6697 code == MULT_EXPR ? ctype : NULL_TREE,
6698 strict_overflow_p)) != 0)
6699 return t1;
6700 break;
6701
6702 case ABS_EXPR:
6703 /* If widening the type changes it from signed to unsigned, then we
6704 must avoid building ABS_EXPR itself as unsigned. */
6705 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6706 {
6707 tree cstype = (*signed_type_for) (ctype);
6708 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6709 != 0)
6710 {
6711 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6712 return fold_convert (ctype, t1);
6713 }
6714 break;
6715 }
6716 /* If the constant is negative, we cannot simplify this. */
6717 if (tree_int_cst_sgn (c) == -1)
6718 break;
6719 /* FALLTHROUGH */
6720 case NEGATE_EXPR:
6721 /* For division and modulus, type can't be unsigned, as e.g.
6722 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6723 For signed types, even with wrapping overflow, this is fine. */
6724 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6725 break;
6726 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6727 != 0)
6728 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6729 break;
6730
6731 case MIN_EXPR: case MAX_EXPR:
6732 /* If widening the type changes the signedness, then we can't perform
6733 this optimization as that changes the result. */
6734 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6735 break;
6736
6737 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6738 sub_strict_overflow_p = false;
6739 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6740 &sub_strict_overflow_p)) != 0
6741 && (t2 = extract_muldiv (op1, c, code, wide_type,
6742 &sub_strict_overflow_p)) != 0)
6743 {
6744 if (tree_int_cst_sgn (c) < 0)
6745 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6746 if (sub_strict_overflow_p)
6747 *strict_overflow_p = true;
6748 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6749 fold_convert (ctype, t2));
6750 }
6751 break;
6752
6753 case LSHIFT_EXPR: case RSHIFT_EXPR:
6754 /* If the second operand is constant, this is a multiplication
6755	 or floor division by a power of two, so we can treat it that
6756 way unless the multiplier or divisor overflows. Signed
6757 left-shift overflow is implementation-defined rather than
6758 undefined in C90, so do not convert signed left shift into
6759 multiplication. */
6760 if (TREE_CODE (op1) == INTEGER_CST
6761 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6762 /* const_binop may not detect overflow correctly,
6763 so check for it explicitly here. */
6764 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6765 wi::to_wide (op1))
6766 && (t1 = fold_convert (ctype,
6767 const_binop (LSHIFT_EXPR, size_one_node,
6768 op1))) != 0
6769 && !TREE_OVERFLOW (t1))
6770 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6771 ? MULT_EXPR : FLOOR_DIV_EXPR,
6772 ctype,
6773 fold_convert (ctype, op0),
6774 t1),
6775 c, code, wide_type, strict_overflow_p);
6776 break;
6777
6778 case PLUS_EXPR: case MINUS_EXPR:
6779 /* See if we can eliminate the operation on both sides. If we can, we
6780 can return a new PLUS or MINUS. If we can't, the only remaining
6781 cases where we can do anything are if the second operand is a
6782 constant. */
6783 sub_strict_overflow_p = false;
6784 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6785 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6786 if (t1 != 0 && t2 != 0
6787 && TYPE_OVERFLOW_WRAPS (ctype)
6788 && (code == MULT_EXPR
6789 /* If not multiplication, we can only do this if both operands
6790 are divisible by c. */
6791 || (multiple_of_p (ctype, op0, c)
6792 && multiple_of_p (ctype, op1, c))))
6793 {
6794 if (sub_strict_overflow_p)
6795 *strict_overflow_p = true;
6796 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6797 fold_convert (ctype, t2));
6798 }
6799
6800 /* If this was a subtraction, negate OP1 and set it to be an addition.
6801 This simplifies the logic below. */
6802 if (tcode == MINUS_EXPR)
6803 {
6804 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6805 /* If OP1 was not easily negatable, the constant may be OP0. */
6806 if (TREE_CODE (op0) == INTEGER_CST)
6807 {
6808 std::swap (op0, op1);
6809 std::swap (t1, t2);
6810 }
6811 }
6812
6813 if (TREE_CODE (op1) != INTEGER_CST)
6814 break;
6815
6816       /* If either OP1 or C is negative, this optimization is not safe for
6817	  some of the division and remainder types, while for others we need
6818	  to change the code. */
6819 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6820 {
6821 if (code == CEIL_DIV_EXPR)
6822 code = FLOOR_DIV_EXPR;
6823 else if (code == FLOOR_DIV_EXPR)
6824 code = CEIL_DIV_EXPR;
6825 else if (code != MULT_EXPR
6826 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6827 break;
6828 }
6829
6830 /* If it's a multiply or a division/modulus operation of a multiple
6831 of our constant, do the operation and verify it doesn't overflow. */
6832 if (code == MULT_EXPR
6833 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6834 TYPE_SIGN (type)))
6835 {
6836 op1 = const_binop (code, fold_convert (ctype, op1),
6837 fold_convert (ctype, c));
6838 /* We allow the constant to overflow with wrapping semantics. */
6839 if (op1 == 0
6840 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6841 break;
6842 }
6843 else
6844 break;
6845
6846 /* If we have an unsigned type, we cannot widen the operation since it
6847 will change the result if the original computation overflowed. */
6848 if (TYPE_UNSIGNED (ctype) && ctype != type)
6849 break;
6850
6851 /* The last case is if we are a multiply. In that case, we can
6852 apply the distributive law to commute the multiply and addition
6853 if the multiplication of the constants doesn't overflow
6854 and overflow is defined. With undefined overflow
6855 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6856 But fold_plusminus_mult_expr would factor back any power-of-two
6857 value so do not distribute in the first place in this case. */
6858 if (code == MULT_EXPR
6859 && TYPE_OVERFLOW_WRAPS (ctype)
6860 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6861 return fold_build2 (tcode, ctype,
6862 fold_build2 (code, ctype,
6863 fold_convert (ctype, op0),
6864 fold_convert (ctype, c)),
6865 op1);
6866
6867 break;
6868
6869 case MULT_EXPR:
6870 /* We have a special case here if we are doing something like
6871 (C * 8) % 4 since we know that's zero. */
6872 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6873 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6874 /* If the multiplication can overflow we cannot optimize this. */
6875 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6876 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6877 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6878 TYPE_SIGN (type)))
6879 {
6880 *strict_overflow_p = true;
6881 return omit_one_operand (type, integer_zero_node, op0);
6882 }
6883
6884 /* ... fall through ... */
6885
6886 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6887 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6888 /* If we can extract our operation from the LHS, do so and return a
6889 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6890 do something only if the second operand is a constant. */
6891 if (same_p
6892 && TYPE_OVERFLOW_WRAPS (ctype)
6893 && (t1 = extract_muldiv (op0, c, code, wide_type,
6894 strict_overflow_p)) != 0)
6895 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6896 fold_convert (ctype, op1));
6897 else if (tcode == MULT_EXPR && code == MULT_EXPR
6898 && TYPE_OVERFLOW_WRAPS (ctype)
6899 && (t1 = extract_muldiv (op1, c, code, wide_type,
6900 strict_overflow_p)) != 0)
6901 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6902 fold_convert (ctype, t1));
6903 else if (TREE_CODE (op1) != INTEGER_CST)
6904 return 0;
6905
6906 /* If these are the same operation types, we can associate them
6907 assuming no overflow. */
6908 if (tcode == code)
6909 {
6910 bool overflow_p = false;
6911 wi::overflow_type overflow_mul;
6912 signop sign = TYPE_SIGN (ctype);
6913 unsigned prec = TYPE_PRECISION (ctype);
6914 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6915 wi::to_wide (c, prec),
6916 sign, &overflow_mul);
6917 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6918 if (overflow_mul
6919 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6920 overflow_p = true;
6921 if (!overflow_p)
6922 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6923 wide_int_to_tree (ctype, mul));
6924 }
6925
6926 /* If these operations "cancel" each other, we have the main
6927 optimizations of this pass, which occur when either constant is a
6928	 multiple of the other, in which case we replace this with an
6929	 operation of either CODE or TCODE.
6930
6931 If we have an unsigned type, we cannot do this since it will change
6932 the result if the original computation overflowed. */
6933 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6934 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6935 || (tcode == MULT_EXPR
6936 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6937 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6938 && code != MULT_EXPR)))
6939 {
6940 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6941 TYPE_SIGN (type)))
6942 {
6943 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6944 *strict_overflow_p = true;
6945 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6946 fold_convert (ctype,
6947 const_binop (TRUNC_DIV_EXPR,
6948 op1, c)));
6949 }
6950 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6951 TYPE_SIGN (type)))
6952 {
6953 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6954 *strict_overflow_p = true;
6955 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6956 fold_convert (ctype,
6957 const_binop (TRUNC_DIV_EXPR,
6958 c, op1)));
6959 }
6960 }
6961 break;
6962
6963 default:
6964 break;
6965 }
6966
6967 return 0;
6968 }
6969 \f
6970 /* Return a node which has the indicated constant VALUE (either 0 or
6971 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6972 and is of the indicated TYPE. */
6973
6974 tree
6975 constant_boolean_node (bool value, tree type)
6976 {
6977 if (type == integer_type_node)
6978 return value ? integer_one_node : integer_zero_node;
6979 else if (type == boolean_type_node)
6980 return value ? boolean_true_node : boolean_false_node;
6981 else if (TREE_CODE (type) == VECTOR_TYPE)
6982 return build_vector_from_val (type,
6983 build_int_cst (TREE_TYPE (type),
6984 value ? -1 : 0));
6985 else
6986 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6987 }
6988
6989
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */
6998
6999 static tree
7000 fold_binary_op_with_conditional_arg (location_t loc,
7001 enum tree_code code,
7002 tree type, tree op0, tree op1,
7003 tree cond, tree arg, int cond_first_p)
7004 {
7005 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7006 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7007 tree test, true_value, false_value;
7008 tree lhs = NULL_TREE;
7009 tree rhs = NULL_TREE;
7010 enum tree_code cond_code = COND_EXPR;
7011
7012 /* Do not move possibly trapping operations into the conditional as this
7013 pessimizes code and causes gimplification issues when applied late. */
7014 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7015 ANY_INTEGRAL_TYPE_P (type)
7016 && TYPE_OVERFLOW_TRAPS (type), op1))
7017 return NULL_TREE;
7018
7019 if (TREE_CODE (cond) == COND_EXPR
7020 || TREE_CODE (cond) == VEC_COND_EXPR)
7021 {
7022 test = TREE_OPERAND (cond, 0);
7023 true_value = TREE_OPERAND (cond, 1);
7024 false_value = TREE_OPERAND (cond, 2);
      /* If this operand is a void expression (such as a throw), it
	 does not make sense to try to perform a logical or arithmetic
	 operation involving it.  */
7028 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7029 lhs = true_value;
7030 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7031 rhs = false_value;
7032 }
7033 else if (!(TREE_CODE (type) != VECTOR_TYPE
7034 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7035 {
7036 tree testtype = TREE_TYPE (cond);
7037 test = cond;
7038 true_value = constant_boolean_node (true, testtype);
7039 false_value = constant_boolean_node (false, testtype);
7040 }
7041 else
7042 /* Detect the case of mixing vector and scalar types - bail out. */
7043 return NULL_TREE;
7044
7045 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7046 cond_code = VEC_COND_EXPR;
7047
  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the COND_EXPR.  */
7051 if (!TREE_CONSTANT (arg)
7052 && (TREE_SIDE_EFFECTS (arg)
7053 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7054 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7055 return NULL_TREE;
7056
7057 arg = fold_convert_loc (loc, arg_type, arg);
7058 if (lhs == 0)
7059 {
7060 true_value = fold_convert_loc (loc, cond_type, true_value);
7061 if (cond_first_p)
7062 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7063 else
7064 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7065 }
7066 if (rhs == 0)
7067 {
7068 false_value = fold_convert_loc (loc, cond_type, false_value);
7069 if (cond_first_p)
7070 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7071 else
7072 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7073 }
7074
7075 /* Check that we have simplified at least one of the branches. */
7076 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7077 return NULL_TREE;
7078
7079 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7080 }
7081
7082 \f
7083 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7084
7085 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7086 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
7087 ADDEND is the same as X.
7088
7089 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7090 and finite. The problematic cases are when X is zero, and its mode
7091 has signed zeros. In the case of rounding towards -infinity,
7092 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7093 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7094
7095 bool
7096 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7097 {
7098 if (!real_zerop (addend))
7099 return false;
7100
7101 /* Don't allow the fold with -fsignaling-nans. */
7102 if (HONOR_SNANS (type))
7103 return false;
7104
7105 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7106 if (!HONOR_SIGNED_ZEROS (type))
7107 return true;
7108
7109 /* There is no case that is safe for all rounding modes. */
7110 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7111 return false;
7112
7113 /* In a vector or complex, we would need to check the sign of all zeros. */
7114 if (TREE_CODE (addend) == VECTOR_CST)
7115 addend = uniform_vector_p (addend);
7116 if (!addend || TREE_CODE (addend) != REAL_CST)
7117 return false;
7118
7119 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7120 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7121 negate = !negate;
7122
7123 /* The mode has signed zeros, and we have to honor their sign.
7124 In this situation, there is only one case we can return true for.
7125 X - 0 is the same as X with default rounding. */
7126 return negate;
7127 }
7128
/* Subroutine of match.pd that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  The bounds that
   the caller should compare X against are stored in *LO and *HI, and
   *NEG_OVERFLOW records whether computing them overflowed in the
   negative direction.  The comparison code for the caller to use
   (swapped if C1 is negative) is returned.  */
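/* E.g. with unsigned operands, CODE == EQ_EXPR, C1 == 4 and C2 == 3,
   X/4 == 3 holds exactly for X in [12, 15]: *LO is set to 12 (= C1*C2)
   and *HI to 15 (= C1*C2 + C1-1).  */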
7135
7136 enum tree_code
7137 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7138 tree *hi, bool *neg_overflow)
7139 {
7140 tree prod, tmp, type = TREE_TYPE (c1);
7141 signop sign = TYPE_SIGN (type);
7142 wi::overflow_type overflow;
7143
7144 /* We have to do this the hard way to detect unsigned overflow.
7145 prod = int_const_binop (MULT_EXPR, c1, c2); */
7146 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7147 prod = force_fit_type (type, val, -1, overflow);
7148 *neg_overflow = false;
7149
7150 if (sign == UNSIGNED)
7151 {
7152 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7153 *lo = prod;
7154
7155 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7156 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7157 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7158 }
7159 else if (tree_int_cst_sgn (c1) >= 0)
7160 {
7161 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7162 switch (tree_int_cst_sgn (c2))
7163 {
7164 case -1:
7165 *neg_overflow = true;
7166 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7167 *hi = prod;
7168 break;
7169
7170 case 0:
7171 *lo = fold_negate_const (tmp, type);
7172 *hi = tmp;
7173 break;
7174
7175 case 1:
7176 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7177 *lo = prod;
7178 break;
7179
7180 default:
7181 gcc_unreachable ();
7182 }
7183 }
7184 else
7185 {
7186 /* A negative divisor reverses the relational operators. */
7187 code = swap_tree_comparison (code);
7188
7189 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7190 switch (tree_int_cst_sgn (c2))
7191 {
7192 case -1:
7193 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7194 *lo = prod;
7195 break;
7196
7197 case 0:
7198 *hi = fold_negate_const (tmp, type);
7199 *lo = tmp;
7200 break;
7201
7202 case 1:
7203 *neg_overflow = true;
7204 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7205 *hi = prod;
7206 break;
7207
7208 default:
7209 gcc_unreachable ();
7210 }
7211 }
7212
7213 if (code != EQ_EXPR && code != NE_EXPR)
7214 return code;
7215
7216 if (TREE_OVERFLOW (*lo)
7217 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7218 *lo = NULL_TREE;
7219 if (TREE_OVERFLOW (*hi)
7220 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7221 *hi = NULL_TREE;
7222
7223 return code;
7224 }
7225
7226
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL_TREE.  RESULT_TYPE is the
   desired result type.  */
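/* E.g. for a 32-bit int A, (A & 0x80000000) != 0 tests only the sign
   bit and becomes A < 0, and (A & 0x80000000) == 0 becomes A >= 0.  */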
7231
7232 static tree
7233 fold_single_bit_test_into_sign_test (location_t loc,
7234 enum tree_code code, tree arg0, tree arg1,
7235 tree result_type)
7236 {
7237 /* If this is testing a single bit, we can optimize the test. */
7238 if ((code == NE_EXPR || code == EQ_EXPR)
7239 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7240 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7241 {
7242 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7243 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7244 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7245
7246 if (arg00 != NULL_TREE
7247 /* This is only a win if casting to a signed type is cheap,
7248 i.e. when arg00's type is not a partial mode. */
7249 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7250 {
7251 tree stype = signed_type_for (TREE_TYPE (arg00));
7252 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7253 result_type,
7254 fold_convert_loc (loc, stype, arg00),
7255 build_int_cst (stype, 0));
7256 }
7257 }
7258
7259 return NULL_TREE;
7260 }
7261
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL_TREE.  RESULT_TYPE is the desired result type.  */
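/* E.g. (A & 8) != 0 becomes (A >> 3) & 1, computed in an intermediate
   (usually unsigned) type, and (A & 8) == 0 becomes ((A >> 3) ^ 1) & 1,
   unless the sign-bit form above applies.  */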
7266
7267 tree
7268 fold_single_bit_test (location_t loc, enum tree_code code,
7269 tree arg0, tree arg1, tree result_type)
7270 {
7271 /* If this is testing a single bit, we can optimize the test. */
7272 if ((code == NE_EXPR || code == EQ_EXPR)
7273 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7274 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7275 {
7276 tree inner = TREE_OPERAND (arg0, 0);
7277 tree type = TREE_TYPE (arg0);
7278 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7279 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7280 int ops_unsigned;
7281 tree signed_type, unsigned_type, intermediate_type;
7282 tree tem, one;
7283
7284 /* First, see if we can fold the single bit test into a sign-bit
7285 test. */
7286 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7287 result_type);
7288 if (tem)
7289 return tem;
7290
      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1), where C2 = log2(C).
	 Similarly for (A & C) == 0.  */
7294
7295 /* If INNER is a right shift of a constant and it plus BITNUM does
7296 not overflow, adjust BITNUM and INNER. */
7297 if (TREE_CODE (inner) == RSHIFT_EXPR
7298 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7299 && bitnum < TYPE_PRECISION (type)
7300 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7301 TYPE_PRECISION (type) - bitnum))
7302 {
7303 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7304 inner = TREE_OPERAND (inner, 0);
7305 }
7306
7307 /* If we are going to be able to omit the AND below, we must do our
7308 operations as unsigned. If we must use the AND, we have a choice.
7309 Normally unsigned is faster, but for some machines signed is. */
7310 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7311 && !flag_syntax_only) ? 0 : 1;
7312
7313 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7314 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7315 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7316 inner = fold_convert_loc (loc, intermediate_type, inner);
7317
7318 if (bitnum != 0)
7319 inner = build2 (RSHIFT_EXPR, intermediate_type,
7320 inner, size_int (bitnum));
7321
7322 one = build_int_cst (intermediate_type, 1);
7323
7324 if (code == EQ_EXPR)
7325 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7326
7327 /* Put the AND last so it can combine with more things. */
7328 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7329
7330 /* Make sure to return the proper type. */
7331 inner = fold_convert_loc (loc, result_type, inner);
7332
7333 return inner;
7334 }
7335 return NULL_TREE;
7336 }
7337
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  */
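/* E.g. for 1 + x this returns true, so callers of commutative codes
   canonicalize the expression as x + 1, keeping constants last.  */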
7341
7342 bool
7343 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7344 {
7345 if (CONSTANT_CLASS_P (arg1))
7346 return 0;
7347 if (CONSTANT_CLASS_P (arg0))
7348 return 1;
7349
7350 STRIP_NOPS (arg0);
7351 STRIP_NOPS (arg1);
7352
7353 if (TREE_CONSTANT (arg1))
7354 return 0;
7355 if (TREE_CONSTANT (arg0))
7356 return 1;
7357
7358 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7359 for commutative and comparison operators. Ensuring a canonical
7360 form allows the optimizers to find additional redundancies without
7361 having to explicitly check for both orderings. */
7362 if (TREE_CODE (arg0) == SSA_NAME
7363 && TREE_CODE (arg1) == SSA_NAME
7364 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7365 return 1;
7366
7367 /* Put SSA_NAMEs last. */
7368 if (TREE_CODE (arg1) == SSA_NAME)
7369 return 0;
7370 if (TREE_CODE (arg0) == SSA_NAME)
7371 return 1;
7372
7373 /* Put variables last. */
7374 if (DECL_P (arg1))
7375 return 0;
7376 if (DECL_P (arg0))
7377 return 1;
7378
7379 return 0;
7380 }
7381
7382
7383 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7384 means A >= Y && A != MAX, but in this case we know that
7385 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7386
7387 static tree
7388 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7389 {
7390 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7391
7392 if (TREE_CODE (bound) == LT_EXPR)
7393 a = TREE_OPERAND (bound, 0);
7394 else if (TREE_CODE (bound) == GT_EXPR)
7395 a = TREE_OPERAND (bound, 1);
7396 else
7397 return NULL_TREE;
7398
7399 typea = TREE_TYPE (a);
7400 if (!INTEGRAL_TYPE_P (typea)
7401 && !POINTER_TYPE_P (typea))
7402 return NULL_TREE;
7403
7404 if (TREE_CODE (ineq) == LT_EXPR)
7405 {
7406 a1 = TREE_OPERAND (ineq, 1);
7407 y = TREE_OPERAND (ineq, 0);
7408 }
7409 else if (TREE_CODE (ineq) == GT_EXPR)
7410 {
7411 a1 = TREE_OPERAND (ineq, 0);
7412 y = TREE_OPERAND (ineq, 1);
7413 }
7414 else
7415 return NULL_TREE;
7416
7417 if (TREE_TYPE (a1) != typea)
7418 return NULL_TREE;
7419
7420 if (POINTER_TYPE_P (typea))
7421 {
      /* Convert the pointers to integers before taking their difference.  */
7423 tree ta = fold_convert_loc (loc, ssizetype, a);
7424 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7425 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7426 }
7427 else
7428 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7429
7430 if (!diff || !integer_onep (diff))
7431 return NULL_TREE;
7432
7433 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7434 }
7435
7436 /* Fold a sum or difference of at least one multiplication.
7437 Returns the folded tree or NULL if no simplification could be made. */
7438
7439 static tree
7440 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7441 tree arg0, tree arg1)
7442 {
7443 tree arg00, arg01, arg10, arg11;
7444 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7445
7446 /* (A * C) +- (B * C) -> (A+-B) * C.
7447 (A * C) +- A -> A * (C+-1).
7448 We are most concerned about the case where C is a constant,
7449 but other combinations show up during loop reduction. Since
7450 it is not difficult, try all four possibilities. */
7451
7452 if (TREE_CODE (arg0) == MULT_EXPR)
7453 {
7454 arg00 = TREE_OPERAND (arg0, 0);
7455 arg01 = TREE_OPERAND (arg0, 1);
7456 }
7457 else if (TREE_CODE (arg0) == INTEGER_CST)
7458 {
7459 arg00 = build_one_cst (type);
7460 arg01 = arg0;
7461 }
7462 else
7463 {
7464 /* We cannot generate constant 1 for fract. */
7465 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7466 return NULL_TREE;
7467 arg00 = arg0;
7468 arg01 = build_one_cst (type);
7469 }
7470 if (TREE_CODE (arg1) == MULT_EXPR)
7471 {
7472 arg10 = TREE_OPERAND (arg1, 0);
7473 arg11 = TREE_OPERAND (arg1, 1);
7474 }
7475 else if (TREE_CODE (arg1) == INTEGER_CST)
7476 {
7477 arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2, get rid of that sign for
	 the purpose of this canonicalization.  */
7480 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7481 && negate_expr_p (arg1)
7482 && code == PLUS_EXPR)
7483 {
7484 arg11 = negate_expr (arg1);
7485 code = MINUS_EXPR;
7486 }
7487 else
7488 arg11 = arg1;
7489 }
7490 else
7491 {
7492 /* We cannot generate constant 1 for fract. */
7493 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7494 return NULL_TREE;
7495 arg10 = arg1;
7496 arg11 = build_one_cst (type);
7497 }
7498 same = NULL_TREE;
7499
7500 /* Prefer factoring a common non-constant. */
7501 if (operand_equal_p (arg00, arg10, 0))
7502 same = arg00, alt0 = arg01, alt1 = arg11;
7503 else if (operand_equal_p (arg01, arg11, 0))
7504 same = arg01, alt0 = arg00, alt1 = arg10;
7505 else if (operand_equal_p (arg00, arg11, 0))
7506 same = arg00, alt0 = arg01, alt1 = arg10;
7507 else if (operand_equal_p (arg01, arg10, 0))
7508 same = arg01, alt0 = arg00, alt1 = arg11;
7509
7510 /* No identical multiplicands; see if we can find a common
7511 power-of-two factor in non-power-of-two multiplies. This
7512 can help in multi-dimensional array access. */
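  /* E.g. i * 12 + j * 4 becomes (i * 3 + j) * 4 here.  */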
7513 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7514 {
7515 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7516 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7517 HOST_WIDE_INT tmp;
7518 bool swap = false;
7519 tree maybe_same;
7520
7521 /* Move min of absolute values to int11. */
7522 if (absu_hwi (int01) < absu_hwi (int11))
7523 {
7524 tmp = int01, int01 = int11, int11 = tmp;
7525 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7526 maybe_same = arg01;
7527 swap = true;
7528 }
7529 else
7530 maybe_same = arg11;
7531
7532 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7533 if (factor > 1
7534 && pow2p_hwi (factor)
7535 && (int01 & (factor - 1)) == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
	     increases the number of multiplications necessary.  */
7539 && TREE_CODE (arg10) != INTEGER_CST)
7540 {
7541 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7542 build_int_cst (TREE_TYPE (arg00),
7543 int01 / int11));
7544 alt1 = arg10;
7545 same = maybe_same;
7546 if (swap)
7547 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7548 }
7549 }
7550
7551 if (!same)
7552 return NULL_TREE;
7553
7554 if (! ANY_INTEGRAL_TYPE_P (type)
7555 || TYPE_OVERFLOW_WRAPS (type)
7556 /* We are neither factoring zero nor minus one. */
7557 || TREE_CODE (same) == INTEGER_CST)
7558 return fold_build2_loc (loc, MULT_EXPR, type,
7559 fold_build2_loc (loc, code, type,
7560 fold_convert_loc (loc, type, alt0),
7561 fold_convert_loc (loc, type, alt1)),
7562 fold_convert_loc (loc, type, same));
7563
7564 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7565 same may be minus one and thus the multiplication may overflow. Perform
7566 the sum operation in an unsigned type. */
7567 tree utype = unsigned_type_for (type);
7568 tree tem = fold_build2_loc (loc, code, utype,
7569 fold_convert_loc (loc, utype, alt0),
7570 fold_convert_loc (loc, utype, alt1));
  /* If the sum evaluated to a constant that is not -INF, the
     multiplication cannot overflow.  */
7573 if (TREE_CODE (tem) == INTEGER_CST
7574 && (wi::to_wide (tem)
7575 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7576 return fold_build2_loc (loc, MULT_EXPR, type,
7577 fold_convert (type, tem), same);
7578
7579 /* Do not resort to unsigned multiplication because
7580 we lose the no-overflow property of the expression. */
7581 return NULL_TREE;
7582 }
7583
7584 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7585 specified by EXPR into the buffer PTR of length LEN bytes.
7586 Return the number of bytes placed in the buffer, or zero
7587 upon failure. */
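/* E.g. on a 32-bit little-endian target, the INTEGER_CST 0x11223344 is
   encoded as the four bytes 0x44 0x33 0x22 0x11, least significant
   byte first.  */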
7588
7589 static int
7590 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7591 {
7592 tree type = TREE_TYPE (expr);
7593 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7594 int byte, offset, word, words;
7595 unsigned char value;
7596
7597 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7598 return 0;
7599 if (off == -1)
7600 off = 0;
7601
7602 if (ptr == NULL)
7603 /* Dry run. */
7604 return MIN (len, total_bytes - off);
7605
7606 words = total_bytes / UNITS_PER_WORD;
7607
7608 for (byte = 0; byte < total_bytes; byte++)
7609 {
7610 int bitpos = byte * BITS_PER_UNIT;
7611 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7612 number of bytes. */
7613 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7614
7615 if (total_bytes > UNITS_PER_WORD)
7616 {
7617 word = byte / UNITS_PER_WORD;
7618 if (WORDS_BIG_ENDIAN)
7619 word = (words - 1) - word;
7620 offset = word * UNITS_PER_WORD;
7621 if (BYTES_BIG_ENDIAN)
7622 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7623 else
7624 offset += byte % UNITS_PER_WORD;
7625 }
7626 else
7627 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7628 if (offset >= off && offset - off < len)
7629 ptr[offset - off] = value;
7630 }
7631 return MIN (len, total_bytes - off);
7632 }
7633
7634
7635 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7636 specified by EXPR into the buffer PTR of length LEN bytes.
7637 Return the number of bytes placed in the buffer, or zero
7638 upon failure. */
7639
7640 static int
7641 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7642 {
7643 tree type = TREE_TYPE (expr);
7644 scalar_mode mode = SCALAR_TYPE_MODE (type);
7645 int total_bytes = GET_MODE_SIZE (mode);
7646 FIXED_VALUE_TYPE value;
7647 tree i_value, i_type;
7648
7649 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7650 return 0;
7651
7652 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7653
  /* TYPE_PRECISION is counted in bits, TOTAL_BYTES in bytes.  */
  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;
7656
7657 value = TREE_FIXED_CST (expr);
7658 i_value = double_int_to_tree (i_type, value.data);
7659
7660 return native_encode_int (i_value, ptr, len, off);
7661 }
7662
7663
7664 /* Subroutine of native_encode_expr. Encode the REAL_CST
7665 specified by EXPR into the buffer PTR of length LEN bytes.
7666 Return the number of bytes placed in the buffer, or zero
7667 upon failure. */
7668
7669 static int
7670 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7671 {
7672 tree type = TREE_TYPE (expr);
7673 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7674 int byte, offset, word, words, bitpos;
7675 unsigned char value;
7676
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
7680 long tmp[6];
7681
7682 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7683 return 0;
7684 if (off == -1)
7685 off = 0;
7686
7687 if (ptr == NULL)
7688 /* Dry run. */
7689 return MIN (len, total_bytes - off);
7690
7691 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7692
7693 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7694
7695 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7696 bitpos += BITS_PER_UNIT)
7697 {
7698 byte = (bitpos / BITS_PER_UNIT) & 3;
7699 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7700
7701 if (UNITS_PER_WORD < 4)
7702 {
7703 word = byte / UNITS_PER_WORD;
7704 if (WORDS_BIG_ENDIAN)
7705 word = (words - 1) - word;
7706 offset = word * UNITS_PER_WORD;
7707 if (BYTES_BIG_ENDIAN)
7708 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7709 else
7710 offset += byte % UNITS_PER_WORD;
7711 }
7712 else
7713 {
7714 offset = byte;
7715 if (BYTES_BIG_ENDIAN)
7716 {
7717 /* Reverse bytes within each long, or within the entire float
7718 if it's smaller than a long (for HFmode). */
7719 offset = MIN (3, total_bytes - 1) - offset;
7720 gcc_assert (offset >= 0);
7721 }
7722 }
7723 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7724 if (offset >= off
7725 && offset - off < len)
7726 ptr[offset - off] = value;
7727 }
7728 return MIN (len, total_bytes - off);
7729 }
7730
7731 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7732 specified by EXPR into the buffer PTR of length LEN bytes.
7733 Return the number of bytes placed in the buffer, or zero
7734 upon failure. */
7735
7736 static int
7737 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7738 {
7739 int rsize, isize;
7740 tree part;
7741
7742 part = TREE_REALPART (expr);
7743 rsize = native_encode_expr (part, ptr, len, off);
7744 if (off == -1 && rsize == 0)
7745 return 0;
7746 part = TREE_IMAGPART (expr);
7747 if (off != -1)
7748 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7749 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7750 len - rsize, off);
7751 if (off == -1 && isize != rsize)
7752 return 0;
7753 return rsize + isize;
7754 }
7755
7756 /* Like native_encode_vector, but only encode the first COUNT elements.
7757 The other arguments are as for native_encode_vector. */
7758
7759 static int
7760 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7761 int off, unsigned HOST_WIDE_INT count)
7762 {
7763 tree itype = TREE_TYPE (TREE_TYPE (expr));
7764 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7765 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7766 {
7767 /* This is the only case in which elements can be smaller than a byte.
7768 Element 0 is always in the lsb of the containing byte. */
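      /* E.g. with 1-bit elements, elements 0-7 land in bits 0-7 of
	 byte 0, elements 8-15 in byte 1, and so on.  */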
7769 unsigned int elt_bits = TYPE_PRECISION (itype);
7770 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7771 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7772 return 0;
7773
7774 if (off == -1)
7775 off = 0;
7776
7777 /* Zero the buffer and then set bits later where necessary. */
7778 int extract_bytes = MIN (len, total_bytes - off);
7779 if (ptr)
7780 memset (ptr, 0, extract_bytes);
7781
7782 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7783 unsigned int first_elt = off * elts_per_byte;
7784 unsigned int extract_elts = extract_bytes * elts_per_byte;
7785 for (unsigned int i = 0; i < extract_elts; ++i)
7786 {
7787 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7788 if (TREE_CODE (elt) != INTEGER_CST)
7789 return 0;
7790
7791 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7792 {
7793 unsigned int bit = i * elt_bits;
7794 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7795 }
7796 }
7797 return extract_bytes;
7798 }
7799
7800 int offset = 0;
7801 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7802 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7803 {
7804 if (off >= size)
7805 {
7806 off -= size;
7807 continue;
7808 }
7809 tree elem = VECTOR_CST_ELT (expr, i);
7810 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7811 len - offset, off);
7812 if ((off == -1 && res != size) || res == 0)
7813 return 0;
7814 offset += res;
7815 if (offset >= len)
7816 return (off == -1 && i < count - 1) ? 0 : offset;
7817 if (off != -1)
7818 off = 0;
7819 }
7820 return offset;
7821 }
7822
7823 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7824 specified by EXPR into the buffer PTR of length LEN bytes.
7825 Return the number of bytes placed in the buffer, or zero
7826 upon failure. */
7827
7828 static int
7829 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7830 {
7831 unsigned HOST_WIDE_INT count;
7832 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7833 return 0;
7834 return native_encode_vector_part (expr, ptr, len, off, count);
7835 }
7836
7837
7838 /* Subroutine of native_encode_expr. Encode the STRING_CST
7839 specified by EXPR into the buffer PTR of length LEN bytes.
7840 Return the number of bytes placed in the buffer, or zero
7841 upon failure. */
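/* E.g. for a char[8] object initialized with "hi", the leading bytes
   come from the STRING_CST and the rest of the encoding is
   zero-filled.  */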
7842
7843 static int
7844 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7845 {
7846 tree type = TREE_TYPE (expr);
7847
  /* Wide-char strings are encoded in target byte order, so encoding
     them natively is trivial.  */
7850 if (BITS_PER_UNIT != CHAR_BIT
7851 || TREE_CODE (type) != ARRAY_TYPE
7852 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7853 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7854 return 0;
7855
7856 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7857 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7858 return 0;
7859 if (off == -1)
7860 off = 0;
7861 len = MIN (total_bytes - off, len);
7862 if (ptr == NULL)
7863 /* Dry run. */;
7864 else
7865 {
7866 int written = 0;
7867 if (off < TREE_STRING_LENGTH (expr))
7868 {
7869 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7870 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7871 }
7872 memset (ptr + written, 0, len - written);
7873 }
7874 return len;
7875 }
7876
7877
7878 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7879 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7880 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7881 anything, just do a dry run. If OFF is not -1 then start
7882 the encoding at byte offset OFF and encode at most LEN bytes.
7883 Return the number of bytes placed in the buffer, or zero upon failure. */
7884
7885 int
7886 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7887 {
  /* We don't support starting at a negative offset, and -1 is special.  */
7889 if (off < -1)
7890 return 0;
7891
7892 switch (TREE_CODE (expr))
7893 {
7894 case INTEGER_CST:
7895 return native_encode_int (expr, ptr, len, off);
7896
7897 case REAL_CST:
7898 return native_encode_real (expr, ptr, len, off);
7899
7900 case FIXED_CST:
7901 return native_encode_fixed (expr, ptr, len, off);
7902
7903 case COMPLEX_CST:
7904 return native_encode_complex (expr, ptr, len, off);
7905
7906 case VECTOR_CST:
7907 return native_encode_vector (expr, ptr, len, off);
7908
7909 case STRING_CST:
7910 return native_encode_string (expr, ptr, len, off);
7911
7912 default:
7913 return 0;
7914 }
7915 }
7916
7917 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
7918 NON_LVALUE_EXPRs and nops. */
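/* E.g. an ARRAY_TYPE CONSTRUCTOR such as { 1, 2, 3 } is encoded by
   encoding each element at its position in the buffer, and a
   RANGE_EXPR index such as [0 ... 3] = 5 repeats the encoding of its
   value across the range.  */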
7919
7920 int
7921 native_encode_initializer (tree init, unsigned char *ptr, int len,
7922 int off)
7923 {
  /* We don't support starting at a negative offset, and -1 is special.  */
7925 if (off < -1 || init == NULL_TREE)
7926 return 0;
7927
7928 STRIP_NOPS (init);
7929 switch (TREE_CODE (init))
7930 {
7931 case VIEW_CONVERT_EXPR:
7932 case NON_LVALUE_EXPR:
7933 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off);
7934 default:
7935 return native_encode_expr (init, ptr, len, off);
7936 case CONSTRUCTOR:
7937 tree type = TREE_TYPE (init);
7938 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
7939 if (total_bytes < 0)
7940 return 0;
7941 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7942 return 0;
7943 int o = off == -1 ? 0 : off;
7944 if (TREE_CODE (type) == ARRAY_TYPE)
7945 {
7946 HOST_WIDE_INT min_index;
7947 unsigned HOST_WIDE_INT cnt;
7948 HOST_WIDE_INT curpos = 0, fieldsize;
7949 constructor_elt *ce;
7950
7951 if (TYPE_DOMAIN (type) == NULL_TREE
7952 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
7953 return 0;
7954
7955 fieldsize = int_size_in_bytes (TREE_TYPE (type));
7956 if (fieldsize <= 0)
7957 return 0;
7958
7959 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
7960 if (ptr != NULL)
7961 memset (ptr, '\0', MIN (total_bytes - off, len));
7962
7963 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
7964 {
7965 tree val = ce->value;
7966 tree index = ce->index;
7967 HOST_WIDE_INT pos = curpos, count = 0;
7968 bool full = false;
7969 if (index && TREE_CODE (index) == RANGE_EXPR)
7970 {
7971 if (!tree_fits_shwi_p (TREE_OPERAND (index, 0))
7972 || !tree_fits_shwi_p (TREE_OPERAND (index, 1)))
7973 return 0;
7974 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
7975 * fieldsize;
7976 count = (tree_to_shwi (TREE_OPERAND (index, 1))
7977 - tree_to_shwi (TREE_OPERAND (index, 0)));
7978 }
7979 else if (index)
7980 {
7981 if (!tree_fits_shwi_p (index))
7982 return 0;
7983 pos = (tree_to_shwi (index) - min_index) * fieldsize;
7984 }
7985
7986 curpos = pos;
7987 if (val)
7988 do
7989 {
7990 if (off == -1
7991 || (curpos >= off
7992 && (curpos + fieldsize
7993 <= (HOST_WIDE_INT) off + len)))
7994 {
7995 if (full)
7996 {
7997 if (ptr)
7998 memcpy (ptr + (curpos - o), ptr + (pos - o),
7999 fieldsize);
8000 }
8001 else if (!native_encode_initializer (val,
8002 ptr
8003 ? ptr + curpos - o
8004 : NULL,
8005 fieldsize,
8006 off == -1 ? -1
8007 : 0))
8008 return 0;
8009 else
8010 {
8011 full = true;
8012 pos = curpos;
8013 }
8014 }
8015 else if (curpos + fieldsize > off
8016 && curpos < (HOST_WIDE_INT) off + len)
8017 {
8018 /* Partial overlap. */
8019 unsigned char *p = NULL;
8020 int no = 0;
8021 int l;
8022 if (curpos >= off)
8023 {
8024 if (ptr)
8025 p = ptr + curpos - off;
8026 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8027 fieldsize);
8028 }
8029 else
8030 {
8031 p = ptr;
8032 no = off - curpos;
8033 l = len;
8034 }
8035 if (!native_encode_initializer (val, p, l, no))
8036 return 0;
8037 }
8038 curpos += fieldsize;
8039 }
8040 while (count-- != 0);
8041 }
8042 return MIN (total_bytes - off, len);
8043 }
8044 else if (TREE_CODE (type) == RECORD_TYPE
8045 || TREE_CODE (type) == UNION_TYPE)
8046 {
8047 unsigned HOST_WIDE_INT cnt;
8048 constructor_elt *ce;
8049
8050 if (ptr != NULL)
8051 memset (ptr, '\0', MIN (total_bytes - off, len));
8052 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
8053 {
8054 tree field = ce->index;
8055 tree val = ce->value;
8056 HOST_WIDE_INT pos, fieldsize;
8057 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8058
8059 if (field == NULL_TREE)
8060 return 0;
8061
8062 pos = int_byte_position (field);
8063 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8064 continue;
8065
8066 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8067 && TYPE_DOMAIN (TREE_TYPE (field))
8068 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8069 return 0;
8070 if (DECL_SIZE_UNIT (field) == NULL_TREE
8071 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8072 return 0;
8073 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8074 if (fieldsize == 0)
8075 continue;
8076
8077 if (DECL_BIT_FIELD (field))
8078 {
8079 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8080 return 0;
8081 fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8082 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
8087 fieldsize += bpos;
8088 epos = fieldsize % BITS_PER_UNIT;
8089 fieldsize += BITS_PER_UNIT - 1;
8090 fieldsize /= BITS_PER_UNIT;
8091 }
8092
8093 if (off != -1 && pos + fieldsize <= off)
8094 continue;
8095
8096 if (val == NULL_TREE)
8097 continue;
8098
8099 if (DECL_BIT_FIELD (field))
8100 {
8101 /* FIXME: Handle PDP endian. */
8102 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8103 return 0;
8104
8105 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8106 if (repr == NULL_TREE
8107 || TREE_CODE (val) != INTEGER_CST
8108 || !INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8109 return 0;
8110
8111 HOST_WIDE_INT rpos = int_byte_position (repr);
8112 if (rpos > pos)
8113 return 0;
8114 wide_int w = wi::to_wide (val,
8115 TYPE_PRECISION (TREE_TYPE (repr)));
8116 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8117 - TYPE_PRECISION (TREE_TYPE (field)));
8118 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8119 if (!BYTES_BIG_ENDIAN)
8120 w = wi::lshift (w, bitoff);
8121 else
8122 w = wi::lshift (w, diff - bitoff);
8123 val = wide_int_to_tree (TREE_TYPE (repr), w);
8124
8125 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8126 / BITS_PER_UNIT + 1];
8127 int l = native_encode_int (val, buf, sizeof buf, 0);
8128 if (l * BITS_PER_UNIT != TYPE_PRECISION (TREE_TYPE (repr)))
8129 return 0;
8130
8131 if (ptr == NULL)
8132 continue;
8133
	      /* If the bitfield does not start at a byte boundary, handle
		 the partial byte at the start.  */
8136 if (bpos
8137 && (off == -1 || (pos >= off && len >= 1)))
8138 {
8139 if (!BYTES_BIG_ENDIAN)
8140 {
8141 int mask = (1 << bpos) - 1;
8142 buf[pos - rpos] &= ~mask;
8143 buf[pos - rpos] |= ptr[pos - o] & mask;
8144 }
8145 else
8146 {
8147 int mask = (1 << (BITS_PER_UNIT - bpos)) - 1;
8148 buf[pos - rpos] &= mask;
8149 buf[pos - rpos] |= ptr[pos - o] & ~mask;
8150 }
8151 }
	      /* If the bitfield does not end at a byte boundary, handle
		 the partial byte at the end.  */
8154 if (epos
8155 && (off == -1
8156 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8157 {
8158 if (!BYTES_BIG_ENDIAN)
8159 {
8160 int mask = (1 << epos) - 1;
8161 buf[pos - rpos + fieldsize - 1] &= mask;
8162 buf[pos - rpos + fieldsize - 1]
8163 |= ptr[pos + fieldsize - 1 - o] & ~mask;
8164 }
8165 else
8166 {
8167 int mask = (1 << (BITS_PER_UNIT - epos)) - 1;
8168 buf[pos - rpos + fieldsize - 1] &= ~mask;
8169 buf[pos - rpos + fieldsize - 1]
8170 |= ptr[pos + fieldsize - 1 - o] & mask;
8171 }
8172 }
8173 if (off == -1
8174 || (pos >= off
8175 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8176 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8177 else
8178 {
8179 /* Partial overlap. */
8180 HOST_WIDE_INT fsz = fieldsize;
8181 if (pos < off)
8182 {
8183 fsz -= (off - pos);
8184 pos = off;
8185 }
8186 if (pos + fsz > (HOST_WIDE_INT) off + len)
8187 fsz = (HOST_WIDE_INT) off + len - pos;
8188 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8189 }
8190 continue;
8191 }
8192
8193 if (off == -1
8194 || (pos >= off
8195 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8196 {
8197 if (!native_encode_initializer (val, ptr ? ptr + pos - o
8198 : NULL,
8199 fieldsize,
8200 off == -1 ? -1 : 0))
8201 return 0;
8202 }
8203 else
8204 {
8205 /* Partial overlap. */
8206 unsigned char *p = NULL;
8207 int no = 0;
8208 int l;
8209 if (pos >= off)
8210 {
8211 if (ptr)
8212 p = ptr + pos - off;
8213 l = MIN ((HOST_WIDE_INT) off + len - pos,
8214 fieldsize);
8215 }
8216 else
8217 {
8218 p = ptr;
8219 no = off - pos;
8220 l = len;
8221 }
8222 if (!native_encode_initializer (val, p, l, no))
8223 return 0;
8224 }
8225 }
8226 return MIN (total_bytes - off, len);
8227 }
8228 return 0;
8229 }
8230 }
8231
8232
8233 /* Subroutine of native_interpret_expr. Interpret the contents of
8234 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8235 If the buffer cannot be interpreted, return NULL_TREE. */
8236
8237 static tree
8238 native_interpret_int (tree type, const unsigned char *ptr, int len)
8239 {
8240 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8241
8242 if (total_bytes > len
8243 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8244 return NULL_TREE;
8245
8246 wide_int result = wi::from_buffer (ptr, total_bytes);
8247
8248 return wide_int_to_tree (type, result);
8249 }
8250
8251
8252 /* Subroutine of native_interpret_expr. Interpret the contents of
8253 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8254 If the buffer cannot be interpreted, return NULL_TREE. */
8255
8256 static tree
8257 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8258 {
8259 scalar_mode mode = SCALAR_TYPE_MODE (type);
8260 int total_bytes = GET_MODE_SIZE (mode);
8261 double_int result;
8262 FIXED_VALUE_TYPE fixed_value;
8263
8264 if (total_bytes > len
8265 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8266 return NULL_TREE;
8267
8268 result = double_int::from_buffer (ptr, total_bytes);
8269 fixed_value = fixed_from_double_int (result, mode);
8270
8271 return build_fixed (type, fixed_value);
8272 }
8273
8274
8275 /* Subroutine of native_interpret_expr. Interpret the contents of
8276 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8277 If the buffer cannot be interpreted, return NULL_TREE. */
8278
8279 static tree
8280 native_interpret_real (tree type, const unsigned char *ptr, int len)
8281 {
8282 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8283 int total_bytes = GET_MODE_SIZE (mode);
8284 unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
8288 REAL_VALUE_TYPE r;
8289 long tmp[6];
8290
8291 if (total_bytes > len || total_bytes > 24)
8292 return NULL_TREE;
8293 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8294
8295 memset (tmp, 0, sizeof (tmp));
8296 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8297 bitpos += BITS_PER_UNIT)
8298 {
8299 /* Both OFFSET and BYTE index within a long;
8300 bitpos indexes the whole float. */
8301 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8302 if (UNITS_PER_WORD < 4)
8303 {
8304 int word = byte / UNITS_PER_WORD;
8305 if (WORDS_BIG_ENDIAN)
8306 word = (words - 1) - word;
8307 offset = word * UNITS_PER_WORD;
8308 if (BYTES_BIG_ENDIAN)
8309 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8310 else
8311 offset += byte % UNITS_PER_WORD;
8312 }
8313 else
8314 {
8315 offset = byte;
8316 if (BYTES_BIG_ENDIAN)
8317 {
8318 /* Reverse bytes within each long, or within the entire float
8319 if it's smaller than a long (for HFmode). */
8320 offset = MIN (3, total_bytes - 1) - offset;
8321 gcc_assert (offset >= 0);
8322 }
8323 }
8324 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8325
8326 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8327 }
8328
8329 real_from_target (&r, tmp, mode);
8330 return build_real (type, r);
8331 }
8332
8333
8334 /* Subroutine of native_interpret_expr. Interpret the contents of
8335 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8336 If the buffer cannot be interpreted, return NULL_TREE. */
8337
8338 static tree
8339 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8340 {
8341 tree etype, rpart, ipart;
8342 int size;
8343
8344 etype = TREE_TYPE (type);
8345 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8346 if (size * 2 > len)
8347 return NULL_TREE;
8348 rpart = native_interpret_expr (etype, ptr, size);
8349 if (!rpart)
8350 return NULL_TREE;
8351 ipart = native_interpret_expr (etype, ptr+size, size);
8352 if (!ipart)
8353 return NULL_TREE;
8354 return build_complex (type, rpart, ipart);
8355 }
8356
8357 /* Read a vector of type TYPE from the target memory image given by BYTES,
8358 which contains LEN bytes. The vector is known to be encodable using
8359 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8360
8361 Return the vector on success, otherwise return null. */
8362
8363 static tree
8364 native_interpret_vector_part (tree type, const unsigned char *bytes,
8365 unsigned int len, unsigned int npatterns,
8366 unsigned int nelts_per_pattern)
8367 {
8368 tree elt_type = TREE_TYPE (type);
8369 if (VECTOR_BOOLEAN_TYPE_P (type)
8370 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8371 {
8372 /* This is the only case in which elements can be smaller than a byte.
8373 Element 0 is always in the lsb of the containing byte. */
8374 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8375 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8376 return NULL_TREE;
8377
8378 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8379 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8380 {
8381 unsigned int bit_index = i * elt_bits;
8382 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8383 unsigned int lsb = bit_index % BITS_PER_UNIT;
8384 builder.quick_push (bytes[byte_index] & (1 << lsb)
8385 ? build_all_ones_cst (elt_type)
8386 : build_zero_cst (elt_type));
8387 }
8388 return builder.build ();
8389 }
8390
8391 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8392 if (elt_bytes * npatterns * nelts_per_pattern > len)
8393 return NULL_TREE;
8394
8395 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8396 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8397 {
8398 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8399 if (!elt)
8400 return NULL_TREE;
8401 builder.quick_push (elt);
8402 bytes += elt_bytes;
8403 }
8404 return builder.build ();
8405 }
8406
8407 /* Subroutine of native_interpret_expr. Interpret the contents of
8408 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8409 If the buffer cannot be interpreted, return NULL_TREE. */
8410
8411 static tree
8412 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8413 {
8414 tree etype;
8415 unsigned int size;
8416 unsigned HOST_WIDE_INT count;
8417
8418 etype = TREE_TYPE (type);
8419 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8420 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8421 || size * count > len)
8422 return NULL_TREE;
8423
8424 return native_interpret_vector_part (type, ptr, len, count, 1);
8425 }
8426
8427
8428 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8429 the buffer PTR of length LEN as a constant of type TYPE. For
8430 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8431 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8432 return NULL_TREE. */
8433
8434 tree
8435 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8436 {
8437 switch (TREE_CODE (type))
8438 {
8439 case INTEGER_TYPE:
8440 case ENUMERAL_TYPE:
8441 case BOOLEAN_TYPE:
8442 case POINTER_TYPE:
8443 case REFERENCE_TYPE:
8444 return native_interpret_int (type, ptr, len);
8445
8446 case REAL_TYPE:
8447 return native_interpret_real (type, ptr, len);
8448
8449 case FIXED_POINT_TYPE:
8450 return native_interpret_fixed (type, ptr, len);
8451
8452 case COMPLEX_TYPE:
8453 return native_interpret_complex (type, ptr, len);
8454
8455 case VECTOR_TYPE:
8456 return native_interpret_vector (type, ptr, len);
8457
8458 default:
8459 return NULL_TREE;
8460 }
8461 }
8462
8463 /* Returns true if we can interpret the contents of a native encoding
8464 as TYPE. */
8465
8466 bool
8467 can_native_interpret_type_p (tree type)
8468 {
8469 switch (TREE_CODE (type))
8470 {
8471 case INTEGER_TYPE:
8472 case ENUMERAL_TYPE:
8473 case BOOLEAN_TYPE:
8474 case POINTER_TYPE:
8475 case REFERENCE_TYPE:
8476 case FIXED_POINT_TYPE:
8477 case REAL_TYPE:
8478 case COMPLEX_TYPE:
8479 case VECTOR_TYPE:
8480 return true;
8481 default:
8482 return false;
8483 }
8484 }
8485
/* Routines for manipulating native_encode_expr encoded data when the
   encoded or extracted constant positions and/or sizes aren't byte
   aligned.  */
8488
8489 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8490 bits between adjacent elements. AMNT should be within
8491 [0, BITS_PER_UNIT).
8492 Example, AMNT = 2:
8493 00011111|11100000 << 2 = 01111111|10000000
8494 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
8495
8496 void
8497 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8498 unsigned int amnt)
8499 {
8500 if (amnt == 0)
8501 return;
8502
8503 unsigned char carry_over = 0U;
8504 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
8505 unsigned char clear_mask = (~0U) << amnt;
8506
8507 for (unsigned int i = 0; i < sz; i++)
8508 {
8509 unsigned prev_carry_over = carry_over;
8510 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
8511
8512 ptr[i] <<= amnt;
8513 if (i != 0)
8514 {
8515 ptr[i] &= clear_mask;
8516 ptr[i] |= prev_carry_over;
8517 }
8518 }
8519 }
8520
8521 /* Like shift_bytes_in_array_left but for big-endian.
8522 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
8523 bits between adjacent elements. AMNT should be within
8524 [0, BITS_PER_UNIT).
8525 Example, AMNT = 2:
8526 00011111|11100000 >> 2 = 00000111|11111000
8527 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
8528
8529 void
8530 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
8531 unsigned int amnt)
8532 {
8533 if (amnt == 0)
8534 return;
8535
8536 unsigned char carry_over = 0U;
8537 unsigned char carry_mask = ~(~0U << amnt);
8538
8539 for (unsigned int i = 0; i < sz; i++)
8540 {
8541 unsigned prev_carry_over = carry_over;
8542 carry_over = ptr[i] & carry_mask;
8543
8544 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
8545 ptr[i] >>= amnt;
8546 ptr[i] |= prev_carry_over;
8547 }
8548 }
8549
8550 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
8551 directly on the VECTOR_CST encoding, in a way that works for variable-
8552 length vectors. Return the resulting VECTOR_CST on success or null
8553 on failure. */
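/* E.g. a V4SI constant that duplicates one 32-bit element X (a single
   pattern) can be reinterpreted as a V16QI constant repeating the four
   target-order bytes of X; only those four bytes need to be encoded
   and re-read.  */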
8554
8555 static tree
8556 fold_view_convert_vector_encoding (tree type, tree expr)
8557 {
8558 tree expr_type = TREE_TYPE (expr);
8559 poly_uint64 type_bits, expr_bits;
8560 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
8561 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
8562 return NULL_TREE;
8563
8564 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
8565 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
8566 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
8567 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
8568
8569 /* We can only preserve the semantics of a stepped pattern if the new
8570 vector element is an integer of the same size. */
8571 if (VECTOR_CST_STEPPED_P (expr)
8572 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
8573 return NULL_TREE;
8574
8575 /* The number of bits needed to encode one element from every pattern
8576 of the original vector. */
8577 unsigned int expr_sequence_bits
8578 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
8579
8580 /* The number of bits needed to encode one element from every pattern
8581 of the result. */
8582 unsigned int type_sequence_bits
8583 = least_common_multiple (expr_sequence_bits, type_elt_bits);
8584
8585 /* Don't try to read more bytes than are available, which can happen
8586 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
8587 The general VIEW_CONVERT handling can cope with that case, so there's
8588 no point complicating things here. */
8589 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
8590 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
8591 BITS_PER_UNIT);
8592 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
8593 if (known_gt (buffer_bits, expr_bits))
8594 return NULL_TREE;
8595
8596 /* Get enough bytes of EXPR to form the new encoding. */
8597 auto_vec<unsigned char, 128> buffer (buffer_bytes);
8598 buffer.quick_grow (buffer_bytes);
8599 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
8600 buffer_bits / expr_elt_bits)
8601 != (int) buffer_bytes)
8602 return NULL_TREE;
8603
8604 /* Reencode the bytes as TYPE. */
8605 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
8606 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
8607 type_npatterns, nelts_per_pattern);
8608 }
8609
8610 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8611 TYPE at compile-time. If we're unable to perform the conversion
8612 return NULL_TREE. */
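/* E.g. on an IEEE single-precision target, VIEW_CONVERT_EXPR<int>(1.0f)
   folds to the INTEGER_CST 0x3f800000.  */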
8613
8614 static tree
8615 fold_view_convert_expr (tree type, tree expr)
8616 {
8617 /* We support up to 512-bit values (for V8DFmode). */
8618 unsigned char buffer[64];
8619 int len;
8620
8621 /* Check that the host and target are sane. */
8622 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8623 return NULL_TREE;
8624
8625 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
8626 if (tree res = fold_view_convert_vector_encoding (type, expr))
8627 return res;
8628
8629 len = native_encode_expr (expr, buffer, sizeof (buffer));
8630 if (len == 0)
8631 return NULL_TREE;
8632
8633 return native_interpret_expr (type, buffer, len);
8634 }
8635
8636 /* Build an expression for the address of T. Folds away INDIRECT_REF
8637 to avoid confusing the gimplify process. */
8638
8639 tree
8640 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8641 {
8642 /* The size of the object is not relevant when talking about its address. */
8643 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8644 t = TREE_OPERAND (t, 0);
8645
8646 if (TREE_CODE (t) == INDIRECT_REF)
8647 {
8648 t = TREE_OPERAND (t, 0);
8649
8650 if (TREE_TYPE (t) != ptrtype)
8651 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
8652 }
8653 else if (TREE_CODE (t) == MEM_REF
8654 && integer_zerop (TREE_OPERAND (t, 1)))
8655 {
8656 t = TREE_OPERAND (t, 0);
8657
8658 if (TREE_TYPE (t) != ptrtype)
8659 t = fold_convert_loc (loc, ptrtype, t);
8660 }
8661 else if (TREE_CODE (t) == MEM_REF
8662 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
8663 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
8664 TREE_OPERAND (t, 0),
8665 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
8666 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8667 {
8668 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8669
8670 if (TREE_TYPE (t) != ptrtype)
8671 t = fold_convert_loc (loc, ptrtype, t);
8672 }
8673 else
8674 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
8675
8676 return t;
8677 }
8678
8679 /* Build an expression for the address of T. */
8680
8681 tree
8682 build_fold_addr_expr_loc (location_t loc, tree t)
8683 {
8684 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8685
8686 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8687 }
8688
8689 /* Fold a unary expression of code CODE and type TYPE with operand
8690 OP0. Return the folded expression if folding is successful.
8691 Otherwise, return NULL_TREE. */
8692
8693 tree
8694 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8695 {
8696 tree tem;
8697 tree arg0;
8698 enum tree_code_class kind = TREE_CODE_CLASS (code);
8699
8700 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8701 && TREE_CODE_LENGTH (code) == 1);
8702
8703 arg0 = op0;
8704 if (arg0)
8705 {
8706 if (CONVERT_EXPR_CODE_P (code)
8707 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
8708 {
8709 /* Don't use STRIP_NOPS, because signedness of argument type
8710 matters. */
8711 STRIP_SIGN_NOPS (arg0);
8712 }
8713 else
8714 {
8715 /* Strip any conversions that don't change the mode. This
8716 is safe for every expression, except for a comparison
8717 expression because its signedness is derived from its
8718 operands.
8719
	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any case, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
8726 STRIP_NOPS (arg0);
8727 }
8728
8729 if (CONSTANT_CLASS_P (arg0))
8730 {
8731 tree tem = const_unop (code, type, arg0);
8732 if (tem)
8733 {
8734 if (TREE_TYPE (tem) != type)
8735 tem = fold_convert_loc (loc, type, tem);
8736 return tem;
8737 }
8738 }
8739 }
8740
8741 tem = generic_simplify (loc, code, type, op0);
8742 if (tem)
8743 return tem;
8744
8745 if (TREE_CODE_CLASS (code) == tcc_unary)
8746 {
8747 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8748 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8749 fold_build1_loc (loc, code, type,
8750 fold_convert_loc (loc, TREE_TYPE (op0),
8751 TREE_OPERAND (arg0, 1))));
8752 else if (TREE_CODE (arg0) == COND_EXPR)
8753 {
8754 tree arg01 = TREE_OPERAND (arg0, 1);
8755 tree arg02 = TREE_OPERAND (arg0, 2);
8756 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8757 arg01 = fold_build1_loc (loc, code, type,
8758 fold_convert_loc (loc,
8759 TREE_TYPE (op0), arg01));
8760 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8761 arg02 = fold_build1_loc (loc, code, type,
8762 fold_convert_loc (loc,
8763 TREE_TYPE (op0), arg02));
8764 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8765 arg01, arg02);
8766
8767 /* If this was a conversion, and all we did was to move into
8768 inside the COND_EXPR, bring it back out. But leave it if
8769 it is a conversion from integer to integer and the
8770 result precision is no wider than a word since such a
8771 conversion is cheap and may be optimized away by combine,
8772 while it couldn't if it were outside the COND_EXPR. Then return
8773 so we don't get into an infinite recursion loop taking the
8774 conversion out and then back in. */
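	  /* For illustration (assuming 32-bit int and 8-bit char):
	     for "(char) (c ? i : j)" with int i, j, the conversion is
	     pushed inside to give "c ? (char) i : (char) j" and stays
	     there, since both arms are integer conversions no wider
	     than a word.  For "(double) (c ? f1 : f2)" with float arms
	     the conversion is hoisted back out instead.  */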
8775
8776 if ((CONVERT_EXPR_CODE_P (code)
8777 || code == NON_LVALUE_EXPR)
8778 && TREE_CODE (tem) == COND_EXPR
8779 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8780 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8781 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8782 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8783 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8784 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8785 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8786 && (INTEGRAL_TYPE_P
8787 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8788 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8789 || flag_syntax_only))
8790 tem = build1_loc (loc, code, type,
8791 build3 (COND_EXPR,
8792 TREE_TYPE (TREE_OPERAND
8793 (TREE_OPERAND (tem, 1), 0)),
8794 TREE_OPERAND (tem, 0),
8795 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8796 TREE_OPERAND (TREE_OPERAND (tem, 2),
8797 0)));
8798 return tem;
8799 }
8800 }
8801
8802 switch (code)
8803 {
8804 case NON_LVALUE_EXPR:
8805 if (!maybe_lvalue_p (op0))
8806 return fold_convert_loc (loc, type, op0);
8807 return NULL_TREE;
8808
8809 CASE_CONVERT:
8810 case FLOAT_EXPR:
8811 case FIX_TRUNC_EXPR:
8812 if (COMPARISON_CLASS_P (op0))
8813 {
8814 /* If we have (type) (a CMP b) and type is an integral type, return
8815 	     a new expression involving the new type.  Canonicalize
8816 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
8817 non-integral type.
8818 	     Do not fold the result, as that would not simplify further;
8819 	     folding it again would result in infinite recursion.  */
8820 if (TREE_CODE (type) == BOOLEAN_TYPE)
8821 return build2_loc (loc, TREE_CODE (op0), type,
8822 TREE_OPERAND (op0, 0),
8823 TREE_OPERAND (op0, 1));
8824 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
8825 && TREE_CODE (type) != VECTOR_TYPE)
8826 return build3_loc (loc, COND_EXPR, type, op0,
8827 constant_boolean_node (true, type),
8828 constant_boolean_node (false, type));
8829 }
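	  /* E.g., "(_Bool) (a < b)" is simply rebuilt as "a < b" with
	     boolean result type, while for a non-integral type such as
	     float the result is canonicalized to
	     "(a < b) ? (float) true : (float) false" as described above.  */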
8830
8831 /* Handle (T *)&A.B.C for A being of type T and B and C
8832 living at offset zero. This occurs frequently in
8833 C++ upcasting and then accessing the base. */
8834 if (TREE_CODE (op0) == ADDR_EXPR
8835 && POINTER_TYPE_P (type)
8836 && handled_component_p (TREE_OPERAND (op0, 0)))
8837 {
8838 poly_int64 bitsize, bitpos;
8839 tree offset;
8840 machine_mode mode;
8841 int unsignedp, reversep, volatilep;
8842 tree base
8843 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
8844 &offset, &mode, &unsignedp, &reversep,
8845 &volatilep);
8846 /* If the reference was to a (constant) zero offset, we can use
8847 the address of the base if it has the same base type
8848 as the result type and the pointer type is unqualified. */
8849 if (!offset
8850 && known_eq (bitpos, 0)
8851 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8852 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8853 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8854 return fold_convert_loc (loc, type,
8855 build_fold_addr_expr_loc (loc, base));
8856 }
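	  /* For illustration, with hypothetical types
	       struct A { struct B b; } a;
	     the cast "(struct A *) &a.b" refers to offset zero of A, so
	     it folds to "&a" converted to the unqualified pointer type.  */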
8857
8858 if (TREE_CODE (op0) == MODIFY_EXPR
8859 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8860 /* Detect assigning a bitfield. */
8861 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8862 && DECL_BIT_FIELD
8863 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8864 {
8865 /* Don't leave an assignment inside a conversion
8866 unless assigning a bitfield. */
8867 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8868 /* First do the assignment, then return converted constant. */
8869 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8870 TREE_NO_WARNING (tem) = 1;
8871 TREE_USED (tem) = 1;
8872 return tem;
8873 }
8874
8875 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8876 	 constant (if x has signed type, the sign bit cannot be set
8877 in c). This folds extension into the BIT_AND_EXPR.
8878 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8879 very likely don't have maximal range for their precision and this
8880 transformation effectively doesn't preserve non-maximal ranges. */
8881 if (TREE_CODE (type) == INTEGER_TYPE
8882 && TREE_CODE (op0) == BIT_AND_EXPR
8883 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8884 {
8885 tree and_expr = op0;
8886 tree and0 = TREE_OPERAND (and_expr, 0);
8887 tree and1 = TREE_OPERAND (and_expr, 1);
8888 int change = 0;
8889
8890 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8891 || (TYPE_PRECISION (type)
8892 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8893 change = 1;
8894 else if (TYPE_PRECISION (TREE_TYPE (and1))
8895 <= HOST_BITS_PER_WIDE_INT
8896 && tree_fits_uhwi_p (and1))
8897 {
8898 unsigned HOST_WIDE_INT cst;
8899
8900 cst = tree_to_uhwi (and1);
8901 cst &= HOST_WIDE_INT_M1U
8902 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8903 change = (cst == 0);
8904 if (change
8905 && !flag_syntax_only
8906 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
8907 == ZERO_EXTEND))
8908 {
8909 tree uns = unsigned_type_for (TREE_TYPE (and0));
8910 and0 = fold_convert_loc (loc, uns, and0);
8911 and1 = fold_convert_loc (loc, uns, and1);
8912 }
8913 }
8914 if (change)
8915 {
8916 tem = force_fit_type (type, wi::to_widest (and1), 0,
8917 TREE_OVERFLOW (and1));
8918 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8919 fold_convert_loc (loc, type, and0), tem);
8920 }
8921 }
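      /* E.g., assuming a 32-bit int i:
	   (long long) (i & 0x7fffffff)  ->  (long long) i & 0x7fffffff
	 is safe because the sign bit of the masked value is known to be
	 clear, so the extension commutes with the masking.  */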
8922
8923 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8924 cast (T1)X will fold away. We assume that this happens when X itself
8925 is a cast. */
8926 if (POINTER_TYPE_P (type)
8927 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8928 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
8929 {
8930 tree arg00 = TREE_OPERAND (arg0, 0);
8931 tree arg01 = TREE_OPERAND (arg0, 1);
8932
8933 return fold_build_pointer_plus_loc
8934 (loc, fold_convert_loc (loc, type, arg00), arg01);
8935 }
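      /* E.g., with char *q:
	   (char *) ((int *) q p+ 4)  ->  q p+ 4
	 because the intermediate cast "(char *) (int *) q" folds away.  */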
8936
8937 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8938 	 of the same precision, and X has an integer type not narrower than
8939 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8940 if (INTEGRAL_TYPE_P (type)
8941 && TREE_CODE (op0) == BIT_NOT_EXPR
8942 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8943 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8944 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8945 {
8946 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8947 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8948 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8949 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8950 fold_convert_loc (loc, type, tem));
8951 }
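      /* E.g., for unsigned int u (same precision as int):
	   (int) ~(unsigned int) u  ->  ~(int) u
	 since neither cast is an extension.  */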
8952
8953 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8954 type of X and Y (integer types only). */
8955 if (INTEGRAL_TYPE_P (type)
8956 && TREE_CODE (op0) == MULT_EXPR
8957 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8958 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8959 {
8960 /* Be careful not to introduce new overflows. */
8961 tree mult_type;
8962 if (TYPE_OVERFLOW_WRAPS (type))
8963 mult_type = type;
8964 else
8965 mult_type = unsigned_type_for (type);
8966
8967 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8968 {
8969 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8970 fold_convert_loc (loc, mult_type,
8971 TREE_OPERAND (op0, 0)),
8972 fold_convert_loc (loc, mult_type,
8973 TREE_OPERAND (op0, 1)));
8974 return fold_convert_loc (loc, type, tem);
8975 }
8976 }
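      /* E.g., assuming 32-bit int x, y and 8-bit unsigned char:
	   (unsigned char) (x * y)
	     ->  (unsigned char) x * (unsigned char) y
	 computed in unsigned char, which wraps, so the narrower
	 multiplication introduces no new overflow.  */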
8977
8978 return NULL_TREE;
8979
8980 case VIEW_CONVERT_EXPR:
8981 if (TREE_CODE (op0) == MEM_REF)
8982 {
8983 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8984 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8985 tem = fold_build2_loc (loc, MEM_REF, type,
8986 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8987 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8988 return tem;
8989 }
8990
8991 return NULL_TREE;
8992
8993 case NEGATE_EXPR:
8994 tem = fold_negate_expr (loc, arg0);
8995 if (tem)
8996 return fold_convert_loc (loc, type, tem);
8997 return NULL_TREE;
8998
8999 case ABS_EXPR:
9000 /* Convert fabs((double)float) into (double)fabsf(float). */
9001 if (TREE_CODE (arg0) == NOP_EXPR
9002 && TREE_CODE (type) == REAL_TYPE)
9003 {
9004 tree targ0 = strip_float_extensions (arg0);
9005 if (targ0 != arg0)
9006 return fold_convert_loc (loc, type,
9007 fold_build1_loc (loc, ABS_EXPR,
9008 TREE_TYPE (targ0),
9009 targ0));
9010 }
9011 return NULL_TREE;
9012
9013 case BIT_NOT_EXPR:
9014 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9015 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9016 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9017 fold_convert_loc (loc, type,
9018 TREE_OPERAND (arg0, 0)))))
9019 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9020 fold_convert_loc (loc, type,
9021 TREE_OPERAND (arg0, 1)));
9022 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9023 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9024 fold_convert_loc (loc, type,
9025 TREE_OPERAND (arg0, 1)))))
9026 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9027 fold_convert_loc (loc, type,
9028 TREE_OPERAND (arg0, 0)), tem);
9029
9030 return NULL_TREE;
9031
9032 case TRUTH_NOT_EXPR:
9033 /* Note that the operand of this must be an int
9034 and its values must be 0 or 1.
9035 ("true" is a fixed value perhaps depending on the language,
9036 but we don't handle values other than 1 correctly yet.) */
9037 tem = fold_truth_not_expr (loc, arg0);
9038 if (!tem)
9039 return NULL_TREE;
9040 return fold_convert_loc (loc, type, tem);
9041
9042 case INDIRECT_REF:
9043 /* Fold *&X to X if X is an lvalue. */
9044 if (TREE_CODE (op0) == ADDR_EXPR)
9045 {
9046 tree op00 = TREE_OPERAND (op0, 0);
9047 if ((VAR_P (op00)
9048 || TREE_CODE (op00) == PARM_DECL
9049 || TREE_CODE (op00) == RESULT_DECL)
9050 && !TREE_READONLY (op00))
9051 return op00;
9052 }
9053 return NULL_TREE;
9054
9055 default:
9056 return NULL_TREE;
9057 } /* switch (code) */
9058 }
9059
9060
9061 /* If the operation was a conversion do _not_ mark a resulting constant
9062 with TREE_OVERFLOW if the original constant was not. These conversions
9063 have implementation defined behavior and retaining the TREE_OVERFLOW
9064 flag here would confuse later passes such as VRP. */
9065 tree
9066 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9067 tree type, tree op0)
9068 {
9069 tree res = fold_unary_loc (loc, code, type, op0);
9070 if (res
9071 && TREE_CODE (res) == INTEGER_CST
9072 && TREE_CODE (op0) == INTEGER_CST
9073 && CONVERT_EXPR_CODE_P (code))
9074 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9075
9076 return res;
9077 }
9078
9079 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9080 operands OP0 and OP1. LOC is the location of the resulting expression.
9081    ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
9082 Return the folded expression if folding is successful. Otherwise,
9083 return NULL_TREE. */
9084 static tree
9085 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9086 tree arg0, tree arg1, tree op0, tree op1)
9087 {
9088 tree tem;
9089
9090 /* We only do these simplifications if we are optimizing. */
9091 if (!optimize)
9092 return NULL_TREE;
9093
9094 /* Check for things like (A || B) && (A || C). We can convert this
9095 to A || (B && C). Note that either operator can be any of the four
9096 truth and/or operations and the transformation will still be
9097 valid. Also note that we only care about order for the
9098 ANDIF and ORIF operators. If B contains side effects, this
9099 might change the truth-value of A. */
9100 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9101 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9102 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9103 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9104 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9105 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9106 {
9107 tree a00 = TREE_OPERAND (arg0, 0);
9108 tree a01 = TREE_OPERAND (arg0, 1);
9109 tree a10 = TREE_OPERAND (arg1, 0);
9110 tree a11 = TREE_OPERAND (arg1, 1);
9111 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9112 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9113 && (code == TRUTH_AND_EXPR
9114 || code == TRUTH_OR_EXPR));
9115
9116 if (operand_equal_p (a00, a10, 0))
9117 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9118 fold_build2_loc (loc, code, type, a01, a11));
9119 else if (commutative && operand_equal_p (a00, a11, 0))
9120 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9121 fold_build2_loc (loc, code, type, a01, a10));
9122 else if (commutative && operand_equal_p (a01, a10, 0))
9123 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9124 fold_build2_loc (loc, code, type, a00, a11));
9125
9126       /* This case is tricky because we must either have commutative
9127 operators or else A10 must not have side-effects. */
9128
9129 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9130 && operand_equal_p (a01, a11, 0))
9131 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9132 fold_build2_loc (loc, code, type, a00, a10),
9133 a01);
9134 }
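  /* E.g., "(a || b) && (a || c)" distributes to "a || (b && c)",
     provided b has no side effects, so a is still evaluated first and
     b and c keep their conditional evaluation.  */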
9135
9136 /* See if we can build a range comparison. */
9137 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9138 return tem;
9139
9140 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9141 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9142 {
9143 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9144 if (tem)
9145 return fold_build2_loc (loc, code, type, tem, arg1);
9146 }
9147
9148 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9149 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9150 {
9151 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9152 if (tem)
9153 return fold_build2_loc (loc, code, type, arg0, tem);
9154 }
9155
9156 /* Check for the possibility of merging component references. If our
9157 lhs is another similar operation, try to merge its rhs with our
9158 rhs. Then try to merge our lhs and rhs. */
9159 if (TREE_CODE (arg0) == code
9160 && (tem = fold_truth_andor_1 (loc, code, type,
9161 TREE_OPERAND (arg0, 1), arg1)) != 0)
9162 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9163
9164 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9165 return tem;
9166
9167 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9168 if (param_logical_op_non_short_circuit != -1)
9169 logical_op_non_short_circuit
9170 = param_logical_op_non_short_circuit;
9171 if (logical_op_non_short_circuit
9172 && !flag_sanitize_coverage
9173 && (code == TRUTH_AND_EXPR
9174 || code == TRUTH_ANDIF_EXPR
9175 || code == TRUTH_OR_EXPR
9176 || code == TRUTH_ORIF_EXPR))
9177 {
9178 enum tree_code ncode, icode;
9179
9180 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9181 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9182 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9183
9184 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9185 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
9186 	 We don't want to pack more than two leaves into a non-IF AND/OR
9187 	 expression.
9188 	 If the tree code of the left-hand operand isn't an AND/OR-IF code
9189 	 and isn't equal to IF-CODE, then we don't want to add the right-hand
9190 	 operand.  If the inner right-hand side of the left-hand operand has
9191 	 side effects, or isn't simple, then we can't add to it, as otherwise
9192 	 we might destroy the if-sequence.  */
9193 if (TREE_CODE (arg0) == icode
9194 && simple_operand_p_2 (arg1)
9195 /* Needed for sequence points to handle trappings, and
9196 side-effects. */
9197 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9198 {
9199 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9200 arg1);
9201 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9202 tem);
9203 }
9204 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9205 	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
9206 else if (TREE_CODE (arg1) == icode
9207 && simple_operand_p_2 (arg0)
9208 /* Needed for sequence points to handle trappings, and
9209 side-effects. */
9210 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9211 {
9212 tem = fold_build2_loc (loc, ncode, type,
9213 arg0, TREE_OPERAND (arg1, 0));
9214 return fold_build2_loc (loc, icode, type, tem,
9215 TREE_OPERAND (arg1, 1));
9216 }
9217 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9218 into (A OR B).
9219 	 For sequence point consistency, we need to check for trapping
9220 	 and side effects.  */
9221 else if (code == icode && simple_operand_p_2 (arg0)
9222 && simple_operand_p_2 (arg1))
9223 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9224 }
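      /* E.g., "a != 0 && b != 0" (TRUTH_ANDIF_EXPR) with simple,
	 non-trapping operands becomes the non-short-circuit form
	 "a != 0 & b != 0" (TRUTH_AND_EXPR), which can be emitted
	 without branches.  */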
9225
9226 return NULL_TREE;
9227 }
9228
9229 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9230 by changing CODE to reduce the magnitude of constants involved in
9231 ARG0 of the comparison.
9232 Returns a canonicalized comparison tree if a simplification was
9233 possible, otherwise returns NULL_TREE.
9234 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9235 valid if signed overflow is undefined. */
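/* For example, with undefined signed overflow, "a - 10 < b" is
   canonicalized to "a - 9 <= b": the constant moves one step closer to
   zero while the comparison is relaxed from < to <=.  */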
9236
9237 static tree
9238 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9239 tree arg0, tree arg1,
9240 bool *strict_overflow_p)
9241 {
9242 enum tree_code code0 = TREE_CODE (arg0);
9243 tree t, cst0 = NULL_TREE;
9244 int sgn0;
9245
9246 /* Match A +- CST code arg1. We can change this only if overflow
9247 is undefined. */
9248 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9249 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9250 /* In principle pointers also have undefined overflow behavior,
9251 but that causes problems elsewhere. */
9252 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9253 && (code0 == MINUS_EXPR
9254 || code0 == PLUS_EXPR)
9255 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9256 return NULL_TREE;
9257
9258 /* Identify the constant in arg0 and its sign. */
9259 cst0 = TREE_OPERAND (arg0, 1);
9260 sgn0 = tree_int_cst_sgn (cst0);
9261
9262 /* Overflowed constants and zero will cause problems. */
9263 if (integer_zerop (cst0)
9264 || TREE_OVERFLOW (cst0))
9265 return NULL_TREE;
9266
9267 /* See if we can reduce the magnitude of the constant in
9268 arg0 by changing the comparison code. */
9269 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9270 if (code == LT_EXPR
9271 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9272 code = LE_EXPR;
9273 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9274 else if (code == GT_EXPR
9275 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9276 code = GE_EXPR;
9277 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9278 else if (code == LE_EXPR
9279 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9280 code = LT_EXPR;
9281 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9282 else if (code == GE_EXPR
9283 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9284 code = GT_EXPR;
9285 else
9286 return NULL_TREE;
9287 *strict_overflow_p = true;
9288
9289 /* Now build the constant reduced in magnitude. But not if that
9290      would produce one outside of its type's range.  */
9291 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9292 && ((sgn0 == 1
9293 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9294 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9295 || (sgn0 == -1
9296 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9297 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9298 return NULL_TREE;
9299
9300 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9301 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9302 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9303 t = fold_convert (TREE_TYPE (arg1), t);
9304
9305 return fold_build2_loc (loc, code, type, t, arg1);
9306 }
9307
9308 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9309 overflow further. Try to decrease the magnitude of constants involved
9310 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9311 and put sole constants at the second argument position.
9312 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9313
9314 static tree
9315 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9316 tree arg0, tree arg1)
9317 {
9318 tree t;
9319 bool strict_overflow_p;
9320 const char * const warnmsg = G_("assuming signed overflow does not occur "
9321 "when reducing constant in comparison");
9322
9323 /* Try canonicalization by simplifying arg0. */
9324 strict_overflow_p = false;
9325 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9326 &strict_overflow_p);
9327 if (t)
9328 {
9329 if (strict_overflow_p)
9330 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9331 return t;
9332 }
9333
9334 /* Try canonicalization by simplifying arg1 using the swapped
9335 comparison. */
9336 code = swap_tree_comparison (code);
9337 strict_overflow_p = false;
9338 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9339 &strict_overflow_p);
9340 if (t && strict_overflow_p)
9341 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9342 return t;
9343 }
9344
9345 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9346 space. This is used to avoid issuing overflow warnings for
9347 expressions like &p->x which cannot wrap. */
9348
9349 static bool
9350 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9351 {
9352 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9353 return true;
9354
9355 if (maybe_lt (bitpos, 0))
9356 return true;
9357
9358 poly_wide_int wi_offset;
9359 int precision = TYPE_PRECISION (TREE_TYPE (base));
9360 if (offset == NULL_TREE)
9361 wi_offset = wi::zero (precision);
9362 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9363 return true;
9364 else
9365 wi_offset = wi::to_poly_wide (offset);
9366
9367 wi::overflow_type overflow;
9368 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9369 precision);
9370 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9371 if (overflow)
9372 return true;
9373
9374 poly_uint64 total_hwi, size;
9375 if (!total.to_uhwi (&total_hwi)
9376 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9377 &size)
9378 || known_eq (size, 0U))
9379 return true;
9380
9381 if (known_le (total_hwi, size))
9382 return false;
9383
9384 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9385 array. */
9386 if (TREE_CODE (base) == ADDR_EXPR
9387 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9388 &size)
9389 && maybe_ne (size, 0U)
9390 && known_le (total_hwi, size))
9391 return false;
9392
9393 return true;
9394 }
9395
9396 /* Return a positive integer when the symbol DECL is known to have
9397 a nonzero address, zero when it's known not to (e.g., it's a weak
9398 symbol), and a negative integer when the symbol is not yet in the
9399 symbol table and so whether or not its address is zero is unknown.
9400    For function-local objects, always return a positive integer.  */
9401 static int
9402 maybe_nonzero_address (tree decl)
9403 {
9404 if (DECL_P (decl) && decl_in_symtab_p (decl))
9405 if (struct symtab_node *symbol = symtab_node::get_create (decl))
9406 return symbol->nonzero_address ();
9407
9408 /* Function local objects are never NULL. */
9409 if (DECL_P (decl)
9410 && (DECL_CONTEXT (decl)
9411 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9412 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9413 return 1;
9414
9415 return -1;
9416 }
9417
9418 /* Subroutine of fold_binary. This routine performs all of the
9419 transformations that are common to the equality/inequality
9420 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9421    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than fold_binary
9422    should call fold_binary, not this function directly.  Fold a comparison with
9423 tree code CODE and type TYPE with operands OP0 and OP1. Return
9424 the folded comparison or NULL_TREE. */
9425
9426 static tree
9427 fold_comparison (location_t loc, enum tree_code code, tree type,
9428 tree op0, tree op1)
9429 {
9430 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9431 tree arg0, arg1, tem;
9432
9433 arg0 = op0;
9434 arg1 = op1;
9435
9436 STRIP_SIGN_NOPS (arg0);
9437 STRIP_SIGN_NOPS (arg1);
9438
9439 /* For comparisons of pointers we can decompose it to a compile time
9440 comparison of the base objects and the offsets into the object.
9441 This requires at least one operand being an ADDR_EXPR or a
9442 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9443 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9444 && (TREE_CODE (arg0) == ADDR_EXPR
9445 || TREE_CODE (arg1) == ADDR_EXPR
9446 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9447 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9448 {
9449 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9450 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9451 machine_mode mode;
9452 int volatilep, reversep, unsignedp;
9453 bool indirect_base0 = false, indirect_base1 = false;
9454
9455 /* Get base and offset for the access. Strip ADDR_EXPR for
9456 get_inner_reference, but put it back by stripping INDIRECT_REF
9457 off the base object if possible. indirect_baseN will be true
9458 if baseN is not an address but refers to the object itself. */
9459 base0 = arg0;
9460 if (TREE_CODE (arg0) == ADDR_EXPR)
9461 {
9462 base0
9463 = get_inner_reference (TREE_OPERAND (arg0, 0),
9464 &bitsize, &bitpos0, &offset0, &mode,
9465 &unsignedp, &reversep, &volatilep);
9466 if (TREE_CODE (base0) == INDIRECT_REF)
9467 base0 = TREE_OPERAND (base0, 0);
9468 else
9469 indirect_base0 = true;
9470 }
9471 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9472 {
9473 base0 = TREE_OPERAND (arg0, 0);
9474 STRIP_SIGN_NOPS (base0);
9475 if (TREE_CODE (base0) == ADDR_EXPR)
9476 {
9477 base0
9478 = get_inner_reference (TREE_OPERAND (base0, 0),
9479 &bitsize, &bitpos0, &offset0, &mode,
9480 &unsignedp, &reversep, &volatilep);
9481 if (TREE_CODE (base0) == INDIRECT_REF)
9482 base0 = TREE_OPERAND (base0, 0);
9483 else
9484 indirect_base0 = true;
9485 }
9486 if (offset0 == NULL_TREE || integer_zerop (offset0))
9487 offset0 = TREE_OPERAND (arg0, 1);
9488 else
9489 offset0 = size_binop (PLUS_EXPR, offset0,
9490 TREE_OPERAND (arg0, 1));
9491 if (poly_int_tree_p (offset0))
9492 {
9493 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9494 TYPE_PRECISION (sizetype));
9495 tem <<= LOG2_BITS_PER_UNIT;
9496 tem += bitpos0;
9497 if (tem.to_shwi (&bitpos0))
9498 offset0 = NULL_TREE;
9499 }
9500 }
9501
9502 base1 = arg1;
9503 if (TREE_CODE (arg1) == ADDR_EXPR)
9504 {
9505 base1
9506 = get_inner_reference (TREE_OPERAND (arg1, 0),
9507 &bitsize, &bitpos1, &offset1, &mode,
9508 &unsignedp, &reversep, &volatilep);
9509 if (TREE_CODE (base1) == INDIRECT_REF)
9510 base1 = TREE_OPERAND (base1, 0);
9511 else
9512 indirect_base1 = true;
9513 }
9514 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9515 {
9516 base1 = TREE_OPERAND (arg1, 0);
9517 STRIP_SIGN_NOPS (base1);
9518 if (TREE_CODE (base1) == ADDR_EXPR)
9519 {
9520 base1
9521 = get_inner_reference (TREE_OPERAND (base1, 0),
9522 &bitsize, &bitpos1, &offset1, &mode,
9523 &unsignedp, &reversep, &volatilep);
9524 if (TREE_CODE (base1) == INDIRECT_REF)
9525 base1 = TREE_OPERAND (base1, 0);
9526 else
9527 indirect_base1 = true;
9528 }
9529 if (offset1 == NULL_TREE || integer_zerop (offset1))
9530 offset1 = TREE_OPERAND (arg1, 1);
9531 else
9532 offset1 = size_binop (PLUS_EXPR, offset1,
9533 TREE_OPERAND (arg1, 1));
9534 if (poly_int_tree_p (offset1))
9535 {
9536 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
9537 TYPE_PRECISION (sizetype));
9538 tem <<= LOG2_BITS_PER_UNIT;
9539 tem += bitpos1;
9540 if (tem.to_shwi (&bitpos1))
9541 offset1 = NULL_TREE;
9542 }
9543 }
9544
9545 /* If we have equivalent bases we might be able to simplify. */
9546 if (indirect_base0 == indirect_base1
9547 && operand_equal_p (base0, base1,
9548 indirect_base0 ? OEP_ADDRESS_OF : 0))
9549 {
9550 /* We can fold this expression to a constant if the non-constant
9551 offset parts are equal. */
9552 if ((offset0 == offset1
9553 || (offset0 && offset1
9554 && operand_equal_p (offset0, offset1, 0)))
9555 && (equality_code
9556 || (indirect_base0
9557 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9558 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9559 {
9560 if (!equality_code
9561 && maybe_ne (bitpos0, bitpos1)
9562 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9563 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9564 fold_overflow_warning (("assuming pointer wraparound does not "
9565 "occur when comparing P +- C1 with "
9566 "P +- C2"),
9567 WARN_STRICT_OVERFLOW_CONDITIONAL);
9568
9569 switch (code)
9570 {
9571 case EQ_EXPR:
9572 if (known_eq (bitpos0, bitpos1))
9573 return constant_boolean_node (true, type);
9574 if (known_ne (bitpos0, bitpos1))
9575 return constant_boolean_node (false, type);
9576 break;
9577 case NE_EXPR:
9578 if (known_ne (bitpos0, bitpos1))
9579 return constant_boolean_node (true, type);
9580 if (known_eq (bitpos0, bitpos1))
9581 return constant_boolean_node (false, type);
9582 break;
9583 case LT_EXPR:
9584 if (known_lt (bitpos0, bitpos1))
9585 return constant_boolean_node (true, type);
9586 if (known_ge (bitpos0, bitpos1))
9587 return constant_boolean_node (false, type);
9588 break;
9589 case LE_EXPR:
9590 if (known_le (bitpos0, bitpos1))
9591 return constant_boolean_node (true, type);
9592 if (known_gt (bitpos0, bitpos1))
9593 return constant_boolean_node (false, type);
9594 break;
9595 case GE_EXPR:
9596 if (known_ge (bitpos0, bitpos1))
9597 return constant_boolean_node (true, type);
9598 if (known_lt (bitpos0, bitpos1))
9599 return constant_boolean_node (false, type);
9600 break;
9601 case GT_EXPR:
9602 if (known_gt (bitpos0, bitpos1))
9603 return constant_boolean_node (true, type);
9604 if (known_le (bitpos0, bitpos1))
9605 return constant_boolean_node (false, type);
9606 break;
9607 default:;
9608 }
9609 }
9610 /* We can simplify the comparison to a comparison of the variable
9611 offset parts if the constant offset parts are equal.
9612 Be careful to use signed sizetype here because otherwise we
9613 mess with array offsets in the wrong way. This is possible
9614 	     because pointer arithmetic is restricted to remain within an
9615 object and overflow on pointer differences is undefined as of
9616 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9617 else if (known_eq (bitpos0, bitpos1)
9618 && (equality_code
9619 || (indirect_base0
9620 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9621 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9622 {
9623 /* By converting to signed sizetype we cover middle-end pointer
9624 arithmetic which operates on unsigned pointer types of size
9625 type size and ARRAY_REF offsets which are properly sign or
9626 zero extended from their type in case it is narrower than
9627 sizetype. */
9628 if (offset0 == NULL_TREE)
9629 offset0 = build_int_cst (ssizetype, 0);
9630 else
9631 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9632 if (offset1 == NULL_TREE)
9633 offset1 = build_int_cst (ssizetype, 0);
9634 else
9635 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9636
9637 if (!equality_code
9638 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9639 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9640 fold_overflow_warning (("assuming pointer wraparound does not "
9641 "occur when comparing P +- C1 with "
9642 "P +- C2"),
9643 WARN_STRICT_OVERFLOW_COMPARISON);
9644
9645 return fold_build2_loc (loc, code, type, offset0, offset1);
9646 }
9647 }
9648 /* For equal offsets we can simplify to a comparison of the
9649 base addresses. */
9650 else if (known_eq (bitpos0, bitpos1)
9651 && (indirect_base0
9652 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9653 && (indirect_base1
9654 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9655 && ((offset0 == offset1)
9656 || (offset0 && offset1
9657 && operand_equal_p (offset0, offset1, 0))))
9658 {
9659 if (indirect_base0)
9660 base0 = build_fold_addr_expr_loc (loc, base0);
9661 if (indirect_base1)
9662 base1 = build_fold_addr_expr_loc (loc, base1);
9663 return fold_build2_loc (loc, code, type, base0, base1);
9664 }
9665 /* Comparison between an ordinary (non-weak) symbol and a null
9666 	 pointer can be eliminated since such symbols must have a
9667 	 non-null address.  In C, relational expressions between pointers
9668 to objects and null pointers are undefined. The results
9669 below follow the C++ rules with the additional property that
9670 every object pointer compares greater than a null pointer.
9671 */
9672 else if (((DECL_P (base0)
9673 && maybe_nonzero_address (base0) > 0
9674 /* Avoid folding references to struct members at offset 0 to
9675 prevent tests like '&ptr->firstmember == 0' from getting
9676 eliminated. When ptr is null, although the -> expression
9677 is strictly speaking invalid, GCC retains it as a matter
9678 of QoI. See PR c/44555. */
9679 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
9680 || CONSTANT_CLASS_P (base0))
9681 && indirect_base0
9682 /* The caller guarantees that when one of the arguments is
9683 constant (i.e., null in this case) it is second. */
9684 && integer_zerop (arg1))
9685 {
9686 switch (code)
9687 {
9688 case EQ_EXPR:
9689 case LE_EXPR:
9690 case LT_EXPR:
9691 return constant_boolean_node (false, type);
9692 case GE_EXPR:
9693 case GT_EXPR:
9694 case NE_EXPR:
9695 return constant_boolean_node (true, type);
9696 default:
9697 gcc_unreachable ();
9698 }
9699 }
9700 }
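  /* For instance, given "int a[10];" (assuming 32-bit int), the
     comparison "&a[1] < &a[3]" has equal bases and constant bit
     positions 32 and 96, so it folds to true at compile time.  */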
9701
9702 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9703 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9704 the resulting offset is smaller in absolute value than the
9705 original one and has the same sign. */
9706 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9707 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9708 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9709 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9710 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9711 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9712 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9713 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9714 {
9715 tree const1 = TREE_OPERAND (arg0, 1);
9716 tree const2 = TREE_OPERAND (arg1, 1);
9717 tree variable1 = TREE_OPERAND (arg0, 0);
9718 tree variable2 = TREE_OPERAND (arg1, 0);
9719 tree cst;
9720 const char * const warnmsg = G_("assuming signed overflow does not "
9721 "occur when combining constants around "
9722 "a comparison");
9723
9724 /* Put the constant on the side where it doesn't overflow and is
9725 	 of lower absolute value and of the same sign as before.  */
9726 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9727 ? MINUS_EXPR : PLUS_EXPR,
9728 const2, const1);
9729 if (!TREE_OVERFLOW (cst)
9730 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9731 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9732 {
9733 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9734 return fold_build2_loc (loc, code, type,
9735 variable1,
9736 fold_build2_loc (loc, TREE_CODE (arg1),
9737 TREE_TYPE (arg1),
9738 variable2, cst));
9739 }
9740
9741 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9742 ? MINUS_EXPR : PLUS_EXPR,
9743 const1, const2);
9744 if (!TREE_OVERFLOW (cst)
9745 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9746 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9747 {
9748 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9749 return fold_build2_loc (loc, code, type,
9750 fold_build2_loc (loc, TREE_CODE (arg0),
9751 TREE_TYPE (arg0),
9752 variable1, cst),
9753 variable2);
9754 }
9755 }
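  /* E.g., for signed x and y, "x + 5 < y + 7" becomes "x < y + 2":
     the combined constant 2 is smaller in magnitude than 7 and has the
     same sign, so no new overflow can be introduced.  */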
9756
9757 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9758 if (tem)
9759 return tem;
9760
9761 /* If we are comparing an expression that just has comparisons
9762 of two integer values, arithmetic expressions of those comparisons,
9763 and constants, we can simplify it. There are only three cases
9764 to check: the two values can either be equal, the first can be
9765 greater, or the second can be greater. Fold the expression for
9766 those three values. Since each value must be 0 or 1, we have
9767 eight possibilities, each of which corresponds to the constant 0
9768 or 1 or one of the six possible comparisons.
9769
9770 This handles common cases like (a > b) == 0 but also handles
9771 expressions like ((x > y) - (y > x)) > 0, which supposedly
9772 occur in macroized code. */
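  /* E.g., for "(a > b) == 0": evaluating "a > b" at (max, min),
     (max, max) and (min, max) gives high/equal/low results 0, 1, 1,
     i.e. mask 3 below, which selects LE_EXPR, so the whole expression
     folds to "a <= b".  */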
9773
9774 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9775 {
9776 tree cval1 = 0, cval2 = 0;
9777
9778 if (twoval_comparison_p (arg0, &cval1, &cval2)
9779 /* Don't handle degenerate cases here; they should already
9780 have been handled anyway. */
9781 && cval1 != 0 && cval2 != 0
9782 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9783 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9784 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9785 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9786 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9787 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9788 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9789 {
9790 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9791 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9792
9793 /* We can't just pass T to eval_subst in case cval1 or cval2
9794 was the same as ARG1. */
9795
9796 tree high_result
9797 = fold_build2_loc (loc, code, type,
9798 eval_subst (loc, arg0, cval1, maxval,
9799 cval2, minval),
9800 arg1);
9801 tree equal_result
9802 = fold_build2_loc (loc, code, type,
9803 eval_subst (loc, arg0, cval1, maxval,
9804 cval2, maxval),
9805 arg1);
9806 tree low_result
9807 = fold_build2_loc (loc, code, type,
9808 eval_subst (loc, arg0, cval1, minval,
9809 cval2, maxval),
9810 arg1);
9811
9812 /* All three of these results should be 0 or 1. Confirm they are.
9813 Then use those values to select the proper code to use. */
9814
9815 if (TREE_CODE (high_result) == INTEGER_CST
9816 && TREE_CODE (equal_result) == INTEGER_CST
9817 && TREE_CODE (low_result) == INTEGER_CST)
9818 {
9819 /* Make a 3-bit mask with the high-order bit being the
9820 value for `>', the next for '=', and the low for '<'. */
9821 switch ((integer_onep (high_result) * 4)
9822 + (integer_onep (equal_result) * 2)
9823 + integer_onep (low_result))
9824 {
9825 case 0:
9826 /* Always false. */
9827 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9828 case 1:
9829 code = LT_EXPR;
9830 break;
9831 case 2:
9832 code = EQ_EXPR;
9833 break;
9834 case 3:
9835 code = LE_EXPR;
9836 break;
9837 case 4:
9838 code = GT_EXPR;
9839 break;
9840 case 5:
9841 code = NE_EXPR;
9842 break;
9843 case 6:
9844 code = GE_EXPR;
9845 break;
9846 case 7:
9847 /* Always true. */
9848 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9849 }
9850
9851 return fold_build2_loc (loc, code, type, cval1, cval2);
9852 }
9853 }
9854 }
9855
9856 return NULL_TREE;
9857 }
9858
9859
9860 /* Subroutine of fold_binary. Optimize complex multiplications of the
9861 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9862 argument EXPR represents the expression "z" of type TYPE. */
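/* For example, for z = 3 + 4i this computes
   (3*3 + 4*4) + 0i = 25 + 0i; the imaginary part of the result is
   always zero.  */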
9863
9864 static tree
9865 fold_mult_zconjz (location_t loc, tree type, tree expr)
9866 {
9867 tree itype = TREE_TYPE (type);
9868 tree rpart, ipart, tem;
9869
9870 if (TREE_CODE (expr) == COMPLEX_EXPR)
9871 {
9872 rpart = TREE_OPERAND (expr, 0);
9873 ipart = TREE_OPERAND (expr, 1);
9874 }
9875 else if (TREE_CODE (expr) == COMPLEX_CST)
9876 {
9877 rpart = TREE_REALPART (expr);
9878 ipart = TREE_IMAGPART (expr);
9879 }
9880 else
9881 {
9882 expr = save_expr (expr);
9883 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9884 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9885 }
9886
9887 rpart = save_expr (rpart);
9888 ipart = save_expr (ipart);
9889 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9890 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9891 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9892 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9893 build_zero_cst (itype));
9894 }
9895
9896
9897 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9898 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
9899 true if successful. */
9900
9901 static bool
9902 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
9903 {
9904 unsigned HOST_WIDE_INT i, nunits;
9905
9906 if (TREE_CODE (arg) == VECTOR_CST
9907 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
9908 {
9909 for (i = 0; i < nunits; ++i)
9910 elts[i] = VECTOR_CST_ELT (arg, i);
9911 }
9912 else if (TREE_CODE (arg) == CONSTRUCTOR)
9913 {
9914 constructor_elt *elt;
9915
9916 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9917 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9918 return false;
9919 else
9920 elts[i] = elt->value;
9921 }
9922 else
9923 return false;
9924 for (; i < nelts; i++)
9925 elts[i]
9926 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9927 return true;
9928 }
9929
9930 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9931 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9932 NULL_TREE otherwise. */
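/* E.g., with ARG0 = {1, 2, 3, 4}, ARG1 = {5, 6, 7, 8} and
   SEL = {0, 4, 1, 5}, the folded result is {1, 5, 2, 6}: selector
   elements below the vector length index into ARG0, the rest into
   ARG1.  */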
9933
9934 tree
9935 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9936 {
9937 unsigned int i;
9938 unsigned HOST_WIDE_INT nelts;
9939 bool need_ctor = false;
9940
9941 if (!sel.length ().is_constant (&nelts))
9942 return NULL_TREE;
9943 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9944 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9945 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9946 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9947 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9948 return NULL_TREE;
9949
9950 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9951 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9952 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9953 return NULL_TREE;
9954
9955 tree_vector_builder out_elts (type, nelts, 1);
9956 for (i = 0; i < nelts; i++)
9957 {
9958 HOST_WIDE_INT index;
9959 if (!sel[i].is_constant (&index))
9960 return NULL_TREE;
9961 if (!CONSTANT_CLASS_P (in_elts[index]))
9962 need_ctor = true;
9963 out_elts.quick_push (unshare_expr (in_elts[index]));
9964 }
9965
9966 if (need_ctor)
9967 {
9968 vec<constructor_elt, va_gc> *v;
9969 vec_alloc (v, nelts);
9970 for (i = 0; i < nelts; i++)
9971 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9972 return build_constructor (type, v);
9973 }
9974 else
9975 return out_elts.build ();
9976 }
9977
9978 /* Try to fold a pointer difference of type TYPE between two address
9979    expressions of array references AREF0 and AREF1 using location LOC.
9980    Return a simplified expression for the difference or NULL_TREE.  */
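/* E.g., "&a[i] - &a[j]" with equal bases folds to
   "(i - j) * sizeof (*a)", plus any difference contributed by the
   bases when they are themselves array references.  */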
9981
9982 static tree
9983 fold_addr_of_array_ref_difference (location_t loc, tree type,
9984 tree aref0, tree aref1,
9985 bool use_pointer_diff)
9986 {
9987 tree base0 = TREE_OPERAND (aref0, 0);
9988 tree base1 = TREE_OPERAND (aref1, 0);
9989 tree base_offset = build_int_cst (type, 0);
9990
9991 /* If the bases are array references as well, recurse. If the bases
9992 are pointer indirections compute the difference of the pointers.
9993 If the bases are equal, we are set. */
9994 if ((TREE_CODE (base0) == ARRAY_REF
9995 && TREE_CODE (base1) == ARRAY_REF
9996 && (base_offset
9997 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9998 use_pointer_diff)))
9999 || (INDIRECT_REF_P (base0)
10000 && INDIRECT_REF_P (base1)
10001 && (base_offset
10002 = use_pointer_diff
10003 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10004 TREE_OPERAND (base0, 0),
10005 TREE_OPERAND (base1, 0))
10006 : fold_binary_loc (loc, MINUS_EXPR, type,
10007 fold_convert (type,
10008 TREE_OPERAND (base0, 0)),
10009 fold_convert (type,
10010 TREE_OPERAND (base1, 0)))))
10011 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10012 {
10013 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10014 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10015 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10016 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10017 return fold_build2_loc (loc, PLUS_EXPR, type,
10018 base_offset,
10019 fold_build2_loc (loc, MULT_EXPR, type,
10020 diff, esz));
10021 }
10022 return NULL_TREE;
10023 }
10024
10025 /* If the real or vector real constant CST of type TYPE has an exact
10026 inverse, return it, else return NULL. */
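/* E.g., 4.0 yields 0.25, which is exact in binary floating point,
   while 3.0 yields NULL because 1/3 has no exact representation.  */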
10027
10028 tree
10029 exact_inverse (tree type, tree cst)
10030 {
10031 REAL_VALUE_TYPE r;
10032 tree unit_type;
10033 machine_mode mode;
10034
10035 switch (TREE_CODE (cst))
10036 {
10037 case REAL_CST:
10038 r = TREE_REAL_CST (cst);
10039
10040 if (exact_real_inverse (TYPE_MODE (type), &r))
10041 return build_real (type, r);
10042
10043 return NULL_TREE;
10044
10045 case VECTOR_CST:
10046 {
10047 unit_type = TREE_TYPE (type);
10048 mode = TYPE_MODE (unit_type);
10049
10050 tree_vector_builder elts;
10051 if (!elts.new_unary_operation (type, cst, false))
10052 return NULL_TREE;
10053 unsigned int count = elts.encoded_nelts ();
10054 for (unsigned int i = 0; i < count; ++i)
10055 {
10056 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10057 if (!exact_real_inverse (mode, &r))
10058 return NULL_TREE;
10059 elts.quick_push (build_real (unit_type, r));
10060 }
10061
10062 return elts.build ();
10063 }
10064
10065 default:
10066 return NULL_TREE;
10067 }
10068 }
10069
10070 /* Mask out the tz least significant bits of X of type TYPE where
10071 tz is the number of trailing zeroes in Y. */
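/* E.g., if Y is 24 (binary ...11000, three trailing zeroes), the
   result is X with its three least significant bits cleared.  */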
10072 static wide_int
10073 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10074 {
10075 int tz = wi::ctz (y);
10076 if (tz > 0)
10077 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10078 return x;
10079 }
10080
10081 /* Return true when T is an address and is known to be nonzero.
10082 For floating point we further ensure that T is not denormal.
10083 Similar logic is present in nonzero_address in rtlanal.h.
10084
10085 If the return value is based on the assumption that signed overflow
10086 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10087 change *STRICT_OVERFLOW_P. */
10088
10089 static bool
10090 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10091 {
10092 tree type = TREE_TYPE (t);
10093 enum tree_code code;
10094
10095 /* Doing something useful for floating point would need more work. */
10096 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10097 return false;
10098
10099 code = TREE_CODE (t);
10100 switch (TREE_CODE_CLASS (code))
10101 {
10102 case tcc_unary:
10103 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10104 strict_overflow_p);
10105 case tcc_binary:
10106 case tcc_comparison:
10107 return tree_binary_nonzero_warnv_p (code, type,
10108 TREE_OPERAND (t, 0),
10109 TREE_OPERAND (t, 1),
10110 strict_overflow_p);
10111 case tcc_constant:
10112 case tcc_declaration:
10113 case tcc_reference:
10114 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10115
10116 default:
10117 break;
10118 }
10119
10120 switch (code)
10121 {
10122 case TRUTH_NOT_EXPR:
10123 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10124 strict_overflow_p);
10125
10126 case TRUTH_AND_EXPR:
10127 case TRUTH_OR_EXPR:
10128 case TRUTH_XOR_EXPR:
10129 return tree_binary_nonzero_warnv_p (code, type,
10130 TREE_OPERAND (t, 0),
10131 TREE_OPERAND (t, 1),
10132 strict_overflow_p);
10133
10134 case COND_EXPR:
10135 case CONSTRUCTOR:
10136 case OBJ_TYPE_REF:
10137 case ASSERT_EXPR:
10138 case ADDR_EXPR:
10139 case WITH_SIZE_EXPR:
10140 case SSA_NAME:
10141 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10142
10143 case COMPOUND_EXPR:
10144 case MODIFY_EXPR:
10145 case BIND_EXPR:
10146 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10147 strict_overflow_p);
10148
10149 case SAVE_EXPR:
10150 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10151 strict_overflow_p);
10152
10153 case CALL_EXPR:
10154 {
10155 tree fndecl = get_callee_fndecl (t);
10156 if (!fndecl) return false;
10157 if (flag_delete_null_pointer_checks && !flag_check_new
10158 && DECL_IS_OPERATOR_NEW_P (fndecl)
10159 && !TREE_NOTHROW (fndecl))
10160 return true;
10161 if (flag_delete_null_pointer_checks
10162 && lookup_attribute ("returns_nonnull",
10163 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10164 return true;
10165 return alloca_call_p (t);
10166 }
10167
10168 default:
10169 break;
10170 }
10171 return false;
10172 }
10173
10174 /* Return true when T is an address and is known to be nonzero.
10175 Handle warnings about undefined signed overflow. */
10176
10177 bool
10178 tree_expr_nonzero_p (tree t)
10179 {
10180 bool ret, strict_overflow_p;
10181
10182 strict_overflow_p = false;
10183 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10184 if (strict_overflow_p)
10185 fold_overflow_warning (("assuming signed overflow does not occur when "
10186 "determining that expression is always "
10187 "non-zero"),
10188 WARN_STRICT_OVERFLOW_MISC);
10189 return ret;
10190 }
10191
10192 /* Return true if T is known not to be equal to an integer W. */
10193
10194 bool
10195 expr_not_equal_to (tree t, const wide_int &w)
10196 {
10197 value_range vr;
10198 switch (TREE_CODE (t))
10199 {
10200 case INTEGER_CST:
10201 return wi::to_wide (t) != w;
10202
10203 case SSA_NAME:
10204 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10205 return false;
10206 get_range_info (t, vr);
10207 if (!vr.undefined_p ()
10208 && !vr.contains_p (wide_int_to_tree (TREE_TYPE (t), w)))
10209 return true;
10210 /* If T has some known zero bits and W has any of those bits set,
10211 then T is known not to be equal to W. */
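      /* E.g., if T is known to be even (bit 0 of its nonzero-bits mask
	 is clear) and W is 5, then bit 0 survives the bit_and_not below
	 and T cannot equal W.  */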
10212 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10213 TYPE_PRECISION (TREE_TYPE (t))), 0))
10214 return true;
10215 return false;
10216
10217 default:
10218 return false;
10219 }
10220 }
10221
10222 /* Fold a binary expression of code CODE and type TYPE with operands
10223 OP0 and OP1. LOC is the location of the resulting expression.
10224 Return the folded expression if folding is successful. Otherwise,
10225 return NULL_TREE. */
10226
10227 tree
10228 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10229 tree op0, tree op1)
10230 {
10231 enum tree_code_class kind = TREE_CODE_CLASS (code);
10232 tree arg0, arg1, tem;
10233 tree t1 = NULL_TREE;
10234 bool strict_overflow_p;
10235 unsigned int prec;
10236
10237 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10238 && TREE_CODE_LENGTH (code) == 2
10239 && op0 != NULL_TREE
10240 && op1 != NULL_TREE);
10241
10242 arg0 = op0;
10243 arg1 = op1;
10244
10245 /* Strip any conversions that don't change the mode. This is
10246 safe for every expression, except for a comparison expression
10247 because its signedness is derived from its operands. So, in
10248 the latter case, only strip conversions that don't change the
10249 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10250 preserved.
10251
10252 Note that this is done as an internal manipulation within the
10253 constant folder, in order to find the simplest representation
10254 of the arguments so that their form can be studied. In any
10255      case, the appropriate type conversions should be put back in
10256 the tree that will get out of the constant folder. */
10257
10258 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10259 {
10260 STRIP_SIGN_NOPS (arg0);
10261 STRIP_SIGN_NOPS (arg1);
10262 }
10263 else
10264 {
10265 STRIP_NOPS (arg0);
10266 STRIP_NOPS (arg1);
10267 }
10268
10269 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10270 constant but we can't do arithmetic on them. */
10271 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10272 {
10273 tem = const_binop (code, type, arg0, arg1);
10274 if (tem != NULL_TREE)
10275 {
10276 if (TREE_TYPE (tem) != type)
10277 tem = fold_convert_loc (loc, type, tem);
10278 return tem;
10279 }
10280 }
10281
10282 /* If this is a commutative operation, and ARG0 is a constant, move it
10283 to ARG1 to reduce the number of tests below. */
10284 if (commutative_tree_code (code)
10285 && tree_swap_operands_p (arg0, arg1))
10286 return fold_build2_loc (loc, code, type, op1, op0);
10287
10288 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10289 to ARG1 to reduce the number of tests below. */
10290 if (kind == tcc_comparison
10291 && tree_swap_operands_p (arg0, arg1))
10292 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10293
10294 tem = generic_simplify (loc, code, type, op0, op1);
10295 if (tem)
10296 return tem;
10297
10298 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10299
10300 First check for cases where an arithmetic operation is applied to a
10301 compound, conditional, or comparison operation. Push the arithmetic
10302 operation inside the compound or conditional to see if any folding
10303 can then be done. Convert comparison to conditional for this purpose.
10304      This also optimizes non-constant cases that used to be done in
10305 expand_expr.
10306
10307      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
10308      where one of the operands is a comparison and the other is a comparison, a
10309 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10310 code below would make the expression more complex. Change it to a
10311 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10312 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10313
10314 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10315 || code == EQ_EXPR || code == NE_EXPR)
10316 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10317 && ((truth_value_p (TREE_CODE (arg0))
10318 && (truth_value_p (TREE_CODE (arg1))
10319 || (TREE_CODE (arg1) == BIT_AND_EXPR
10320 && integer_onep (TREE_OPERAND (arg1, 1)))))
10321 || (truth_value_p (TREE_CODE (arg1))
10322 && (truth_value_p (TREE_CODE (arg0))
10323 || (TREE_CODE (arg0) == BIT_AND_EXPR
10324 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10325 {
10326 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10327 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10328 : TRUTH_XOR_EXPR,
10329 boolean_type_node,
10330 fold_convert_loc (loc, boolean_type_node, arg0),
10331 fold_convert_loc (loc, boolean_type_node, arg1));
10332
10333 if (code == EQ_EXPR)
10334 tem = invert_truthvalue_loc (loc, tem);
10335
10336 return fold_convert_loc (loc, type, tem);
10337 }
10338
10339 if (TREE_CODE_CLASS (code) == tcc_binary
10340 || TREE_CODE_CLASS (code) == tcc_comparison)
10341 {
10342 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10343 {
10344 tem = fold_build2_loc (loc, code, type,
10345 fold_convert_loc (loc, TREE_TYPE (op0),
10346 TREE_OPERAND (arg0, 1)), op1);
10347 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10348 tem);
10349 }
10350 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10351 {
10352 tem = fold_build2_loc (loc, code, type, op0,
10353 fold_convert_loc (loc, TREE_TYPE (op1),
10354 TREE_OPERAND (arg1, 1)));
10355 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10356 tem);
10357 }
10358
10359 if (TREE_CODE (arg0) == COND_EXPR
10360 || TREE_CODE (arg0) == VEC_COND_EXPR
10361 || COMPARISON_CLASS_P (arg0))
10362 {
10363 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10364 arg0, arg1,
10365 /*cond_first_p=*/1);
10366 if (tem != NULL_TREE)
10367 return tem;
10368 }
10369
10370 if (TREE_CODE (arg1) == COND_EXPR
10371 || TREE_CODE (arg1) == VEC_COND_EXPR
10372 || COMPARISON_CLASS_P (arg1))
10373 {
10374 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10375 arg1, arg0,
10376 /*cond_first_p=*/0);
10377 if (tem != NULL_TREE)
10378 return tem;
10379 }
10380 }
10381
10382 switch (code)
10383 {
10384 case MEM_REF:
10385 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10386 if (TREE_CODE (arg0) == ADDR_EXPR
10387 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10388 {
10389 tree iref = TREE_OPERAND (arg0, 0);
10390 return fold_build2 (MEM_REF, type,
10391 TREE_OPERAND (iref, 0),
10392 int_const_binop (PLUS_EXPR, arg1,
10393 TREE_OPERAND (iref, 1)));
10394 }
10395
10396 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10397 if (TREE_CODE (arg0) == ADDR_EXPR
10398 && handled_component_p (TREE_OPERAND (arg0, 0)))
10399 {
10400 tree base;
10401 poly_int64 coffset;
10402 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10403 &coffset);
10404 if (!base)
10405 return NULL_TREE;
10406 return fold_build2 (MEM_REF, type,
10407 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10408 int_const_binop (PLUS_EXPR, arg1,
10409 size_int (coffset)));
10410 }
10411
10412 return NULL_TREE;
10413
10414 case POINTER_PLUS_EXPR:
10415 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10416 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10417 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10418 return fold_convert_loc (loc, type,
10419 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10420 fold_convert_loc (loc, sizetype,
10421 arg1),
10422 fold_convert_loc (loc, sizetype,
10423 arg0)));
10424
10425 return NULL_TREE;
10426
10427 case PLUS_EXPR:
10428 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10429 {
10430 /* X + (X / CST) * -CST is X % CST. */
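/* E.g. x + (x / 16) * -16 folds to x % 16 (illustrative).  */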
10431 if (TREE_CODE (arg1) == MULT_EXPR
10432 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10433 && operand_equal_p (arg0,
10434 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10435 {
10436 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10437 tree cst1 = TREE_OPERAND (arg1, 1);
10438 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10439 cst1, cst0);
10440 if (sum && integer_zerop (sum))
10441 return fold_convert_loc (loc, type,
10442 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10443 TREE_TYPE (arg0), arg0,
10444 cst0));
10445 }
10446 }
10447
10448 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10449 one. Make sure the type is not saturating and has the signedness of
10450 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10451 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
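/* Illustrative examples, assuming integer x and y: x*4 + x may fold
   to x*5, and x*4 + y*4 may fold to (x + y)*4.  */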
10452 if ((TREE_CODE (arg0) == MULT_EXPR
10453 || TREE_CODE (arg1) == MULT_EXPR)
10454 && !TYPE_SATURATING (type)
10455 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10456 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10457 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10458 {
10459 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10460 if (tem)
10461 return tem;
10462 }
10463
10464 if (! FLOAT_TYPE_P (type))
10465 {
10466 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10467 (plus (plus (mult) (mult)) (foo)) so that we can
10468 take advantage of the factoring cases below. */
10469 if (ANY_INTEGRAL_TYPE_P (type)
10470 && TYPE_OVERFLOW_WRAPS (type)
10471 && (((TREE_CODE (arg0) == PLUS_EXPR
10472 || TREE_CODE (arg0) == MINUS_EXPR)
10473 && TREE_CODE (arg1) == MULT_EXPR)
10474 || ((TREE_CODE (arg1) == PLUS_EXPR
10475 || TREE_CODE (arg1) == MINUS_EXPR)
10476 && TREE_CODE (arg0) == MULT_EXPR)))
10477 {
10478 tree parg0, parg1, parg, marg;
10479 enum tree_code pcode;
10480
10481 if (TREE_CODE (arg1) == MULT_EXPR)
10482 parg = arg0, marg = arg1;
10483 else
10484 parg = arg1, marg = arg0;
10485 pcode = TREE_CODE (parg);
10486 parg0 = TREE_OPERAND (parg, 0);
10487 parg1 = TREE_OPERAND (parg, 1);
10488 STRIP_NOPS (parg0);
10489 STRIP_NOPS (parg1);
10490
10491 if (TREE_CODE (parg0) == MULT_EXPR
10492 && TREE_CODE (parg1) != MULT_EXPR)
10493 return fold_build2_loc (loc, pcode, type,
10494 fold_build2_loc (loc, PLUS_EXPR, type,
10495 fold_convert_loc (loc, type,
10496 parg0),
10497 fold_convert_loc (loc, type,
10498 marg)),
10499 fold_convert_loc (loc, type, parg1));
10500 if (TREE_CODE (parg0) != MULT_EXPR
10501 && TREE_CODE (parg1) == MULT_EXPR)
10502 return
10503 fold_build2_loc (loc, PLUS_EXPR, type,
10504 fold_convert_loc (loc, type, parg0),
10505 fold_build2_loc (loc, pcode, type,
10506 fold_convert_loc (loc, type, marg),
10507 fold_convert_loc (loc, type,
10508 parg1)));
10509 }
10510 }
10511 else
10512 {
10513 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10514 to __complex__ ( x, y ). This is not the same for SNaNs or
10515 if signed zeros are involved. */
10516 if (!HONOR_SNANS (element_mode (arg0))
10517 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10518 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10519 {
10520 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10521 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10522 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10523 bool arg0rz = false, arg0iz = false;
10524 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10525 || (arg0i && (arg0iz = real_zerop (arg0i))))
10526 {
10527 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10528 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10529 if (arg0rz && arg1i && real_zerop (arg1i))
10530 {
10531 tree rp = arg1r ? arg1r
10532 : build1 (REALPART_EXPR, rtype, arg1);
10533 tree ip = arg0i ? arg0i
10534 : build1 (IMAGPART_EXPR, rtype, arg0);
10535 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10536 }
10537 else if (arg0iz && arg1r && real_zerop (arg1r))
10538 {
10539 tree rp = arg0r ? arg0r
10540 : build1 (REALPART_EXPR, rtype, arg0);
10541 tree ip = arg1i ? arg1i
10542 : build1 (IMAGPART_EXPR, rtype, arg1);
10543 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10544 }
10545 }
10546 }
10547
10548 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10549 We associate floats only if the user has specified
10550 -fassociative-math. */
10551 if (flag_associative_math
10552 && TREE_CODE (arg1) == PLUS_EXPR
10553 && TREE_CODE (arg0) != MULT_EXPR)
10554 {
10555 tree tree10 = TREE_OPERAND (arg1, 0);
10556 tree tree11 = TREE_OPERAND (arg1, 1);
10557 if (TREE_CODE (tree11) == MULT_EXPR
10558 && TREE_CODE (tree10) == MULT_EXPR)
10559 {
10560 tree tree0;
10561 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10562 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10563 }
10564 }
10565 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10566 We associate floats only if the user has specified
10567 -fassociative-math. */
10568 if (flag_associative_math
10569 && TREE_CODE (arg0) == PLUS_EXPR
10570 && TREE_CODE (arg1) != MULT_EXPR)
10571 {
10572 tree tree00 = TREE_OPERAND (arg0, 0);
10573 tree tree01 = TREE_OPERAND (arg0, 1);
10574 if (TREE_CODE (tree01) == MULT_EXPR
10575 && TREE_CODE (tree00) == MULT_EXPR)
10576 {
10577 tree tree0;
10578 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10579 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10580 }
10581 }
10582 }
10583
10584 bit_rotate:
10585 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10586 is a rotate of A by C1 bits. */
10587 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10588 is a rotate of A by B bits.
10589 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
10590 though in this case CODE must be | and not + or ^, otherwise
10591 it doesn't return A when B is 0. */
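/* Illustrative, for 32-bit unsigned A: (A << 3) + (A >> 29) folds to
   A lrotate 3; (A << B) | (A >> (32 - B)) folds to A lrotate B; and
   (A << B) | (A >> (-B & 31)) likewise, but only for |.  */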
10592 {
10593 enum tree_code code0, code1;
10594 tree rtype;
10595 code0 = TREE_CODE (arg0);
10596 code1 = TREE_CODE (arg1);
10597 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10598 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10599 && operand_equal_p (TREE_OPERAND (arg0, 0),
10600 TREE_OPERAND (arg1, 0), 0)
10601 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10602 TYPE_UNSIGNED (rtype))
10603 /* Only create rotates in complete modes. Other cases are not
10604 expanded properly. */
10605 && (element_precision (rtype)
10606 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
10607 {
10608 tree tree01, tree11;
10609 tree orig_tree01, orig_tree11;
10610 enum tree_code code01, code11;
10611
10612 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
10613 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
10614 STRIP_NOPS (tree01);
10615 STRIP_NOPS (tree11);
10616 code01 = TREE_CODE (tree01);
10617 code11 = TREE_CODE (tree11);
10618 if (code11 != MINUS_EXPR
10619 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
10620 {
10621 std::swap (code0, code1);
10622 std::swap (code01, code11);
10623 std::swap (tree01, tree11);
10624 std::swap (orig_tree01, orig_tree11);
10625 }
10626 if (code01 == INTEGER_CST
10627 && code11 == INTEGER_CST
10628 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10629 == element_precision (rtype)))
10630 {
10631 tem = build2_loc (loc, LROTATE_EXPR,
10632 rtype, TREE_OPERAND (arg0, 0),
10633 code0 == LSHIFT_EXPR
10634 ? orig_tree01 : orig_tree11);
10635 return fold_convert_loc (loc, type, tem);
10636 }
10637 else if (code11 == MINUS_EXPR)
10638 {
10639 tree tree110, tree111;
10640 tree110 = TREE_OPERAND (tree11, 0);
10641 tree111 = TREE_OPERAND (tree11, 1);
10642 STRIP_NOPS (tree110);
10643 STRIP_NOPS (tree111);
10644 if (TREE_CODE (tree110) == INTEGER_CST
10645 && compare_tree_int (tree110,
10646 element_precision (rtype)) == 0
10647 && operand_equal_p (tree01, tree111, 0))
10648 {
10649 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10650 ? LROTATE_EXPR : RROTATE_EXPR),
10651 rtype, TREE_OPERAND (arg0, 0),
10652 orig_tree01);
10653 return fold_convert_loc (loc, type, tem);
10654 }
10655 }
10656 else if (code == BIT_IOR_EXPR
10657 && code11 == BIT_AND_EXPR
10658 && pow2p_hwi (element_precision (rtype)))
10659 {
10660 tree tree110, tree111;
10661 tree110 = TREE_OPERAND (tree11, 0);
10662 tree111 = TREE_OPERAND (tree11, 1);
10663 STRIP_NOPS (tree110);
10664 STRIP_NOPS (tree111);
10665 if (TREE_CODE (tree110) == NEGATE_EXPR
10666 && TREE_CODE (tree111) == INTEGER_CST
10667 && compare_tree_int (tree111,
10668 element_precision (rtype) - 1) == 0
10669 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
10670 {
10671 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10672 ? LROTATE_EXPR : RROTATE_EXPR),
10673 rtype, TREE_OPERAND (arg0, 0),
10674 orig_tree01);
10675 return fold_convert_loc (loc, type, tem);
10676 }
10677 }
10678 }
10679 }
10680
10681 associate:
10682 /* In most languages, we can't associate operations on floats through
10683 parentheses. Rather than remember where the parentheses were, we
10684 don't associate floats at all, unless the user has specified
10685 -fassociative-math.
10686 We also need to make sure the type is not saturating. */
10687
10688 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10689 && !TYPE_SATURATING (type))
10690 {
10691 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
10692 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
10693 tree atype = type;
10694 bool ok = true;
10695
10696 /* Split both trees into variables, constants, and literals. Then
10697 associate each group together, the constants with literals,
10698 then the result with variables. This increases the chances of
10699 literals being recombined later and of generating relocatable
10700 expressions for the sum of a constant and literal. */
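/* Illustrative: (x + 1) + (y + 2) splits into variables x, y and
   literals 1, 2, which recombine as (x + y) + 3.  */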
10701 var0 = split_tree (arg0, type, code,
10702 &minus_var0, &con0, &minus_con0,
10703 &lit0, &minus_lit0, 0);
10704 var1 = split_tree (arg1, type, code,
10705 &minus_var1, &con1, &minus_con1,
10706 &lit1, &minus_lit1, code == MINUS_EXPR);
10707
10708 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10709 if (code == MINUS_EXPR)
10710 code = PLUS_EXPR;
10711
10712 /* With undefined overflow prefer doing association in a type
10713 which wraps on overflow, if that is one of the operand types. */
10714 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
10715 && !TYPE_OVERFLOW_WRAPS (type))
10716 {
10717 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10718 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10719 atype = TREE_TYPE (arg0);
10720 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10721 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10722 atype = TREE_TYPE (arg1);
10723 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10724 }
10725
10726 /* With undefined overflow we can only associate constants with one
10727 variable, and constants whose association doesn't overflow. */
10728 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
10729 && !TYPE_OVERFLOW_WRAPS (atype))
10730 {
10731 if ((var0 && var1) || (minus_var0 && minus_var1))
10732 {
10733 /* ??? If split_tree would handle NEGATE_EXPR we could
10734 simply reject these cases and the allowed cases would
10735 be the var0/minus_var1 ones. */
10736 tree tmp0 = var0 ? var0 : minus_var0;
10737 tree tmp1 = var1 ? var1 : minus_var1;
10738 bool one_neg = false;
10739
10740 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10741 {
10742 tmp0 = TREE_OPERAND (tmp0, 0);
10743 one_neg = !one_neg;
10744 }
10745 if (CONVERT_EXPR_P (tmp0)
10746 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10747 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10748 <= TYPE_PRECISION (atype)))
10749 tmp0 = TREE_OPERAND (tmp0, 0);
10750 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10751 {
10752 tmp1 = TREE_OPERAND (tmp1, 0);
10753 one_neg = !one_neg;
10754 }
10755 if (CONVERT_EXPR_P (tmp1)
10756 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10757 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10758 <= TYPE_PRECISION (atype)))
10759 tmp1 = TREE_OPERAND (tmp1, 0);
10760 /* The only case we can still associate with two variables
10761 is if they cancel out. */
10762 if (!one_neg
10763 || !operand_equal_p (tmp0, tmp1, 0))
10764 ok = false;
10765 }
10766 else if ((var0 && minus_var1
10767 && ! operand_equal_p (var0, minus_var1, 0))
10768 || (minus_var0 && var1
10769 && ! operand_equal_p (minus_var0, var1, 0)))
10770 ok = false;
10771 }
10772
10773 /* Only do something if we found more than two objects. Otherwise,
10774 nothing has changed and we risk infinite recursion. */
10775 if (ok
10776 && ((var0 != 0) + (var1 != 0)
10777 + (minus_var0 != 0) + (minus_var1 != 0)
10778 + (con0 != 0) + (con1 != 0)
10779 + (minus_con0 != 0) + (minus_con1 != 0)
10780 + (lit0 != 0) + (lit1 != 0)
10781 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
10782 {
10783 var0 = associate_trees (loc, var0, var1, code, atype);
10784 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
10785 code, atype);
10786 con0 = associate_trees (loc, con0, con1, code, atype);
10787 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
10788 code, atype);
10789 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10790 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10791 code, atype);
10792
10793 if (minus_var0 && var0)
10794 {
10795 var0 = associate_trees (loc, var0, minus_var0,
10796 MINUS_EXPR, atype);
10797 minus_var0 = 0;
10798 }
10799 if (minus_con0 && con0)
10800 {
10801 con0 = associate_trees (loc, con0, minus_con0,
10802 MINUS_EXPR, atype);
10803 minus_con0 = 0;
10804 }
10805
10806 /* Preserve the MINUS_EXPR if the negative part of the literal is
10807 greater than the positive part. Otherwise, the multiplicative
10808 folding code (i.e extract_muldiv) may be fooled in case
10809 unsigned constants are subtracted, like in the following
10810 example: ((X*2 + 4) - 8U)/2. */
10811 if (minus_lit0 && lit0)
10812 {
10813 if (TREE_CODE (lit0) == INTEGER_CST
10814 && TREE_CODE (minus_lit0) == INTEGER_CST
10815 && tree_int_cst_lt (lit0, minus_lit0)
10816 /* But avoid ending up with only negated parts. */
10817 && (var0 || con0))
10818 {
10819 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10820 MINUS_EXPR, atype);
10821 lit0 = 0;
10822 }
10823 else
10824 {
10825 lit0 = associate_trees (loc, lit0, minus_lit0,
10826 MINUS_EXPR, atype);
10827 minus_lit0 = 0;
10828 }
10829 }
10830
10831 /* Don't introduce overflows through reassociation. */
10832 if ((lit0 && TREE_OVERFLOW_P (lit0))
10833 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
10834 return NULL_TREE;
10835
10836 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
10837 con0 = associate_trees (loc, con0, lit0, code, atype);
10838 lit0 = 0;
10839 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
10840 code, atype);
10841 minus_lit0 = 0;
10842
10843 /* Eliminate minus_con0. */
10844 if (minus_con0)
10845 {
10846 if (con0)
10847 con0 = associate_trees (loc, con0, minus_con0,
10848 MINUS_EXPR, atype);
10849 else if (var0)
10850 var0 = associate_trees (loc, var0, minus_con0,
10851 MINUS_EXPR, atype);
10852 else
10853 gcc_unreachable ();
10854 minus_con0 = 0;
10855 }
10856
10857 /* Eliminate minus_var0. */
10858 if (minus_var0)
10859 {
10860 if (con0)
10861 con0 = associate_trees (loc, con0, minus_var0,
10862 MINUS_EXPR, atype);
10863 else
10864 gcc_unreachable ();
10865 minus_var0 = 0;
10866 }
10867
10868 return
10869 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10870 code, atype));
10871 }
10872 }
10873
10874 return NULL_TREE;
10875
10876 case POINTER_DIFF_EXPR:
10877 case MINUS_EXPR:
10878 /* Fold &a[i] - &a[j] to i-j. */
10879 if (TREE_CODE (arg0) == ADDR_EXPR
10880 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10881 && TREE_CODE (arg1) == ADDR_EXPR
10882 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10883 {
10884 tree tem = fold_addr_of_array_ref_difference (loc, type,
10885 TREE_OPERAND (arg0, 0),
10886 TREE_OPERAND (arg1, 0),
10887 code
10888 == POINTER_DIFF_EXPR);
10889 if (tem)
10890 return tem;
10891 }
10892
10893 /* Further transformations are not for pointers. */
10894 if (code == POINTER_DIFF_EXPR)
10895 return NULL_TREE;
10896
10897 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10898 if (TREE_CODE (arg0) == NEGATE_EXPR
10899 && negate_expr_p (op1)
10900 /* If arg0 is e.g. unsigned int and type is int, then this could
10901 introduce UB, because if A is INT_MIN at runtime, the original
10902 expression can be well defined while the latter is not.
10903 See PR83269. */
10904 && !(ANY_INTEGRAL_TYPE_P (type)
10905 && TYPE_OVERFLOW_UNDEFINED (type)
10906 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10907 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10908 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
10909 fold_convert_loc (loc, type,
10910 TREE_OPERAND (arg0, 0)));
10911
10912 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10913 __complex__ ( x, -y ). This is not the same for SNaNs or if
10914 signed zeros are involved. */
10915 if (!HONOR_SNANS (element_mode (arg0))
10916 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10917 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10918 {
10919 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10920 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10921 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10922 bool arg0rz = false, arg0iz = false;
10923 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10924 || (arg0i && (arg0iz = real_zerop (arg0i))))
10925 {
10926 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10927 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10928 if (arg0rz && arg1i && real_zerop (arg1i))
10929 {
10930 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10931 arg1r ? arg1r
10932 : build1 (REALPART_EXPR, rtype, arg1));
10933 tree ip = arg0i ? arg0i
10934 : build1 (IMAGPART_EXPR, rtype, arg0);
10935 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10936 }
10937 else if (arg0iz && arg1r && real_zerop (arg1r))
10938 {
10939 tree rp = arg0r ? arg0r
10940 : build1 (REALPART_EXPR, rtype, arg0);
10941 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10942 arg1i ? arg1i
10943 : build1 (IMAGPART_EXPR, rtype, arg1));
10944 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10945 }
10946 }
10947 }
10948
10949 /* A - B -> A + (-B) if B is easily negatable. */
10950 if (negate_expr_p (op1)
10951 && ! TYPE_OVERFLOW_SANITIZED (type)
10952 && ((FLOAT_TYPE_P (type)
10953 /* Avoid this transformation if B is a positive REAL_CST. */
10954 && (TREE_CODE (op1) != REAL_CST
10955 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10956 || INTEGRAL_TYPE_P (type)))
10957 return fold_build2_loc (loc, PLUS_EXPR, type,
10958 fold_convert_loc (loc, type, arg0),
10959 negate_expr (op1));
10960
10961 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10962 one. Make sure the type is not saturating and has the signedness of
10963 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10964 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10965 if ((TREE_CODE (arg0) == MULT_EXPR
10966 || TREE_CODE (arg1) == MULT_EXPR)
10967 && !TYPE_SATURATING (type)
10968 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10969 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10970 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10971 {
10972 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10973 if (tem)
10974 return tem;
10975 }
10976
10977 goto associate;
10978
10979 case MULT_EXPR:
10980 if (! FLOAT_TYPE_P (type))
10981 {
10982 /* Transform x * -C into -x * C if x is easily negatable. */
10983 if (TREE_CODE (op1) == INTEGER_CST
10984 && tree_int_cst_sgn (op1) == -1
10985 && negate_expr_p (op0)
10986 && negate_expr_p (op1)
10987 && (tem = negate_expr (op1)) != op1
10988 && ! TREE_OVERFLOW (tem))
10989 return fold_build2_loc (loc, MULT_EXPR, type,
10990 fold_convert_loc (loc, type,
10991 negate_expr (op0)), tem);
10992
10993 strict_overflow_p = false;
10994 if (TREE_CODE (arg1) == INTEGER_CST
10995 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10996 &strict_overflow_p)) != 0)
10997 {
10998 if (strict_overflow_p)
10999 fold_overflow_warning (("assuming signed overflow does not "
11000 "occur when simplifying "
11001 "multiplication"),
11002 WARN_STRICT_OVERFLOW_MISC);
11003 return fold_convert_loc (loc, type, tem);
11004 }
11005
11006 /* Optimize z * conj(z) for integer complex numbers. */
11007 if (TREE_CODE (arg0) == CONJ_EXPR
11008 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11009 return fold_mult_zconjz (loc, type, arg1);
11010 if (TREE_CODE (arg1) == CONJ_EXPR
11011 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11012 return fold_mult_zconjz (loc, type, arg0);
11013 }
11014 else
11015 {
11016 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11017 This is not the same for NaNs or if signed zeros are
11018 involved. */
11019 if (!HONOR_NANS (arg0)
11020 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
11021 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11022 && TREE_CODE (arg1) == COMPLEX_CST
11023 && real_zerop (TREE_REALPART (arg1)))
11024 {
11025 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11026 if (real_onep (TREE_IMAGPART (arg1)))
11027 return
11028 fold_build2_loc (loc, COMPLEX_EXPR, type,
11029 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11030 rtype, arg0)),
11031 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11032 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11033 return
11034 fold_build2_loc (loc, COMPLEX_EXPR, type,
11035 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11036 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11037 rtype, arg0)));
11038 }
11039
11040 /* Optimize z * conj(z) for floating point complex numbers.
11041 Guarded by flag_unsafe_math_optimizations as non-finite
11042 imaginary components don't produce scalar results. */
11043 if (flag_unsafe_math_optimizations
11044 && TREE_CODE (arg0) == CONJ_EXPR
11045 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11046 return fold_mult_zconjz (loc, type, arg1);
11047 if (flag_unsafe_math_optimizations
11048 && TREE_CODE (arg1) == CONJ_EXPR
11049 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11050 return fold_mult_zconjz (loc, type, arg0);
11051 }
11052 goto associate;
11053
11054 case BIT_IOR_EXPR:
11055 /* Canonicalize (X & C1) | C2. */
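/* E.g. (x & 0x0f) | 0xff becomes 0xff (x is then evaluated only for
   its side effects), and for 8-bit x, (x & 0xf0) | 0x1f becomes
   x | 0x1f since C1 | C2 covers all bits (illustrative).  */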
11056 if (TREE_CODE (arg0) == BIT_AND_EXPR
11057 && TREE_CODE (arg1) == INTEGER_CST
11058 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11059 {
11060 int width = TYPE_PRECISION (type), w;
11061 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11062 wide_int c2 = wi::to_wide (arg1);
11063
11064 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11065 if ((c1 & c2) == c1)
11066 return omit_one_operand_loc (loc, type, arg1,
11067 TREE_OPERAND (arg0, 0));
11068
11069 wide_int msk = wi::mask (width, false,
11070 TYPE_PRECISION (TREE_TYPE (arg1)));
11071
11072 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11073 if (wi::bit_and_not (msk, c1 | c2) == 0)
11074 {
11075 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11076 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11077 }
11078
11079 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11080 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11081 mode which allows further optimizations. */
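/* E.g. (x & 3) | 1 becomes (x & 2) | 1, but (x & 0xff) | 0x0f is
   kept as-is because the byte mask 0xff may enable further
   optimizations (illustrative).  */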
11082 c1 &= msk;
11083 c2 &= msk;
11084 wide_int c3 = wi::bit_and_not (c1, c2);
11085 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11086 {
11087 wide_int mask = wi::mask (w, false,
11088 TYPE_PRECISION (type));
11089 if (((c1 | c2) & mask) == mask
11090 && wi::bit_and_not (c1, mask) == 0)
11091 {
11092 c3 = mask;
11093 break;
11094 }
11095 }
11096
11097 if (c3 != c1)
11098 {
11099 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11100 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11101 wide_int_to_tree (type, c3));
11102 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11103 }
11104 }
11105
11106 /* See if this can be simplified into a rotate first. If that
11107 is unsuccessful, continue in the association code. */
11108 goto bit_rotate;
11109
11110 case BIT_XOR_EXPR:
11111 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11112 if (TREE_CODE (arg0) == BIT_AND_EXPR
11113 && INTEGRAL_TYPE_P (type)
11114 && integer_onep (TREE_OPERAND (arg0, 1))
11115 && integer_onep (arg1))
11116 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11117 build_zero_cst (TREE_TYPE (arg0)));
11118
11119 /* See if this can be simplified into a rotate first. If that
11120 is unsuccessful, continue in the association code. */
11121 goto bit_rotate;
11122
11123 case BIT_AND_EXPR:
11124 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11125 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11126 && INTEGRAL_TYPE_P (type)
11127 && integer_onep (TREE_OPERAND (arg0, 1))
11128 && integer_onep (arg1))
11129 {
11130 tree tem2;
11131 tem = TREE_OPERAND (arg0, 0);
11132 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11133 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11134 tem, tem2);
11135 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11136 build_zero_cst (TREE_TYPE (tem)));
11137 }
11138 /* Fold ~X & 1 as (X & 1) == 0. */
11139 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11140 && INTEGRAL_TYPE_P (type)
11141 && integer_onep (arg1))
11142 {
11143 tree tem2;
11144 tem = TREE_OPERAND (arg0, 0);
11145 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11146 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11147 tem, tem2);
11148 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11149 build_zero_cst (TREE_TYPE (tem)));
11150 }
11151 /* Fold !X & 1 as X == 0. */
11152 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11153 && integer_onep (arg1))
11154 {
11155 tem = TREE_OPERAND (arg0, 0);
11156 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11157 build_zero_cst (TREE_TYPE (tem)));
11158 }
11159
11160 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11161 multiple of 1 << CST. */
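/* E.g. (x * 8) & -8 folds to x * 8: -8 clears the low three bits
   and 8 is a multiple of 1 << 3 (illustrative).  */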
11162 if (TREE_CODE (arg1) == INTEGER_CST)
11163 {
11164 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11165 wide_int ncst1 = -cst1;
11166 if ((cst1 & ncst1) == ncst1
11167 && multiple_of_p (type, arg0,
11168 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11169 return fold_convert_loc (loc, type, arg0);
11170 }
11171
11172 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11173 bits from CST2. */
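/* E.g. (x * 4) & 3 folds to 0, while (x * 4) & 7 drops the known
   zero bits and becomes (x * 4) & 4 (illustrative).  */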
11174 if (TREE_CODE (arg1) == INTEGER_CST
11175 && TREE_CODE (arg0) == MULT_EXPR
11176 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11177 {
11178 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11179 wide_int masked
11180 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11181
11182 if (masked == 0)
11183 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11184 arg0, arg1);
11185 else if (masked != warg1)
11186 {
11187 /* Avoid the transform if arg1 is a mask of some
11188 mode which allows further optimizations. */
11189 int pop = wi::popcount (warg1);
11190 if (!(pop >= BITS_PER_UNIT
11191 && pow2p_hwi (pop)
11192 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11193 return fold_build2_loc (loc, code, type, op0,
11194 wide_int_to_tree (type, masked));
11195 }
11196 }
11197
11198 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11199 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11200 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11201 {
11202 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11203
11204 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11205 if (mask == -1)
11206 return
11207 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11208 }
11209
11210 goto associate;
11211
11212 case RDIV_EXPR:
11213 /* Don't touch a floating-point divide by zero unless the mode
11214 of the constant can represent infinity. */
11215 if (TREE_CODE (arg1) == REAL_CST
11216 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11217 && real_zerop (arg1))
11218 return NULL_TREE;
11219
11220 /* (-A) / (-B) -> A / B */
11221 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11222 return fold_build2_loc (loc, RDIV_EXPR, type,
11223 TREE_OPERAND (arg0, 0),
11224 negate_expr (arg1));
11225 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11226 return fold_build2_loc (loc, RDIV_EXPR, type,
11227 negate_expr (arg0),
11228 TREE_OPERAND (arg1, 0));
11229 return NULL_TREE;
11230
11231 case TRUNC_DIV_EXPR:
11232 /* Fall through */
11233
11234 case FLOOR_DIV_EXPR:
11235 /* Simplify A / (B << N) where A and B are positive and B is
11236 a power of 2, to A >> (N + log2(B)). */
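/* E.g. for unsigned A, A / (2 << N) folds to A >> (N + 1), since
   log2 (2) == 1 (illustrative).  */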
11237 strict_overflow_p = false;
11238 if (TREE_CODE (arg1) == LSHIFT_EXPR
11239 && (TYPE_UNSIGNED (type)
11240 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11241 {
11242 tree sval = TREE_OPERAND (arg1, 0);
11243 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11244 {
11245 tree sh_cnt = TREE_OPERAND (arg1, 1);
11246 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11247 wi::exact_log2 (wi::to_wide (sval)));
11248
11249 if (strict_overflow_p)
11250 fold_overflow_warning (("assuming signed overflow does not "
11251 "occur when simplifying A / (B << N)"),
11252 WARN_STRICT_OVERFLOW_MISC);
11253
11254 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11255 sh_cnt, pow2);
11256 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11257 fold_convert_loc (loc, type, arg0), sh_cnt);
11258 }
11259 }
11260
11261 /* Fall through */
11262
11263 case ROUND_DIV_EXPR:
11264 case CEIL_DIV_EXPR:
11265 case EXACT_DIV_EXPR:
11266 if (integer_zerop (arg1))
11267 return NULL_TREE;
11268
11269 /* Convert -A / -B to A / B when the type is signed and overflow is
11270 undefined. */
11271 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11272 && TREE_CODE (op0) == NEGATE_EXPR
11273 && negate_expr_p (op1))
11274 {
11275 if (ANY_INTEGRAL_TYPE_P (type))
11276 fold_overflow_warning (("assuming signed overflow does not occur "
11277 "when distributing negation across "
11278 "division"),
11279 WARN_STRICT_OVERFLOW_MISC);
11280 return fold_build2_loc (loc, code, type,
11281 fold_convert_loc (loc, type,
11282 TREE_OPERAND (arg0, 0)),
11283 negate_expr (op1));
11284 }
11285 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11286 && TREE_CODE (arg1) == NEGATE_EXPR
11287 && negate_expr_p (op0))
11288 {
11289 if (ANY_INTEGRAL_TYPE_P (type))
11290 fold_overflow_warning (("assuming signed overflow does not occur "
11291 "when distributing negation across "
11292 "division"),
11293 WARN_STRICT_OVERFLOW_MISC);
11294 return fold_build2_loc (loc, code, type,
11295 negate_expr (op0),
11296 fold_convert_loc (loc, type,
11297 TREE_OPERAND (arg1, 0)));
11298 }
11299
11300 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11301 operation, EXACT_DIV_EXPR.
11302
11303 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11304 At one time others generated faster code, but it's not clear if they
11305 do after the last round of changes to the DIV code in expmed.c. */
11306 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11307 && multiple_of_p (type, arg0, arg1))
11308 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11309 fold_convert (type, arg0),
11310 fold_convert (type, arg1));
11311
11312 strict_overflow_p = false;
11313 if (TREE_CODE (arg1) == INTEGER_CST
11314 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11315 &strict_overflow_p)) != 0)
11316 {
11317 if (strict_overflow_p)
11318 fold_overflow_warning (("assuming signed overflow does not occur "
11319 "when simplifying division"),
11320 WARN_STRICT_OVERFLOW_MISC);
11321 return fold_convert_loc (loc, type, tem);
11322 }
11323
11324 return NULL_TREE;
11325
11326 case CEIL_MOD_EXPR:
11327 case FLOOR_MOD_EXPR:
11328 case ROUND_MOD_EXPR:
11329 case TRUNC_MOD_EXPR:
11330 strict_overflow_p = false;
11331 if (TREE_CODE (arg1) == INTEGER_CST
11332 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11333 &strict_overflow_p)) != 0)
11334 {
11335 if (strict_overflow_p)
11336 fold_overflow_warning (("assuming signed overflow does not occur "
11337 "when simplifying modulus"),
11338 WARN_STRICT_OVERFLOW_MISC);
11339 return fold_convert_loc (loc, type, tem);
11340 }
11341
11342 return NULL_TREE;
11343
11344 case LROTATE_EXPR:
11345 case RROTATE_EXPR:
11346 case RSHIFT_EXPR:
11347 case LSHIFT_EXPR:
11348 /* Since negative shift count is not well-defined,
11349 don't try to compute it in the compiler. */
11350 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11351 return NULL_TREE;
11352
11353 prec = element_precision (type);
11354
11355 /* If we have a rotate of a bit operation with the rotate count and
11356 the second operand of the bit operation both constant,
11357 permute the two operations. */
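/* E.g. for 32-bit unsigned x, (x & 0xff00) rrotate 8 becomes
   (x rrotate 8) & 0xff; the rotate of the constant folds away
   (illustrative).  */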
11358 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11359 && (TREE_CODE (arg0) == BIT_AND_EXPR
11360 || TREE_CODE (arg0) == BIT_IOR_EXPR
11361 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11362 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11363 {
11364 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11365 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11366 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11367 fold_build2_loc (loc, code, type,
11368 arg00, arg1),
11369 fold_build2_loc (loc, code, type,
11370 arg01, arg1));
11371 }
11372
11373 /* Two consecutive rotates adding up to some integer
11374 multiple of the precision of the type can be ignored. */
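/* E.g. for 32-bit x, (x rrotate 10) rrotate 22 folds to x because
   10 + 22 == 32 (illustrative).  */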
11375 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11376 && TREE_CODE (arg0) == RROTATE_EXPR
11377 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11378 && wi::umod_trunc (wi::to_wide (arg1)
11379 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11380 prec) == 0)
11381 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11382
11383 return NULL_TREE;
11384
11385 case MIN_EXPR:
11386 case MAX_EXPR:
11387 goto associate;
11388
11389 case TRUTH_ANDIF_EXPR:
11390 /* Note that the operands of this must be ints
11391 and their values must be 0 or 1.
11392 ("true" is a fixed value perhaps depending on the language.) */
11393 /* If first arg is constant zero, return it. */
11394 if (integer_zerop (arg0))
11395 return fold_convert_loc (loc, type, arg0);
11396 /* FALLTHRU */
11397 case TRUTH_AND_EXPR:
11398 /* If either arg is constant true, drop it. */
11399 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11400 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11401 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11402 /* Preserve sequence points. */
11403 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11404 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11405 /* If second arg is constant zero, result is zero, but first arg
11406 must be evaluated. */
11407 if (integer_zerop (arg1))
11408 return omit_one_operand_loc (loc, type, arg1, arg0);
11409 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11410 case will be handled here. */
11411 if (integer_zerop (arg0))
11412 return omit_one_operand_loc (loc, type, arg0, arg1);
11413
11414 /* !X && X is always false. */
11415 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11416 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11417 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11418 /* X && !X is always false. */
11419 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11420 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11421 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11422
11423 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11424 means A >= Y && A != MAX, but in this case we know that
11425 A < X <= MAX. */
11426
11427 if (!TREE_SIDE_EFFECTS (arg0)
11428 && !TREE_SIDE_EFFECTS (arg1))
11429 {
11430 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11431 if (tem && !operand_equal_p (tem, arg0, 0))
11432 return fold_build2_loc (loc, code, type, tem, arg1);
11433
11434 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11435 if (tem && !operand_equal_p (tem, arg1, 0))
11436 return fold_build2_loc (loc, code, type, arg0, tem);
11437 }
11438
11439 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11440 != NULL_TREE)
11441 return tem;
11442
11443 return NULL_TREE;
11444
11445 case TRUTH_ORIF_EXPR:
11446 /* Note that the operands of this must be ints
11447 and their values must be 0 or true.
11448 ("true" is a fixed value perhaps depending on the language.) */
11449 /* If first arg is constant true, return it. */
11450 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11451 return fold_convert_loc (loc, type, arg0);
11452 /* FALLTHRU */
11453 case TRUTH_OR_EXPR:
11454 /* If either arg is constant zero, drop it. */
11455 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11456 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11457 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11458 /* Preserve sequence points. */
11459 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11460 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11461 /* If second arg is constant true, result is true, but we must
11462 evaluate first arg. */
11463 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11464 return omit_one_operand_loc (loc, type, arg1, arg0);
11465 /* Likewise for first arg, but note this only occurs here for
11466 TRUTH_OR_EXPR. */
11467 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11468 return omit_one_operand_loc (loc, type, arg0, arg1);
11469
11470 /* !X || X is always true. */
11471 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11472 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11473 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11474 /* X || !X is always true. */
11475 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11476 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11477 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11478
11479 /* (X && !Y) || (!X && Y) is X ^ Y */
11480 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11481 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11482 {
11483 tree a0, a1, l0, l1, n0, n1;
11484
11485 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11486 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11487
11488 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11489 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11490
11491 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11492 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11493
11494 if ((operand_equal_p (n0, a0, 0)
11495 && operand_equal_p (n1, a1, 0))
11496 || (operand_equal_p (n0, a1, 0)
11497 && operand_equal_p (n1, a0, 0)))
11498 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11499 }
11500
11501 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11502 != NULL_TREE)
11503 return tem;
11504
11505 return NULL_TREE;
11506
11507 case TRUTH_XOR_EXPR:
11508 /* If the second arg is constant zero, drop it. */
11509 if (integer_zerop (arg1))
11510 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11511 /* If the second arg is constant true, this is a logical inversion. */
11512 if (integer_onep (arg1))
11513 {
11514 tem = invert_truthvalue_loc (loc, arg0);
11515 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11516 }
11517 /* Identical arguments cancel to zero. */
11518 if (operand_equal_p (arg0, arg1, 0))
11519 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11520
11521 /* !X ^ X is always true. */
11522 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11523 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11524 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11525
11526 /* X ^ !X is always true. */
11527 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11528 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11529 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11530
11531 return NULL_TREE;
11532
11533 case EQ_EXPR:
11534 case NE_EXPR:
11535 STRIP_NOPS (arg0);
11536 STRIP_NOPS (arg1);
11537
11538 tem = fold_comparison (loc, code, type, op0, op1);
11539 if (tem != NULL_TREE)
11540 return tem;
11541
11542 /* bool_var != 1 becomes !bool_var. */
11543 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11544 && code == NE_EXPR)
11545 return fold_convert_loc (loc, type,
11546 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11547 TREE_TYPE (arg0), arg0));
11548
11549 /* bool_var == 0 becomes !bool_var. */
11550 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11551 && code == EQ_EXPR)
11552 return fold_convert_loc (loc, type,
11553 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11554 TREE_TYPE (arg0), arg0));
11555
11556 /* !exp != 0 becomes !exp */
11557 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11558 && code == NE_EXPR)
11559 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11560
11561 /* If this is an EQ or NE comparison with zero and ARG0 is
11562 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11563 two operations, but the latter can be done in one less insn
11564 on machines that have only two-operand insns or on which a
11565 constant cannot be the first operand. */
11566 if (TREE_CODE (arg0) == BIT_AND_EXPR
11567 && integer_zerop (arg1))
11568 {
11569 tree arg00 = TREE_OPERAND (arg0, 0);
11570 tree arg01 = TREE_OPERAND (arg0, 1);
11571 if (TREE_CODE (arg00) == LSHIFT_EXPR
11572 && integer_onep (TREE_OPERAND (arg00, 0)))
11573 {
11574 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11575 arg01, TREE_OPERAND (arg00, 1));
11576 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11577 build_int_cst (TREE_TYPE (arg0), 1));
11578 return fold_build2_loc (loc, code, type,
11579 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11580 arg1);
11581 }
11582 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11583 && integer_onep (TREE_OPERAND (arg01, 0)))
11584 {
11585 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11586 arg00, TREE_OPERAND (arg01, 1));
11587 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11588 build_int_cst (TREE_TYPE (arg0), 1));
11589 return fold_build2_loc (loc, code, type,
11590 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11591 arg1);
11592 }
11593 }
11594
11595 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11596 C1 is a valid shift constant, and C2 is a power of two, i.e.
11597 a single bit. */
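/* E.g. for 32-bit x, ((x >> 2) & 4) != 0 becomes (x & 16) != 0,
   and for signed x, ((x >> 31) & 2) != 0 becomes x < 0
   (illustrative).  */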
11598 if (TREE_CODE (arg0) == BIT_AND_EXPR
11599 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11600 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11601 == INTEGER_CST
11602 && integer_pow2p (TREE_OPERAND (arg0, 1))
11603 && integer_zerop (arg1))
11604 {
11605 tree itype = TREE_TYPE (arg0);
11606 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11607 prec = TYPE_PRECISION (itype);
11608
11609 /* Check for a valid shift count. */
11610 if (wi::ltu_p (wi::to_wide (arg001), prec))
11611 {
11612 tree arg01 = TREE_OPERAND (arg0, 1);
11613 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11614 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11615 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11616 can be rewritten as (X & (C2 << C1)) != 0. */
11617 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11618 {
11619 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11620 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11621 return fold_build2_loc (loc, code, type, tem,
11622 fold_convert_loc (loc, itype, arg1));
11623 }
11624 /* Otherwise, for signed (arithmetic) shifts,
11625 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11626 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11627 else if (!TYPE_UNSIGNED (itype))
11628 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11629 arg000, build_int_cst (itype, 0));
11630 /* Otherwise, for unsigned (logical) shifts,
11631 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11632 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11633 else
11634 return omit_one_operand_loc (loc, type,
11635 code == EQ_EXPR ? integer_one_node
11636 : integer_zero_node,
11637 arg000);
11638 }
11639 }
11640
11641 /* If this is a comparison of a field, we may be able to simplify it. */
11642 if ((TREE_CODE (arg0) == COMPONENT_REF
11643 || TREE_CODE (arg0) == BIT_FIELD_REF)
11644 /* Handle the constant case even without -O
11645 to make sure the warnings are given. */
11646 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11647 {
11648 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11649 if (t1)
11650 return t1;
11651 }
11652
11653 /* Optimize comparisons of strlen vs zero to a compare of the
11654 first character of the string vs zero. To wit,
11655 strlen(ptr) == 0 => *ptr == 0
11656 strlen(ptr) != 0 => *ptr != 0
11657 Other cases should reduce to one of these two (or a constant)
11658 due to the return value of strlen being unsigned. */
11659 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
11660 {
11661 tree fndecl = get_callee_fndecl (arg0);
11662
11663 if (fndecl
11664 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
11665 && call_expr_nargs (arg0) == 1
11666 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
11667 == POINTER_TYPE))
11668 {
11669 tree ptrtype
11670 = build_pointer_type (build_qualified_type (char_type_node,
11671 TYPE_QUAL_CONST));
11672 tree ptr = fold_convert_loc (loc, ptrtype,
11673 CALL_EXPR_ARG (arg0, 0));
11674 tree iref = build_fold_indirect_ref_loc (loc, ptr);
11675 return fold_build2_loc (loc, code, type, iref,
11676 build_int_cst (TREE_TYPE (iref), 0));
11677 }
11678 }
11679
11680 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11681 of X. Similarly fold (X >> C) == 0 into X >= 0. */
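/* E.g. for 32-bit x, (x >> 31) != 0 becomes x < 0; when x is
   unsigned it is first converted to the corresponding signed type
   (illustrative).  */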
11682 if (TREE_CODE (arg0) == RSHIFT_EXPR
11683 && integer_zerop (arg1)
11684 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11685 {
11686 tree arg00 = TREE_OPERAND (arg0, 0);
11687 tree arg01 = TREE_OPERAND (arg0, 1);
11688 tree itype = TREE_TYPE (arg00);
11689 if (wi::to_wide (arg01) == element_precision (itype) - 1)
11690 {
11691 if (TYPE_UNSIGNED (itype))
11692 {
11693 itype = signed_type_for (itype);
11694 arg00 = fold_convert_loc (loc, itype, arg00);
11695 }
11696 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11697 type, arg00, build_zero_cst (itype));
11698 }
11699 }
11700
11701 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11702 (X & C) == 0 when C is a single bit. */
11703 if (TREE_CODE (arg0) == BIT_AND_EXPR
11704 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11705 && integer_zerop (arg1)
11706 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11707 {
11708 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11709 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11710 TREE_OPERAND (arg0, 1));
11711 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11712 type, tem,
11713 fold_convert_loc (loc, TREE_TYPE (arg0),
11714 arg1));
11715 }
11716
11717 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11718 constant C is a power of two, i.e. a single bit. */
11719 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11720 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11721 && integer_zerop (arg1)
11722 && integer_pow2p (TREE_OPERAND (arg0, 1))
11723 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11724 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11725 {
11726 tree arg00 = TREE_OPERAND (arg0, 0);
11727 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11728 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11729 }
11730
11731 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11732 when C is a power of two, i.e. a single bit. */
11733 if (TREE_CODE (arg0) == BIT_AND_EXPR
11734 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11735 && integer_zerop (arg1)
11736 && integer_pow2p (TREE_OPERAND (arg0, 1))
11737 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11738 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11739 {
11740 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11741 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11742 arg000, TREE_OPERAND (arg0, 1));
11743 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11744 tem, build_int_cst (TREE_TYPE (tem), 0));
11745 }
11746
11747 if (integer_zerop (arg1)
11748 && tree_expr_nonzero_p (arg0))
11749 {
11750 tree res = constant_boolean_node (code == NE_EXPR, type);
11751 return omit_one_operand_loc (loc, type, res, arg0);
11752 }
11753
11754 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11755 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11756 {
11757 tree arg00 = TREE_OPERAND (arg0, 0);
11758 tree arg01 = TREE_OPERAND (arg0, 1);
11759 tree arg10 = TREE_OPERAND (arg1, 0);
11760 tree arg11 = TREE_OPERAND (arg1, 1);
11761 tree itype = TREE_TYPE (arg0);
11762
11763 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11764 operand_equal_p guarantees no side-effects so we don't need
11765 to use omit_one_operand on Z. */
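/* E.g. (x ^ z) == (y ^ z) folds to x == y (illustrative).  */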
11766 if (operand_equal_p (arg01, arg11, 0))
11767 return fold_build2_loc (loc, code, type, arg00,
11768 fold_convert_loc (loc, TREE_TYPE (arg00),
11769 arg10));
11770 if (operand_equal_p (arg01, arg10, 0))
11771 return fold_build2_loc (loc, code, type, arg00,
11772 fold_convert_loc (loc, TREE_TYPE (arg00),
11773 arg11));
11774 if (operand_equal_p (arg00, arg11, 0))
11775 return fold_build2_loc (loc, code, type, arg01,
11776 fold_convert_loc (loc, TREE_TYPE (arg01),
11777 arg10));
11778 if (operand_equal_p (arg00, arg10, 0))
11779 return fold_build2_loc (loc, code, type, arg01,
11780 fold_convert_loc (loc, TREE_TYPE (arg01),
11781 arg11));
11782
11783 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
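/* E.g. (x ^ 5) == (y ^ 3) folds to (x ^ 6) == y, since
   5 ^ 3 == 6 (illustrative).  */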
11784 if (TREE_CODE (arg01) == INTEGER_CST
11785 && TREE_CODE (arg11) == INTEGER_CST)
11786 {
11787 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11788 fold_convert_loc (loc, itype, arg11));
11789 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11790 return fold_build2_loc (loc, code, type, tem,
11791 fold_convert_loc (loc, itype, arg10));
11792 }
11793 }
11794
11795 /* Attempt to simplify equality/inequality comparisons of complex
11796 values. Only lower the comparison if the result is known or
11797 can be simplified to a single scalar comparison. */
11798 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11799 || TREE_CODE (arg0) == COMPLEX_CST)
11800 && (TREE_CODE (arg1) == COMPLEX_EXPR
11801 || TREE_CODE (arg1) == COMPLEX_CST))
11802 {
11803 tree real0, imag0, real1, imag1;
11804 tree rcond, icond;
11805
11806 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11807 {
11808 real0 = TREE_OPERAND (arg0, 0);
11809 imag0 = TREE_OPERAND (arg0, 1);
11810 }
11811 else
11812 {
11813 real0 = TREE_REALPART (arg0);
11814 imag0 = TREE_IMAGPART (arg0);
11815 }
11816
11817 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11818 {
11819 real1 = TREE_OPERAND (arg1, 0);
11820 imag1 = TREE_OPERAND (arg1, 1);
11821 }
11822 else
11823 {
11824 real1 = TREE_REALPART (arg1);
11825 imag1 = TREE_IMAGPART (arg1);
11826 }
11827
11828 rcond = fold_binary_loc (loc, code, type, real0, real1);
11829 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11830 {
11831 if (integer_zerop (rcond))
11832 {
11833 if (code == EQ_EXPR)
11834 return omit_two_operands_loc (loc, type, boolean_false_node,
11835 imag0, imag1);
11836 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11837 }
11838 else
11839 {
11840 if (code == NE_EXPR)
11841 return omit_two_operands_loc (loc, type, boolean_true_node,
11842 imag0, imag1);
11843 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11844 }
11845 }
11846
11847 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11848 if (icond && TREE_CODE (icond) == INTEGER_CST)
11849 {
11850 if (integer_zerop (icond))
11851 {
11852 if (code == EQ_EXPR)
11853 return omit_two_operands_loc (loc, type, boolean_false_node,
11854 real0, real1);
11855 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11856 }
11857 else
11858 {
11859 if (code == NE_EXPR)
11860 return omit_two_operands_loc (loc, type, boolean_true_node,
11861 real0, real1);
11862 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11863 }
11864 }
11865 }
11866
11867 return NULL_TREE;
11868
11869 case LT_EXPR:
11870 case GT_EXPR:
11871 case LE_EXPR:
11872 case GE_EXPR:
11873 tem = fold_comparison (loc, code, type, op0, op1);
11874 if (tem != NULL_TREE)
11875 return tem;
11876
11877 /* Transform comparisons of the form X +- C CMP X. */
11878 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11879 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11880 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11881 && !HONOR_SNANS (arg0))
11882 {
11883 tree arg01 = TREE_OPERAND (arg0, 1);
11884 enum tree_code code0 = TREE_CODE (arg0);
11885 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11886
11887 /* (X - c) > X becomes false. */
11888 if (code == GT_EXPR
11889 && ((code0 == MINUS_EXPR && is_positive >= 0)
11890 || (code0 == PLUS_EXPR && is_positive <= 0)))
11891 return constant_boolean_node (0, type);
11892
11893 /* Likewise (X + c) < X becomes false. */
11894 if (code == LT_EXPR
11895 && ((code0 == PLUS_EXPR && is_positive >= 0)
11896 || (code0 == MINUS_EXPR && is_positive <= 0)))
11897 return constant_boolean_node (0, type);
11898
11899 /* Convert (X - c) <= X to true. */
11900 if (!HONOR_NANS (arg1)
11901 && code == LE_EXPR
11902 && ((code0 == MINUS_EXPR && is_positive >= 0)
11903 || (code0 == PLUS_EXPR && is_positive <= 0)))
11904 return constant_boolean_node (1, type);
11905
11906 /* Convert (X + c) >= X to true. */
11907 if (!HONOR_NANS (arg1)
11908 && code == GE_EXPR
11909 && ((code0 == PLUS_EXPR && is_positive >= 0)
11910 || (code0 == MINUS_EXPR && is_positive <= 0)))
11911 return constant_boolean_node (1, type);
11912 }
11913
11914 /* If we are comparing an ABS_EXPR with a constant, we can
11915 convert all the cases into explicit comparisons, but they may
11916 well not be faster than doing the ABS and one comparison.
11917 But ABS (X) <= C is a range comparison, which becomes a subtraction
11918 and a comparison, and is probably faster. */
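/* E.g. abs (x) <= 5 becomes x >= -5 && x <= 5 (illustrative).  */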
11919 if (code == LE_EXPR
11920 && TREE_CODE (arg1) == INTEGER_CST
11921 && TREE_CODE (arg0) == ABS_EXPR
11922 && ! TREE_SIDE_EFFECTS (arg0)
11923 && (tem = negate_expr (arg1)) != 0
11924 && TREE_CODE (tem) == INTEGER_CST
11925 && !TREE_OVERFLOW (tem))
11926 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11927 build2 (GE_EXPR, type,
11928 TREE_OPERAND (arg0, 0), tem),
11929 build2 (LE_EXPR, type,
11930 TREE_OPERAND (arg0, 0), arg1));
11931
11932 /* Convert ABS_EXPR<x> >= 0 to true. */
11933 strict_overflow_p = false;
11934 if (code == GE_EXPR
11935 && (integer_zerop (arg1)
11936 || (! HONOR_NANS (arg0)
11937 && real_zerop (arg1)))
11938 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11939 {
11940 if (strict_overflow_p)
11941 fold_overflow_warning (("assuming signed overflow does not occur "
11942 "when simplifying comparison of "
11943 "absolute value and zero"),
11944 WARN_STRICT_OVERFLOW_CONDITIONAL);
11945 return omit_one_operand_loc (loc, type,
11946 constant_boolean_node (true, type),
11947 arg0);
11948 }
11949
11950 /* Convert ABS_EXPR<x> < 0 to false. */
11951 strict_overflow_p = false;
11952 if (code == LT_EXPR
11953 && (integer_zerop (arg1) || real_zerop (arg1))
11954 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11955 {
11956 if (strict_overflow_p)
11957 fold_overflow_warning (("assuming signed overflow does not occur "
11958 "when simplifying comparison of "
11959 "absolute value and zero"),
11960 WARN_STRICT_OVERFLOW_CONDITIONAL);
11961 return omit_one_operand_loc (loc, type,
11962 constant_boolean_node (false, type),
11963 arg0);
11964 }
11965
11966 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11967 and similarly for >= into !=. */
11968 if ((code == LT_EXPR || code == GE_EXPR)
11969 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11970 && TREE_CODE (arg1) == LSHIFT_EXPR
11971 && integer_onep (TREE_OPERAND (arg1, 0)))
11972 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11973 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11974 TREE_OPERAND (arg1, 1)),
11975 build_zero_cst (TREE_TYPE (arg0)));
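/* E.g. for unsigned X, X < (1 << Y) holds exactly when all bits of X at
   position Y and above are zero, which is what (X >> Y) == 0 tests. */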
11976
11977 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11978 otherwise Y might be >= # of bits in X's type and thus e.g.
11979 (unsigned char) (1 << Y) for Y == 15 might be 0.
11980 If the cast is widening, then 1 << Y should have unsigned type,
11981 otherwise if Y is the number of bits in the signed shift type minus 1,
11982 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11983 Y == 31 might be 0xffffffff80000000. */
11984 if ((code == LT_EXPR || code == GE_EXPR)
11985 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11986 && CONVERT_EXPR_P (arg1)
11987 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11988 && (element_precision (TREE_TYPE (arg1))
11989 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11990 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11991 || (element_precision (TREE_TYPE (arg1))
11992 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11993 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11994 {
11995 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11996 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11997 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11998 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11999 build_zero_cst (TREE_TYPE (arg0)));
12000 }
12001
12002 return NULL_TREE;
12003
12004 case UNORDERED_EXPR:
12005 case ORDERED_EXPR:
12006 case UNLT_EXPR:
12007 case UNLE_EXPR:
12008 case UNGT_EXPR:
12009 case UNGE_EXPR:
12010 case UNEQ_EXPR:
12011 case LTGT_EXPR:
12012 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12013 {
12014 tree targ0 = strip_float_extensions (arg0);
12015 tree targ1 = strip_float_extensions (arg1);
12016 tree newtype = TREE_TYPE (targ0);
12017
12018 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12019 newtype = TREE_TYPE (targ1);
12020
12021 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12022 return fold_build2_loc (loc, code, type,
12023 fold_convert_loc (loc, newtype, targ0),
12024 fold_convert_loc (loc, newtype, targ1));
12025 }
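/* E.g. for float F1 and F2, (double) F1 UNLT (double) F2 can be done
   directly in float: widening a float to double is exact, so ordering
   and NaN-ness of the operands are unchanged. */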
12026
12027 return NULL_TREE;
12028
12029 case COMPOUND_EXPR:
12030 /* When pedantic, a compound expression can be neither an lvalue
12031 nor an integer constant expression. */
12032 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12033 return NULL_TREE;
12034 /* Don't let (0, 0) be a null pointer constant. */
12035 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12036 : fold_convert_loc (loc, type, arg1);
12037 return pedantic_non_lvalue_loc (loc, tem);
12038
12039 case ASSERT_EXPR:
12040 /* An ASSERT_EXPR should never be passed to fold_binary. */
12041 gcc_unreachable ();
12042
12043 default:
12044 return NULL_TREE;
12045 } /* switch (code) */
12046 }
12047
12048 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12049 ((A & N) + B) & M -> (A + B) & M
12050 Similarly if (N & M) == 0,
12051 ((A | N) + B) & M -> (A + B) & M
12052 and for - instead of + (or unary - instead of +)
12053 and/or ^ instead of |.
12054 If B is constant and (B & M) == 0, fold into A & M.
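For instance, with M = 7 and N = 8, ((A | 8) + B) & 7 simplifies to
(A + B) & 7: the bit set by the IOR only feeds carries into bit 3 and
above, so it cannot change the low three bits of the sum.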
12055
12056 This function is a helper for match.pd patterns. If any optimization
12057 is possible, return the non-NULL type in which the simplified
12058 operation should be performed; otherwise return NULL_TREE.
12059
12060 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12061 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12062 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12063 +/-. */
12064 tree
12065 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12066 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12067 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12068 tree *pmop)
12069 {
12070 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12071 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12072 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12073 if (~cst1 == 0
12074 || (cst1 & (cst1 + 1)) != 0
12075 || !INTEGRAL_TYPE_P (type)
12076 || (!TYPE_OVERFLOW_WRAPS (type)
12077 && TREE_CODE (type) != INTEGER_TYPE)
12078 || (wi::max_value (type) & cst1) != cst1)
12079 return NULL_TREE;
12080
12081 enum tree_code codes[2] = { code00, code01 };
12082 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12083 int which = 0;
12084 wide_int cst0;
12085
12086 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12087 arg1 (M) is == (1LL << cst) - 1.
12088 Store C into PMOP[0] and D into PMOP[1]. */
12089 pmop[0] = arg00;
12090 pmop[1] = arg01;
12091 which = code != NEGATE_EXPR;
12092
12093 for (; which >= 0; which--)
12094 switch (codes[which])
12095 {
12096 case BIT_AND_EXPR:
12097 case BIT_IOR_EXPR:
12098 case BIT_XOR_EXPR:
12099 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12100 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12101 if (codes[which] == BIT_AND_EXPR)
12102 {
12103 if (cst0 != cst1)
12104 break;
12105 }
12106 else if (cst0 != 0)
12107 break;
12108 /* If C or D is of the form (A & N) where
12109 (N & M) == M, or of the form (A | N) or
12110 (A ^ N) where (N & M) == 0, replace it with A. */
12111 pmop[which] = arg0xx[2 * which];
12112 break;
12113 case ERROR_MARK:
12114 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12115 break;
12116 /* If C or D is a constant N where (N & M) == 0, it can be
12117 omitted (replaced with 0). */
12118 if ((code == PLUS_EXPR
12119 || (code == MINUS_EXPR && which == 0))
12120 && (cst1 & wi::to_wide (pmop[which])) == 0)
12121 pmop[which] = build_int_cst (type, 0);
12122 /* Similarly, with C - N where (-N & M) == 0. */
12123 if (code == MINUS_EXPR
12124 && which == 1
12125 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12126 pmop[which] = build_int_cst (type, 0);
12127 break;
12128 default:
12129 gcc_unreachable ();
12130 }
12131
12132 /* Only build anything new if we optimized one or both arguments above. */
12133 if (pmop[0] == arg00 && pmop[1] == arg01)
12134 return NULL_TREE;
12135
12136 if (TYPE_OVERFLOW_WRAPS (type))
12137 return type;
12138 else
12139 return unsigned_type_for (type);
12140 }
12141
12142 /* Used by contains_label_p and contains_label_1. */
12143
12144 struct contains_label_data
12145 {
12146 hash_set<tree> *pset;
12147 bool inside_switch_p;
12148 };
12149
12150 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12151 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12152 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12153
12154 static tree
12155 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12156 {
12157 contains_label_data *d = (contains_label_data *) data;
12158 switch (TREE_CODE (*tp))
12159 {
12160 case LABEL_EXPR:
12161 return *tp;
12162
12163 case CASE_LABEL_EXPR:
12164 if (!d->inside_switch_p)
12165 return *tp;
12166 return NULL_TREE;
12167
12168 case SWITCH_EXPR:
12169 if (!d->inside_switch_p)
12170 {
12171 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12172 return *tp;
12173 d->inside_switch_p = true;
12174 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12175 return *tp;
12176 d->inside_switch_p = false;
12177 *walk_subtrees = 0;
12178 }
12179 return NULL_TREE;
12180
12181 case GOTO_EXPR:
12182 *walk_subtrees = 0;
12183 return NULL_TREE;
12184
12185 default:
12186 return NULL_TREE;
12187 }
12188 }
12189
12190 /* Return whether the sub-tree ST contains a label which is accessible from
12191 outside the sub-tree. */
12192
12193 static bool
12194 contains_label_p (tree st)
12195 {
12196 hash_set<tree> pset;
12197 contains_label_data data = { &pset, false };
12198 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12199 }
12200
12201 /* Fold a ternary expression of code CODE and type TYPE with operands
12202 OP0, OP1, and OP2. Return the folded expression if folding is
12203 successful. Otherwise, return NULL_TREE. */
12204
12205 tree
12206 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12207 tree op0, tree op1, tree op2)
12208 {
12209 tree tem;
12210 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12211 enum tree_code_class kind = TREE_CODE_CLASS (code);
12212
12213 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12214 && TREE_CODE_LENGTH (code) == 3);
12215
12216 /* If this is a commutative operation, and OP0 is a constant, move it
12217 to OP1 to reduce the number of tests below. */
12218 if (commutative_ternary_tree_code (code)
12219 && tree_swap_operands_p (op0, op1))
12220 return fold_build3_loc (loc, code, type, op1, op0, op2);
12221
12222 tem = generic_simplify (loc, code, type, op0, op1, op2);
12223 if (tem)
12224 return tem;
12225
12226 /* Strip any conversions that don't change the mode. This is safe
12227 for every expression, except for a comparison expression because
12228 its signedness is derived from its operands. So, in the latter
12229 case, only strip conversions that don't change the signedness.
12230
12231 Note that this is done as an internal manipulation within the
12232 constant folder, in order to find the simplest representation of
12233 the arguments so that their form can be studied. In any case,
12234 the appropriate type conversions should be put back in the tree
12235 that will get out of the constant folder. */
12236 if (op0)
12237 {
12238 arg0 = op0;
12239 STRIP_NOPS (arg0);
12240 }
12241
12242 if (op1)
12243 {
12244 arg1 = op1;
12245 STRIP_NOPS (arg1);
12246 }
12247
12248 if (op2)
12249 {
12250 arg2 = op2;
12251 STRIP_NOPS (arg2);
12252 }
12253
12254 switch (code)
12255 {
12256 case COMPONENT_REF:
12257 if (TREE_CODE (arg0) == CONSTRUCTOR
12258 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12259 {
12260 unsigned HOST_WIDE_INT idx;
12261 tree field, value;
12262 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12263 if (field == arg1)
12264 return value;
12265 }
12266 return NULL_TREE;
12267
12268 case COND_EXPR:
12269 case VEC_COND_EXPR:
12270 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12271 so all simple results must be passed through pedantic_non_lvalue. */
12272 if (TREE_CODE (arg0) == INTEGER_CST)
12273 {
12274 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12275 tem = integer_zerop (arg0) ? op2 : op1;
12276 /* Only optimize constant conditions when the selected branch
12277 has the same type as the COND_EXPR. This avoids optimizing
12278 away "c ? x : throw", where the throw has a void type.
12279 Also avoid throwing away an operand that contains a label. */
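/* E.g. the dead arm of 0 ? ({ l: 0; }) : 1 cannot be discarded when
   some goto outside of it targets l. */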
12280 if ((!TREE_SIDE_EFFECTS (unused_op)
12281 || !contains_label_p (unused_op))
12282 && (! VOID_TYPE_P (TREE_TYPE (tem))
12283 || VOID_TYPE_P (type)))
12284 return pedantic_non_lvalue_loc (loc, tem);
12285 return NULL_TREE;
12286 }
12287 else if (TREE_CODE (arg0) == VECTOR_CST)
12288 {
12289 unsigned HOST_WIDE_INT nelts;
12290 if ((TREE_CODE (arg1) == VECTOR_CST
12291 || TREE_CODE (arg1) == CONSTRUCTOR)
12292 && (TREE_CODE (arg2) == VECTOR_CST
12293 || TREE_CODE (arg2) == CONSTRUCTOR)
12294 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12295 {
12296 vec_perm_builder sel (nelts, nelts, 1);
12297 for (unsigned int i = 0; i < nelts; i++)
12298 {
12299 tree val = VECTOR_CST_ELT (arg0, i);
12300 if (integer_all_onesp (val))
12301 sel.quick_push (i);
12302 else if (integer_zerop (val))
12303 sel.quick_push (nelts + i);
12304 else /* Currently unreachable. */
12305 return NULL_TREE;
12306 }
12307 vec_perm_indices indices (sel, 2, nelts);
12308 tree t = fold_vec_perm (type, arg1, arg2, indices);
12309 if (t != NULL_TREE)
12310 return t;
12311 }
12312 }
12313
12314 /* If we have A op B ? A : C, we may be able to convert this to a
12315 simpler expression, depending on the operation and the values
12316 of B and C. Signed zeros prevent all of these transformations,
12317 for reasons given above each one.
12318
12319 Also try swapping the arguments and inverting the conditional. */
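/* For instance, X < Y ? X : Y can become MIN_EXPR <X, Y>. With signed
   zeros this would be invalid: -0. < 0. is false, so -0. < 0. ? -0. : 0.
   yields 0., whereas MIN (-0., 0.) may yield -0. */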
12320 if (COMPARISON_CLASS_P (arg0)
12321 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12322 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
12323 {
12324 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12325 if (tem)
12326 return tem;
12327 }
12328
12329 if (COMPARISON_CLASS_P (arg0)
12330 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12331 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12332 {
12333 location_t loc0 = expr_location_or (arg0, loc);
12334 tem = fold_invert_truthvalue (loc0, arg0);
12335 if (tem && COMPARISON_CLASS_P (tem))
12336 {
12337 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12338 if (tem)
12339 return tem;
12340 }
12341 }
12342
12343 /* If the second operand is simpler than the third, swap them
12344 since that produces better jump optimization results. */
12345 if (truth_value_p (TREE_CODE (arg0))
12346 && tree_swap_operands_p (op1, op2))
12347 {
12348 location_t loc0 = expr_location_or (arg0, loc);
12349 /* See if this can be inverted. If it can't, possibly because
12350 it was a floating-point inequality comparison, don't do
12351 anything. */
12352 tem = fold_invert_truthvalue (loc0, arg0);
12353 if (tem)
12354 return fold_build3_loc (loc, code, type, tem, op2, op1);
12355 }
12356
12357 /* Convert A ? 1 : 0 to simply A. */
12358 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12359 : (integer_onep (op1)
12360 && !VECTOR_TYPE_P (type)))
12361 && integer_zerop (op2)
12362 /* If we try to convert OP0 to our type, the
12363 call to fold will try to move the conversion inside
12364 a COND, which will recurse. In that case, the COND_EXPR
12365 is probably the best choice, so leave it alone. */
12366 && type == TREE_TYPE (arg0))
12367 return pedantic_non_lvalue_loc (loc, arg0);
12368
12369 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12370 over COND_EXPR in cases such as floating point comparisons. */
12371 if (integer_zerop (op1)
12372 && code == COND_EXPR
12373 && integer_onep (op2)
12374 && !VECTOR_TYPE_P (type)
12375 && truth_value_p (TREE_CODE (arg0)))
12376 return pedantic_non_lvalue_loc (loc,
12377 fold_convert_loc (loc, type,
12378 invert_truthvalue_loc (loc,
12379 arg0)));
12380
12381 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12382 if (TREE_CODE (arg0) == LT_EXPR
12383 && integer_zerop (TREE_OPERAND (arg0, 1))
12384 && integer_zerop (op2)
12385 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12386 {
12387 /* sign_bit_p looks through both zero and sign extensions,
12388 but for this optimization only sign extensions are
12389 usable. */
12390 tree tem2 = TREE_OPERAND (arg0, 0);
12391 while (tem != tem2)
12392 {
12393 if (TREE_CODE (tem2) != NOP_EXPR
12394 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12395 {
12396 tem = NULL_TREE;
12397 break;
12398 }
12399 tem2 = TREE_OPERAND (tem2, 0);
12400 }
12401 /* sign_bit_p only checks ARG1 bits within A's precision.
12402 If <sign bit of A> has wider type than A, bits outside
12403 of A's precision in <sign bit of A> need to be checked.
12404 If they are all 0, this optimization needs to be done
12405 in unsigned A's type; if they are all 1, in signed A's type;
12406 otherwise this can't be done. */
12407 if (tem
12408 && TYPE_PRECISION (TREE_TYPE (tem))
12409 < TYPE_PRECISION (TREE_TYPE (arg1))
12410 && TYPE_PRECISION (TREE_TYPE (tem))
12411 < TYPE_PRECISION (type))
12412 {
12413 int inner_width, outer_width;
12414 tree tem_type;
12415
12416 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12417 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12418 if (outer_width > TYPE_PRECISION (type))
12419 outer_width = TYPE_PRECISION (type);
12420
12421 wide_int mask = wi::shifted_mask
12422 (inner_width, outer_width - inner_width, false,
12423 TYPE_PRECISION (TREE_TYPE (arg1)));
12424
12425 wide_int common = mask & wi::to_wide (arg1);
12426 if (common == mask)
12427 {
12428 tem_type = signed_type_for (TREE_TYPE (tem));
12429 tem = fold_convert_loc (loc, tem_type, tem);
12430 }
12431 else if (common == 0)
12432 {
12433 tem_type = unsigned_type_for (TREE_TYPE (tem));
12434 tem = fold_convert_loc (loc, tem_type, tem);
12435 }
12436 else
12437 tem = NULL;
12438 }
12439
12440 if (tem)
12441 return
12442 fold_convert_loc (loc, type,
12443 fold_build2_loc (loc, BIT_AND_EXPR,
12444 TREE_TYPE (tem), tem,
12445 fold_convert_loc (loc,
12446 TREE_TYPE (tem),
12447 arg1)));
12448 }
12449
12450 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12451 already handled above. */
12452 if (TREE_CODE (arg0) == BIT_AND_EXPR
12453 && integer_onep (TREE_OPERAND (arg0, 1))
12454 && integer_zerop (op2)
12455 && integer_pow2p (arg1))
12456 {
12457 tree tem = TREE_OPERAND (arg0, 0);
12458 STRIP_NOPS (tem);
12459 if (TREE_CODE (tem) == RSHIFT_EXPR
12460 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12461 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
12462 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
12463 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12464 fold_convert_loc (loc, type,
12465 TREE_OPERAND (tem, 0)),
12466 op1);
12467 }
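/* E.g. ((A >> 3) & 1) ? 8 : 0 tests bit 3 of A and then rebuilds it in
   place, which is simply A & 8. */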
12468
12469 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12470 is probably obsolete because the first operand should be a
12471 truth value (that's why we have the two cases above), but let's
12472 leave it in until we can confirm this for all front-ends. */
12473 if (integer_zerop (op2)
12474 && TREE_CODE (arg0) == NE_EXPR
12475 && integer_zerop (TREE_OPERAND (arg0, 1))
12476 && integer_pow2p (arg1)
12477 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12478 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12479 arg1, OEP_ONLY_CONST)
12480 /* operand_equal_p compares just value, not precision, so e.g.
12481 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
12482 second operand 32-bit -128, which is not a power of two (or vice
12483 versa). */
12484 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
12485 return pedantic_non_lvalue_loc (loc,
12486 fold_convert_loc (loc, type,
12487 TREE_OPERAND (arg0,
12488 0)));
12489
12490 /* Disable the transformations below for vectors, since
12491 fold_binary_op_with_conditional_arg may undo them immediately,
12492 yielding an infinite loop. */
12493 if (code == VEC_COND_EXPR)
12494 return NULL_TREE;
12495
12496 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12497 if (integer_zerop (op2)
12498 && truth_value_p (TREE_CODE (arg0))
12499 && truth_value_p (TREE_CODE (arg1))
12500 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12501 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12502 : TRUTH_ANDIF_EXPR,
12503 type, fold_convert_loc (loc, type, arg0), op1);
12504
12505 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12506 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
12507 && truth_value_p (TREE_CODE (arg0))
12508 && truth_value_p (TREE_CODE (arg1))
12509 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12510 {
12511 location_t loc0 = expr_location_or (arg0, loc);
12512 /* Only perform transformation if ARG0 is easily inverted. */
12513 tem = fold_invert_truthvalue (loc0, arg0);
12514 if (tem)
12515 return fold_build2_loc (loc, code == VEC_COND_EXPR
12516 ? BIT_IOR_EXPR
12517 : TRUTH_ORIF_EXPR,
12518 type, fold_convert_loc (loc, type, tem),
12519 op1);
12520 }
12521
12522 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12523 if (integer_zerop (arg1)
12524 && truth_value_p (TREE_CODE (arg0))
12525 && truth_value_p (TREE_CODE (op2))
12526 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12527 {
12528 location_t loc0 = expr_location_or (arg0, loc);
12529 /* Only perform transformation if ARG0 is easily inverted. */
12530 tem = fold_invert_truthvalue (loc0, arg0);
12531 if (tem)
12532 return fold_build2_loc (loc, code == VEC_COND_EXPR
12533 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12534 type, fold_convert_loc (loc, type, tem),
12535 op2);
12536 }
12537
12538 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12539 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
12540 && truth_value_p (TREE_CODE (arg0))
12541 && truth_value_p (TREE_CODE (op2))
12542 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12543 return fold_build2_loc (loc, code == VEC_COND_EXPR
12544 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12545 type, fold_convert_loc (loc, type, arg0), op2);
12546
12547 return NULL_TREE;
12548
12549 case CALL_EXPR:
12550 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12551 of fold_ternary on them. */
12552 gcc_unreachable ();
12553
12554 case BIT_FIELD_REF:
12555 if (TREE_CODE (arg0) == VECTOR_CST
12556 && (type == TREE_TYPE (TREE_TYPE (arg0))
12557 || (VECTOR_TYPE_P (type)
12558 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
12559 && tree_fits_uhwi_p (op1)
12560 && tree_fits_uhwi_p (op2))
12561 {
12562 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12563 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12564 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12565 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12566
12567 if (n != 0
12568 && (idx % width) == 0
12569 && (n % width) == 0
12570 && known_le ((idx + n) / width,
12571 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
12572 {
12573 idx = idx / width;
12574 n = n / width;
12575
12576 if (TREE_CODE (arg0) == VECTOR_CST)
12577 {
12578 if (n == 1)
12579 {
12580 tem = VECTOR_CST_ELT (arg0, idx);
12581 if (VECTOR_TYPE_P (type))
12582 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
12583 return tem;
12584 }
12585
12586 tree_vector_builder vals (type, n, 1);
12587 for (unsigned i = 0; i < n; ++i)
12588 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
12589 return vals.build ();
12590 }
12591 }
12592 }
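/* E.g. BIT_FIELD_REF <V, 64, 64> of a v4si constant V extracts elements
   2 and 3 as a v2si vector. */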
12593
12594 /* On constants we can use native encode/interpret to constant
12595 fold (nearly) all BIT_FIELD_REFs. */
12596 if (CONSTANT_CLASS_P (arg0)
12597 && can_native_interpret_type_p (type)
12598 && BITS_PER_UNIT == 8
12599 && tree_fits_uhwi_p (op1)
12600 && tree_fits_uhwi_p (op2))
12601 {
12602 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12603 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12604 /* Limit us to a reasonable amount of work. To relax the
12605 other limitations we need bit-shifting of the buffer
12606 and rounding up the size. */
12607 if (bitpos % BITS_PER_UNIT == 0
12608 && bitsize % BITS_PER_UNIT == 0
12609 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
12610 {
12611 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
12612 unsigned HOST_WIDE_INT len
12613 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
12614 bitpos / BITS_PER_UNIT);
12615 if (len > 0
12616 && len * BITS_PER_UNIT >= bitsize)
12617 {
12618 tree v = native_interpret_expr (type, b,
12619 bitsize / BITS_PER_UNIT);
12620 if (v)
12621 return v;
12622 }
12623 }
12624 }
12625
12626 return NULL_TREE;
12627
12628 case VEC_PERM_EXPR:
12629 /* Perform constant folding of VEC_PERM_EXPR. */
12630 if (TREE_CODE (arg2) == VECTOR_CST
12631 && TREE_CODE (op0) == VECTOR_CST
12632 && TREE_CODE (op1) == VECTOR_CST)
12633 {
12634 /* Build a vector of integers from the tree mask. */
12635 vec_perm_builder builder;
12636 if (!tree_to_vec_perm_builder (&builder, arg2))
12637 return NULL_TREE;
12638
12639 /* Create a vec_perm_indices for the integer vector. */
12640 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
12641 bool single_arg = (op0 == op1);
12642 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
12643 return fold_vec_perm (type, op0, op1, sel);
12644 }
12645 return NULL_TREE;
12646
12647 case BIT_INSERT_EXPR:
12648 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
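/* E.g. inserting the 8-bit value 0xab at bit position 0 into the
   integer 0xff00 yields 0xffab: the low eight bits are masked off
   and replaced by the zero-extended, shifted replacement value. */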
12649 if (TREE_CODE (arg0) == INTEGER_CST
12650 && TREE_CODE (arg1) == INTEGER_CST)
12651 {
12652 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12653 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
12654 wide_int tem = (wi::to_wide (arg0)
12655 & wi::shifted_mask (bitpos, bitsize, true,
12656 TYPE_PRECISION (type)));
12657 wide_int tem2
12658 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
12659 bitsize), bitpos);
12660 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
12661 }
12662 else if (TREE_CODE (arg0) == VECTOR_CST
12663 && CONSTANT_CLASS_P (arg1)
12664 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
12665 TREE_TYPE (arg1)))
12666 {
12667 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12668 unsigned HOST_WIDE_INT elsize
12669 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
12670 if (bitpos % elsize == 0)
12671 {
12672 unsigned k = bitpos / elsize;
12673 unsigned HOST_WIDE_INT nelts;
12674 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
12675 return arg0;
12676 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
12677 {
12678 tree_vector_builder elts (type, nelts, 1);
12679 elts.quick_grow (nelts);
12680 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
12681 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
12682 return elts.build ();
12683 }
12684 }
12685 }
12686 return NULL_TREE;
12687
12688 default:
12689 return NULL_TREE;
12690 } /* switch (code) */
12691 }
12692
12693 /* Get the element at ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12694 of an array (or vector). *CTOR_IDX, if non-NULL, is updated with the
12695 constructor element index of the value returned. If the element is
12696 not found, NULL_TREE is returned and *CTOR_IDX is updated to
12697 the index of the element after the ACCESS_INDEX position (which
12698 may be outside of the CTOR array). */
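/* For example, given the initializer { [0] = a, [4 ... 6] = b }, an
   ACCESS_INDEX of 5 matches the RANGE_EXPR element and returns b,
   with *CTOR_IDX set to 1. */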
12699
12700 tree
12701 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
12702 unsigned *ctor_idx)
12703 {
12704 tree index_type = NULL_TREE;
12705 signop index_sgn = UNSIGNED;
12706 offset_int low_bound = 0;
12707
12708 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12709 {
12710 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12711 if (domain_type && TYPE_MIN_VALUE (domain_type))
12712 {
12713 /* Static constructors for variably sized objects make no sense. */
12714 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12715 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12716 /* ??? When it is obvious that the range is signed, treat it so. */
12717 if (TYPE_UNSIGNED (index_type)
12718 && TYPE_MAX_VALUE (domain_type)
12719 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
12720 TYPE_MIN_VALUE (domain_type)))
12721 {
12722 index_sgn = SIGNED;
12723 low_bound
12724 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
12725 SIGNED);
12726 }
12727 else
12728 {
12729 index_sgn = TYPE_SIGN (index_type);
12730 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12731 }
12732 }
12733 }
12734
12735 if (index_type)
12736 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12737 index_sgn);
12738
12739 offset_int index = low_bound;
12740 if (index_type)
12741 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12742
12743 offset_int max_index = index;
12744 unsigned cnt;
12745 tree cfield, cval;
12746 bool first_p = true;
12747
12748 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12749 {
12750 /* An array constructor might explicitly set the index, or specify a
12751 range, or leave the index NULL, meaning that it is the next index
12752 after the previous one. */
12753 if (cfield)
12754 {
12755 if (TREE_CODE (cfield) == INTEGER_CST)
12756 max_index = index
12757 = offset_int::from (wi::to_wide (cfield), index_sgn);
12758 else
12759 {
12760 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12761 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
12762 index_sgn);
12763 max_index
12764 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
12765 index_sgn);
12766 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
12767 }
12768 }
12769 else if (!first_p)
12770 {
12771 index = max_index + 1;
12772 if (index_type)
12773 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12774 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
12775 max_index = index;
12776 }
12777 else
12778 first_p = false;
12779
12780 /* Do we have a match? */
12781 if (wi::cmp (access_index, index, index_sgn) >= 0)
12782 {
12783 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
12784 {
12785 if (ctor_idx)
12786 *ctor_idx = cnt;
12787 return cval;
12788 }
12789 }
12790 else if (in_gimple_form)
12791 /* We're past the element we search for. Note that during parsing
12792 the elements might not be sorted.
12793 ??? We should use a binary search and a flag on the
12794 CONSTRUCTOR as to whether elements are sorted in declaration
12795 order. */
12796 break;
12797 }
12798 if (ctor_idx)
12799 *ctor_idx = cnt;
12800 return NULL_TREE;
12801 }
12802
12803 /* Perform constant folding and related simplification of EXPR.
12804 The related simplifications include x*1 => x, x*0 => 0, etc.,
12805 and application of the associative law.
12806 NOP_EXPR conversions may be removed freely (as long as we
12807 are careful not to change the type of the overall expression).
12808 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12809 but we can constant-fold them if they have constant operands. */
12810
12811 #ifdef ENABLE_FOLD_CHECKING
12812 # define fold(x) fold_1 (x)
12813 static tree fold_1 (tree);
12814 static
12815 #endif
12816 tree
12817 fold (tree expr)
12818 {
12819 const tree t = expr;
12820 enum tree_code code = TREE_CODE (t);
12821 enum tree_code_class kind = TREE_CODE_CLASS (code);
12822 tree tem;
12823 location_t loc = EXPR_LOCATION (expr);
12824
12825 /* Return right away if a constant. */
12826 if (kind == tcc_constant)
12827 return t;
12828
12829 /* CALL_EXPR-like objects with variable numbers of operands are
12830 treated specially. */
12831 if (kind == tcc_vl_exp)
12832 {
12833 if (code == CALL_EXPR)
12834 {
12835 tem = fold_call_expr (loc, expr, false);
12836 return tem ? tem : expr;
12837 }
12838 return expr;
12839 }
12840
12841 if (IS_EXPR_CODE_CLASS (kind))
12842 {
12843 tree type = TREE_TYPE (t);
12844 tree op0, op1, op2;
12845
12846 switch (TREE_CODE_LENGTH (code))
12847 {
12848 case 1:
12849 op0 = TREE_OPERAND (t, 0);
12850 tem = fold_unary_loc (loc, code, type, op0);
12851 return tem ? tem : expr;
12852 case 2:
12853 op0 = TREE_OPERAND (t, 0);
12854 op1 = TREE_OPERAND (t, 1);
12855 tem = fold_binary_loc (loc, code, type, op0, op1);
12856 return tem ? tem : expr;
12857 case 3:
12858 op0 = TREE_OPERAND (t, 0);
12859 op1 = TREE_OPERAND (t, 1);
12860 op2 = TREE_OPERAND (t, 2);
12861 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12862 return tem ? tem : expr;
12863 default:
12864 break;
12865 }
12866 }
12867
12868 switch (code)
12869 {
12870 case ARRAY_REF:
12871 {
12872 tree op0 = TREE_OPERAND (t, 0);
12873 tree op1 = TREE_OPERAND (t, 1);
12874
12875 if (TREE_CODE (op1) == INTEGER_CST
12876 && TREE_CODE (op0) == CONSTRUCTOR
12877 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12878 {
12879 tree val = get_array_ctor_element_at_index (op0,
12880 wi::to_offset (op1));
12881 if (val)
12882 return val;
12883 }
12884
12885 return t;
12886 }
12887
12888 /* Return a VECTOR_CST if possible. */
12889 case CONSTRUCTOR:
12890 {
12891 tree type = TREE_TYPE (t);
12892 if (TREE_CODE (type) != VECTOR_TYPE)
12893 return t;
12894
12895 unsigned i;
12896 tree val;
12897 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12898 if (! CONSTANT_CLASS_P (val))
12899 return t;
12900
12901 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12902 }
12903
12904 case CONST_DECL:
12905 return fold (DECL_INITIAL (t));
12906
12907 default:
12908 return t;
12909 } /* switch (code) */
12910 }
12911
12912 #ifdef ENABLE_FOLD_CHECKING
12913 #undef fold
12914
12915 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12916 hash_table<nofree_ptr_hash<const tree_node> > *);
12917 static void fold_check_failed (const_tree, const_tree);
12918 void print_fold_checksum (const_tree);
12919
12920 /* When --enable-checking=fold, compute a digest of EXPR before
12921 and after the actual fold call to verify that fold did not
12922 accidentally change the original EXPR. */
12923
12924 tree
12925 fold (tree expr)
12926 {
12927 tree ret;
12928 struct md5_ctx ctx;
12929 unsigned char checksum_before[16], checksum_after[16];
12930 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12931
12932 md5_init_ctx (&ctx);
12933 fold_checksum_tree (expr, &ctx, &ht);
12934 md5_finish_ctx (&ctx, checksum_before);
12935 ht.empty ();
12936
12937 ret = fold_1 (expr);
12938
12939 md5_init_ctx (&ctx);
12940 fold_checksum_tree (expr, &ctx, &ht);
12941 md5_finish_ctx (&ctx, checksum_after);
12942
12943 if (memcmp (checksum_before, checksum_after, 16))
12944 fold_check_failed (expr, ret);
12945
12946 return ret;
12947 }
12948
12949 void
12950 print_fold_checksum (const_tree expr)
12951 {
12952 struct md5_ctx ctx;
12953 unsigned char checksum[16], cnt;
12954 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12955
12956 md5_init_ctx (&ctx);
12957 fold_checksum_tree (expr, &ctx, &ht);
12958 md5_finish_ctx (&ctx, checksum);
12959 for (cnt = 0; cnt < 16; ++cnt)
12960 fprintf (stderr, "%02x", checksum[cnt]);
12961 putc ('\n', stderr);
12962 }
12963
12964 static void
12965 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12966 {
12967 internal_error ("fold check: original tree changed by fold");
12968 }
12969
12970 static void
12971 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12972 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12973 {
12974 const tree_node **slot;
12975 enum tree_code code;
12976 union tree_node *buf;
12977 int i, len;
12978
12979 recursive_label:
12980 if (expr == NULL)
12981 return;
12982 slot = ht->find_slot (expr, INSERT);
12983 if (*slot != NULL)
12984 return;
12985 *slot = expr;
12986 code = TREE_CODE (expr);
12987 if (TREE_CODE_CLASS (code) == tcc_declaration
12988 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12989 {
12990 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12991 size_t sz = tree_size (expr);
12992 buf = XALLOCAVAR (union tree_node, sz);
12993 memcpy ((char *) buf, expr, sz);
12994 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
12995 buf->decl_with_vis.symtab_node = NULL;
12996 buf->base.nowarning_flag = 0;
12997 expr = (tree) buf;
12998 }
12999 else if (TREE_CODE_CLASS (code) == tcc_type
13000 && (TYPE_POINTER_TO (expr)
13001 || TYPE_REFERENCE_TO (expr)
13002 || TYPE_CACHED_VALUES_P (expr)
13003 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13004 || TYPE_NEXT_VARIANT (expr)
13005 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13006 {
13007 /* Allow these fields to be modified. */
13008 tree tmp;
13009 size_t sz = tree_size (expr);
13010 buf = XALLOCAVAR (union tree_node, sz);
13011 memcpy ((char *) buf, expr, sz);
13012 expr = tmp = (tree) buf;
13013 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13014 TYPE_POINTER_TO (tmp) = NULL;
13015 TYPE_REFERENCE_TO (tmp) = NULL;
13016 TYPE_NEXT_VARIANT (tmp) = NULL;
13017 TYPE_ALIAS_SET (tmp) = -1;
13018 if (TYPE_CACHED_VALUES_P (tmp))
13019 {
13020 TYPE_CACHED_VALUES_P (tmp) = 0;
13021 TYPE_CACHED_VALUES (tmp) = NULL;
13022 }
13023 }
13024 else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
13025 {
13026 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
13027 and change builtins.c etc. instead - see PR89543. */
13028 size_t sz = tree_size (expr);
13029 buf = XALLOCAVAR (union tree_node, sz);
13030 memcpy ((char *) buf, expr, sz);
13031 buf->base.nowarning_flag = 0;
13032 expr = (tree) buf;
13033 }
13034 md5_process_bytes (expr, tree_size (expr), ctx);
13035 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13036 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13037 if (TREE_CODE_CLASS (code) != tcc_type
13038 && TREE_CODE_CLASS (code) != tcc_declaration
13039 && code != TREE_LIST
13040 && code != SSA_NAME
13041 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13042 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13043 switch (TREE_CODE_CLASS (code))
13044 {
13045 case tcc_constant:
13046 switch (code)
13047 {
13048 case STRING_CST:
13049 md5_process_bytes (TREE_STRING_POINTER (expr),
13050 TREE_STRING_LENGTH (expr), ctx);
13051 break;
13052 case COMPLEX_CST:
13053 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13054 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13055 break;
13056 case VECTOR_CST:
13057 len = vector_cst_encoded_nelts (expr);
13058 for (i = 0; i < len; ++i)
13059 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13060 break;
13061 default:
13062 break;
13063 }
13064 break;
13065 case tcc_exceptional:
13066 switch (code)
13067 {
13068 case TREE_LIST:
13069 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13070 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13071 expr = TREE_CHAIN (expr);
13072 goto recursive_label;
13073 break;
13074 case TREE_VEC:
13075 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13076 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13077 break;
13078 default:
13079 break;
13080 }
13081 break;
13082 case tcc_expression:
13083 case tcc_reference:
13084 case tcc_comparison:
13085 case tcc_unary:
13086 case tcc_binary:
13087 case tcc_statement:
13088 case tcc_vl_exp:
13089 len = TREE_OPERAND_LENGTH (expr);
13090 for (i = 0; i < len; ++i)
13091 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13092 break;
13093 case tcc_declaration:
13094 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13095 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13096 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13097 {
13098 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13099 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13100 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13101 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13102 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13103 }
13104
13105 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13106 {
13107 if (TREE_CODE (expr) == FUNCTION_DECL)
13108 {
13109 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13110 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13111 }
13112 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13113 }
13114 break;
13115 case tcc_type:
13116 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13117 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13118 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13119 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13120 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13121 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13122 if (INTEGRAL_TYPE_P (expr)
13123 || SCALAR_FLOAT_TYPE_P (expr))
13124 {
13125 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13126 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13127 }
13128 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13129 if (TREE_CODE (expr) == RECORD_TYPE
13130 || TREE_CODE (expr) == UNION_TYPE
13131 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13132 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13133 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13134 break;
13135 default:
13136 break;
13137 }
13138 }
13139
13140 /* Helper function for outputting the checksum of a tree T. When
13141 debugging with gdb, you can "define mynext" to be "next" followed
13142 by "call debug_fold_checksum (op0)", then just trace down till the
13143 outputs differ. */
13144
13145 DEBUG_FUNCTION void
13146 debug_fold_checksum (const_tree t)
13147 {
13148 int i;
13149 unsigned char checksum[16];
13150 struct md5_ctx ctx;
13151 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13152
13153 md5_init_ctx (&ctx);
13154 fold_checksum_tree (t, &ctx, &ht);
13155 md5_finish_ctx (&ctx, checksum);
13156 ht.empty ();
13157
13158 for (i = 0; i < 16; i++)
13159 fprintf (stderr, "%d ", checksum[i]);
13160
13161 fprintf (stderr, "\n");
13162 }
13163
13164 #endif
13165
13166 /* Fold a unary tree expression with code CODE of type TYPE with an
13167 operand OP0. LOC is the location of the resulting expression.
13168 Return a folded expression if successful. Otherwise, return a tree
13169 expression with code CODE of type TYPE with an operand OP0. */
13170
13171 tree
13172 fold_build1_loc (location_t loc,
13173 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13174 {
13175 tree tem;
13176 #ifdef ENABLE_FOLD_CHECKING
13177 unsigned char checksum_before[16], checksum_after[16];
13178 struct md5_ctx ctx;
13179 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13180
13181 md5_init_ctx (&ctx);
13182 fold_checksum_tree (op0, &ctx, &ht);
13183 md5_finish_ctx (&ctx, checksum_before);
13184 ht.empty ();
13185 #endif
13186
13187 tem = fold_unary_loc (loc, code, type, op0);
13188 if (!tem)
13189 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13190
13191 #ifdef ENABLE_FOLD_CHECKING
13192 md5_init_ctx (&ctx);
13193 fold_checksum_tree (op0, &ctx, &ht);
13194 md5_finish_ctx (&ctx, checksum_after);
13195
13196 if (memcmp (checksum_before, checksum_after, 16))
13197 fold_check_failed (op0, tem);
13198 #endif
13199 return tem;
13200 }
13201
13202 /* Fold a binary tree expression with code CODE of type TYPE with
13203 operands OP0 and OP1. LOC is the location of the resulting
13204 expression. Return a folded expression if successful. Otherwise,
13205 return a tree expression with code CODE of type TYPE with operands
13206 OP0 and OP1. */
13207
13208 tree
13209 fold_build2_loc (location_t loc,
13210 enum tree_code code, tree type, tree op0, tree op1
13211 MEM_STAT_DECL)
13212 {
13213 tree tem;
13214 #ifdef ENABLE_FOLD_CHECKING
13215 unsigned char checksum_before_op0[16],
13216 checksum_before_op1[16],
13217 checksum_after_op0[16],
13218 checksum_after_op1[16];
13219 struct md5_ctx ctx;
13220 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13221
13222 md5_init_ctx (&ctx);
13223 fold_checksum_tree (op0, &ctx, &ht);
13224 md5_finish_ctx (&ctx, checksum_before_op0);
13225 ht.empty ();
13226
13227 md5_init_ctx (&ctx);
13228 fold_checksum_tree (op1, &ctx, &ht);
13229 md5_finish_ctx (&ctx, checksum_before_op1);
13230 ht.empty ();
13231 #endif
13232
13233 tem = fold_binary_loc (loc, code, type, op0, op1);
13234 if (!tem)
13235 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13236
13237 #ifdef ENABLE_FOLD_CHECKING
13238 md5_init_ctx (&ctx);
13239 fold_checksum_tree (op0, &ctx, &ht);
13240 md5_finish_ctx (&ctx, checksum_after_op0);
13241 ht.empty ();
13242
13243 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13244 fold_check_failed (op0, tem);
13245
13246 md5_init_ctx (&ctx);
13247 fold_checksum_tree (op1, &ctx, &ht);
13248 md5_finish_ctx (&ctx, checksum_after_op1);
13249
13250 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13251 fold_check_failed (op1, tem);
13252 #endif
13253 return tem;
13254 }
13255
13256 /* Fold a ternary tree expression with code CODE of type TYPE with
13257 operands OP0, OP1, and OP2. Return a folded expression if
13258 successful. Otherwise, return a tree expression with code CODE of
13259 type TYPE with operands OP0, OP1, and OP2. */
13260
13261 tree
13262 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13263 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13264 {
13265 tree tem;
13266 #ifdef ENABLE_FOLD_CHECKING
13267 unsigned char checksum_before_op0[16],
13268 checksum_before_op1[16],
13269 checksum_before_op2[16],
13270 checksum_after_op0[16],
13271 checksum_after_op1[16],
13272 checksum_after_op2[16];
13273 struct md5_ctx ctx;
13274 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13275
13276 md5_init_ctx (&ctx);
13277 fold_checksum_tree (op0, &ctx, &ht);
13278 md5_finish_ctx (&ctx, checksum_before_op0);
13279 ht.empty ();
13280
13281 md5_init_ctx (&ctx);
13282 fold_checksum_tree (op1, &ctx, &ht);
13283 md5_finish_ctx (&ctx, checksum_before_op1);
13284 ht.empty ();
13285
13286 md5_init_ctx (&ctx);
13287 fold_checksum_tree (op2, &ctx, &ht);
13288 md5_finish_ctx (&ctx, checksum_before_op2);
13289 ht.empty ();
13290 #endif
13291
13292 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13293 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13294 if (!tem)
13295 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13296
13297 #ifdef ENABLE_FOLD_CHECKING
13298 md5_init_ctx (&ctx);
13299 fold_checksum_tree (op0, &ctx, &ht);
13300 md5_finish_ctx (&ctx, checksum_after_op0);
13301 ht.empty ();
13302
13303 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13304 fold_check_failed (op0, tem);
13305
13306 md5_init_ctx (&ctx);
13307 fold_checksum_tree (op1, &ctx, &ht);
13308 md5_finish_ctx (&ctx, checksum_after_op1);
13309 ht.empty ();
13310
13311 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13312 fold_check_failed (op1, tem);
13313
13314 md5_init_ctx (&ctx);
13315 fold_checksum_tree (op2, &ctx, &ht);
13316 md5_finish_ctx (&ctx, checksum_after_op2);
13317
13318 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13319 fold_check_failed (op2, tem);
13320 #endif
13321 return tem;
13322 }
13323
13324 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13325 arguments in ARGARRAY, and a null static chain.
13326 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13327 of type TYPE from the given operands as constructed by build_call_array. */
13328
13329 tree
13330 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13331 int nargs, tree *argarray)
13332 {
13333 tree tem;
13334 #ifdef ENABLE_FOLD_CHECKING
13335 unsigned char checksum_before_fn[16],
13336 checksum_before_arglist[16],
13337 checksum_after_fn[16],
13338 checksum_after_arglist[16];
13339 struct md5_ctx ctx;
13340 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13341 int i;
13342
13343 md5_init_ctx (&ctx);
13344 fold_checksum_tree (fn, &ctx, &ht);
13345 md5_finish_ctx (&ctx, checksum_before_fn);
13346 ht.empty ();
13347
13348 md5_init_ctx (&ctx);
13349 for (i = 0; i < nargs; i++)
13350 fold_checksum_tree (argarray[i], &ctx, &ht);
13351 md5_finish_ctx (&ctx, checksum_before_arglist);
13352 ht.empty ();
13353 #endif
13354
13355 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13356 if (!tem)
13357 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13358
13359 #ifdef ENABLE_FOLD_CHECKING
13360 md5_init_ctx (&ctx);
13361 fold_checksum_tree (fn, &ctx, &ht);
13362 md5_finish_ctx (&ctx, checksum_after_fn);
13363 ht.empty ();
13364
13365 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13366 fold_check_failed (fn, tem);
13367
13368 md5_init_ctx (&ctx);
13369 for (i = 0; i < nargs; i++)
13370 fold_checksum_tree (argarray[i], &ctx, &ht);
13371 md5_finish_ctx (&ctx, checksum_after_arglist);
13372
13373 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13374 fold_check_failed (NULL_TREE, tem);
13375 #endif
13376 return tem;
13377 }
13378
13379 /* Perform constant folding and related simplification of initializer
13380 expression EXPR. These behave identically to "fold_buildN" but ignore
13381 potential run-time traps and exceptions that fold must preserve. */
13382
13383 #define START_FOLD_INIT \
13384 int saved_signaling_nans = flag_signaling_nans;\
13385 int saved_trapping_math = flag_trapping_math;\
13386 int saved_rounding_math = flag_rounding_math;\
13387 int saved_trapv = flag_trapv;\
13388 int saved_folding_initializer = folding_initializer;\
13389 flag_signaling_nans = 0;\
13390 flag_trapping_math = 0;\
13391 flag_rounding_math = 0;\
13392 flag_trapv = 0;\
13393 folding_initializer = 1;
13394
13395 #define END_FOLD_INIT \
13396 flag_signaling_nans = saved_signaling_nans;\
13397 flag_trapping_math = saved_trapping_math;\
13398 flag_rounding_math = saved_rounding_math;\
13399 flag_trapv = saved_trapv;\
13400 folding_initializer = saved_folding_initializer;
13401
13402 tree
13403 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13404 tree type, tree op)
13405 {
13406 tree result;
13407 START_FOLD_INIT;
13408
13409 result = fold_build1_loc (loc, code, type, op);
13410
13411 END_FOLD_INIT;
13412 return result;
13413 }
13414
13415 tree
13416 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13417 tree type, tree op0, tree op1)
13418 {
13419 tree result;
13420 START_FOLD_INIT;
13421
13422 result = fold_build2_loc (loc, code, type, op0, op1);
13423
13424 END_FOLD_INIT;
13425 return result;
13426 }
13427
13428 tree
13429 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13430 int nargs, tree *argarray)
13431 {
13432 tree result;
13433 START_FOLD_INIT;
13434
13435 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13436
13437 END_FOLD_INIT;
13438 return result;
13439 }
13440
13441 #undef START_FOLD_INIT
13442 #undef END_FOLD_INIT
13443
13444 /* Determine if the first argument is a multiple of the second argument.
13445 Return 0 if it is not, or if we cannot easily determine it to be.
13446
13447 An example of the sort of thing we care about (at this point; this routine
13448 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13449 fold cases do now) is discovering that
13450
13451 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13452
13453 is a multiple of
13454
13455 SAVE_EXPR (J * 8)
13456
13457 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13458
13459 This code also handles discovering that
13460
13461 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13462
13463 is a multiple of 8 so we don't have to worry about dealing with a
13464 possible remainder.
13465
13466 Note that we *look* inside a SAVE_EXPR only to determine how it was
13467 calculated; it is not safe for fold to do much of anything else with the
13468 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13469 at run time. For example, the latter example above *cannot* be implemented
13470 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13471 evaluation time of the original SAVE_EXPR is not necessarily the same at
13472 the time the new expression is evaluated. The only optimization of this
13473 sort that would be valid is changing
13474
13475 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13476
13477 divided by 8 to
13478
13479 SAVE_EXPR (I) * SAVE_EXPR (J)
13480
13481 (where the same SAVE_EXPR (J) is used in the original and the
13482 transformed version). */
13483
13484 int
13485 multiple_of_p (tree type, const_tree top, const_tree bottom)
13486 {
13487 gimple *stmt;
13488 tree t1, op1, op2;
13489
13490 if (operand_equal_p (top, bottom, 0))
13491 return 1;
13492
13493 if (TREE_CODE (type) != INTEGER_TYPE)
13494 return 0;
13495
13496 switch (TREE_CODE (top))
13497 {
13498 case BIT_AND_EXPR:
13499 /* Bitwise and provides a power of two multiple. If the mask is
13500 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13501 if (!integer_pow2p (bottom))
13502 return 0;
13503 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13504 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13505
13506 case MULT_EXPR:
13507 if (TREE_CODE (bottom) == INTEGER_CST)
13508 {
13509 op1 = TREE_OPERAND (top, 0);
13510 op2 = TREE_OPERAND (top, 1);
13511 if (TREE_CODE (op1) == INTEGER_CST)
13512 std::swap (op1, op2);
13513 if (TREE_CODE (op2) == INTEGER_CST)
13514 {
13515 if (multiple_of_p (type, op2, bottom))
13516 return 1;
13517 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
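/* If BOTTOM is a multiple of OP2, it is enough that OP1 be a
   multiple of BOTTOM / OP2: in the example above, x * 2 + 2 is a
   multiple of 8 / 4 == 2. */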
13518 if (multiple_of_p (type, bottom, op2))
13519 {
13520 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
13521 wi::to_widest (op2));
13522 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
13523 {
13524 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
13525 return multiple_of_p (type, op1, op2);
13526 }
13527 }
13528 return multiple_of_p (type, op1, bottom);
13529 }
13530 }
13531 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13532 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13533
13534 case MINUS_EXPR:
13535 /* It is impossible to prove precisely whether op0 - op1 is a multiple
13536 of bottom, so be conservative here and check whether both op0 and
13537 op1 are multiples of bottom. Note we check the second operand first
13538 since it's usually simpler. */
13539 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13540 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13541
13542 case PLUS_EXPR:
13543 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13544 as op0 - 3 if the expression has unsigned type. For example,
13545 (X / 3) * 3 + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
13546 op1 = TREE_OPERAND (top, 1);
13547 if (TYPE_UNSIGNED (type)
13548 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
13549 op1 = fold_build1 (NEGATE_EXPR, type, op1);
13550 return (multiple_of_p (type, op1, bottom)
13551 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13552
13553 case LSHIFT_EXPR:
13554 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13555 {
13556 op1 = TREE_OPERAND (top, 1);
13557 /* const_binop may not detect overflow correctly,
13558 so check for it explicitly here. */
13559 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
13560 wi::to_wide (op1))
13561 && (t1 = fold_convert (type,
13562 const_binop (LSHIFT_EXPR, size_one_node,
13563 op1))) != 0
13564 && !TREE_OVERFLOW (t1))
13565 return multiple_of_p (type, t1, bottom);
13566 }
13567 return 0;
13568
13569 case NOP_EXPR:
13570 /* Can't handle conversions from non-integral or wider integral type. */
13571 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13572 || (TYPE_PRECISION (type)
13573 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13574 return 0;
13575
13576 /* fall through */
13577
13578 case SAVE_EXPR:
13579 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13580
13581 case COND_EXPR:
13582 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13583 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13584
13585 case INTEGER_CST:
13586 if (TREE_CODE (bottom) != INTEGER_CST
13587 || integer_zerop (bottom)
13588 || (TYPE_UNSIGNED (type)
13589 && (tree_int_cst_sgn (top) < 0
13590 || tree_int_cst_sgn (bottom) < 0)))
13591 return 0;
13592 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13593 SIGNED);
13594
13595 case SSA_NAME:
13596 if (TREE_CODE (bottom) == INTEGER_CST
13597 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
13598 && gimple_code (stmt) == GIMPLE_ASSIGN)
13599 {
13600 enum tree_code code = gimple_assign_rhs_code (stmt);
13601
13602 /* Check for special cases to see if top is defined as multiple
13603 of bottom:
13604
13605 top = X & ~(bottom - 1) ; bottom is a power of 2
13606
13607 or
13608
13609 Y = X % bottom
13610 top = X - Y. */
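/* E.g. X & ~15 is always a multiple of 16, and X - X % N is a
   multiple of N by construction. */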
13611 if (code == BIT_AND_EXPR
13612 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13613 && TREE_CODE (op2) == INTEGER_CST
13614 && integer_pow2p (bottom)
13615 && wi::multiple_of_p (wi::to_widest (op2),
13616 wi::to_widest (bottom), UNSIGNED))
13617 return 1;
13618
13619 op1 = gimple_assign_rhs1 (stmt);
13620 if (code == MINUS_EXPR
13621 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13622 && TREE_CODE (op2) == SSA_NAME
13623 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
13624 && gimple_code (stmt) == GIMPLE_ASSIGN
13625 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
13626 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
13627 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
13628 return 1;
13629 }
13630
13631 /* fall through */
13632
13633 default:
13634 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
13635 return multiple_p (wi::to_poly_widest (top),
13636 wi::to_poly_widest (bottom));
13637
13638 return 0;
13639 }
13640 }
13641
13642 #define tree_expr_nonnegative_warnv_p(X, Y) \
13643 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13644
13645 #define RECURSE(X) \
13646 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
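/* The #define above turns any direct call to tree_expr_nonnegative_warnv_p
   within this file into a compile-time error, so recursive queries must go
   through RECURSE, which bumps DEPTH and keeps the recursion bounded. */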
13647
13648 /* Return true if CODE or TYPE is known to be non-negative. */
13649
13650 static bool
13651 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13652 {
13653 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13654 && truth_value_p (code))
13655 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13656 have a signed:1 type (where the values are -1 and 0). */
13657 return true;
13658 return false;
13659 }
13660
13661 /* Return true if (CODE OP0) is known to be non-negative. If the return
13662 value is based on the assumption that signed overflow is undefined,
13663 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13664 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13665
13666 bool
13667 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13668 bool *strict_overflow_p, int depth)
13669 {
13670 if (TYPE_UNSIGNED (type))
13671 return true;
13672
13673 switch (code)
13674 {
13675 case ABS_EXPR:
13676 /* We can't return 1 if flag_wrapv is set because
13677 ABS_EXPR<INT_MIN> = INT_MIN. */
13678 if (!ANY_INTEGRAL_TYPE_P (type))
13679 return true;
13680 if (TYPE_OVERFLOW_UNDEFINED (type))
13681 {
13682 *strict_overflow_p = true;
13683 return true;
13684 }
13685 break;
13686
13687 case NON_LVALUE_EXPR:
13688 case FLOAT_EXPR:
13689 case FIX_TRUNC_EXPR:
13690 return RECURSE (op0);
13691
13692 CASE_CONVERT:
13693 {
13694 tree inner_type = TREE_TYPE (op0);
13695 tree outer_type = type;
13696
13697 if (TREE_CODE (outer_type) == REAL_TYPE)
13698 {
13699 if (TREE_CODE (inner_type) == REAL_TYPE)
13700 return RECURSE (op0);
13701 if (INTEGRAL_TYPE_P (inner_type))
13702 {
13703 if (TYPE_UNSIGNED (inner_type))
13704 return true;
13705 return RECURSE (op0);
13706 }
13707 }
13708 else if (INTEGRAL_TYPE_P (outer_type))
13709 {
13710 if (TREE_CODE (inner_type) == REAL_TYPE)
13711 return RECURSE (op0);
13712 if (INTEGRAL_TYPE_P (inner_type))
13713 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13714 && TYPE_UNSIGNED (inner_type);
13715 }
13716 }
13717 break;
13718
13719 default:
13720 return tree_simple_nonnegative_warnv_p (code, type);
13721 }
13722
13723 /* We don't know sign of `t', so be conservative and return false. */
13724 return false;
13725 }
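
/* A worked example, illustrative only: for signed int X, ABS_EXPR <X>
   is reported non-negative only when signed overflow is undefined,
   in which case *STRICT_OVERFLOW_P is set; under -fwrapv the ABS_EXPR
   case above falls through, since ABS_EXPR <INT_MIN> is INT_MIN.  */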
13726
13727 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13728 value is based on the assumption that signed overflow is undefined,
13729 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13730 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13731
13732 bool
13733 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13734 tree op1, bool *strict_overflow_p,
13735 int depth)
13736 {
13737 if (TYPE_UNSIGNED (type))
13738 return true;
13739
13740 switch (code)
13741 {
13742 case POINTER_PLUS_EXPR:
13743 case PLUS_EXPR:
13744 if (FLOAT_TYPE_P (type))
13745 return RECURSE (op0) && RECURSE (op1);
13746
13747 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13748 both unsigned and at least 2 bits shorter than the result. */
13749 if (TREE_CODE (type) == INTEGER_TYPE
13750 && TREE_CODE (op0) == NOP_EXPR
13751 && TREE_CODE (op1) == NOP_EXPR)
13752 {
13753 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13754 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13755 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13756 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13757 {
13758 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13759 TYPE_PRECISION (inner2)) + 1;
13760 return prec < TYPE_PRECISION (type);
13761 }
13762 }
13763 break;
13764
13765 case MULT_EXPR:
13766 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13767 {
13768 /* x * x is always non-negative for floating point x
13769 or without overflow. */
13770 if (operand_equal_p (op0, op1, 0)
13771 || (RECURSE (op0) && RECURSE (op1)))
13772 {
13773 if (ANY_INTEGRAL_TYPE_P (type)
13774 && TYPE_OVERFLOW_UNDEFINED (type))
13775 *strict_overflow_p = true;
13776 return true;
13777 }
13778 }
13779
13780 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13781 both unsigned and their combined precision is less than that of the result. */
13782 if (TREE_CODE (type) == INTEGER_TYPE
13783 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13784 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13785 {
13786 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13787 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13788 : TREE_TYPE (op0);
13789 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13790 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13791 : TREE_TYPE (op1);
13792
13793 bool unsigned0 = TYPE_UNSIGNED (inner0);
13794 bool unsigned1 = TYPE_UNSIGNED (inner1);
13795
13796 if (TREE_CODE (op0) == INTEGER_CST)
13797 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13798
13799 if (TREE_CODE (op1) == INTEGER_CST)
13800 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13801
13802 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13803 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13804 {
13805 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13806 ? tree_int_cst_min_precision (op0, UNSIGNED)
13807 : TYPE_PRECISION (inner0);
13808
13809 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13810 ? tree_int_cst_min_precision (op1, UNSIGNED)
13811 : TYPE_PRECISION (inner1);
13812
13813 return precision0 + precision1 < TYPE_PRECISION (type);
13814 }
13815 }
13816 return false;
13817
13818 case BIT_AND_EXPR:
13819 case MAX_EXPR:
13820 return RECURSE (op0) || RECURSE (op1);
13821
13822 case BIT_IOR_EXPR:
13823 case BIT_XOR_EXPR:
13824 case MIN_EXPR:
13825 case RDIV_EXPR:
13826 case TRUNC_DIV_EXPR:
13827 case CEIL_DIV_EXPR:
13828 case FLOOR_DIV_EXPR:
13829 case ROUND_DIV_EXPR:
13830 return RECURSE (op0) && RECURSE (op1);
13831
13832 case TRUNC_MOD_EXPR:
13833 return RECURSE (op0);
13834
13835 case FLOOR_MOD_EXPR:
13836 return RECURSE (op1);
13837
13838 case CEIL_MOD_EXPR:
13839 case ROUND_MOD_EXPR:
13840 default:
13841 return tree_simple_nonnegative_warnv_p (code, type);
13842 }
13843
13844 /* We don't know sign of `t', so be conservative and return false. */
13845 return false;
13846 }
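
/* A worked example, illustrative only: for the hypothetical source
   expression

     int p = (int) (unsigned char) a * (int) (unsigned char) b;

   the MULT_EXPR arm above sees two NOP_EXPRs with 8-bit unsigned
   inner types, so precision0 + precision1 = 16 < 32 and the product
   is known non-negative even under -fwrapv, where the preceding
   TYPE_OVERFLOW_UNDEFINED branch does not apply.  */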
13847
13848 /* Return true if T is known to be non-negative. If the return
13849 value is based on the assumption that signed overflow is undefined,
13850 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13851 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13852
13853 bool
13854 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13855 {
13856 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13857 return true;
13858
13859 switch (TREE_CODE (t))
13860 {
13861 case INTEGER_CST:
13862 return tree_int_cst_sgn (t) >= 0;
13863
13864 case REAL_CST:
13865 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13866
13867 case FIXED_CST:
13868 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13869
13870 case COND_EXPR:
13871 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13872
13873 case SSA_NAME:
13874 /* Limit the depth of recursion to avoid quadratic behavior.
13875 This is expected to catch almost all occurrences in practice.
13876 If this code misses important cases that unbounded recursion
13877 would not, passes that need this information could be revised
13878 to provide it through dataflow propagation. */
13879 return (!name_registered_for_update_p (t)
13880 && depth < param_max_ssa_name_query_depth
13881 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13882 strict_overflow_p, depth));
13883
13884 default:
13885 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13886 }
13887 }
13888
13889 /* Return true if T is known to be non-negative. If the return
13890 value is based on the assumption that signed overflow is undefined,
13891 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13892 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13893
13894 bool
13895 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13896 bool *strict_overflow_p, int depth)
13897 {
13898 switch (fn)
13899 {
13900 CASE_CFN_ACOS:
13901 CASE_CFN_ACOSH:
13902 CASE_CFN_CABS:
13903 CASE_CFN_COSH:
13904 CASE_CFN_ERFC:
13905 CASE_CFN_EXP:
13906 CASE_CFN_EXP10:
13907 CASE_CFN_EXP2:
13908 CASE_CFN_FABS:
13909 CASE_CFN_FDIM:
13910 CASE_CFN_HYPOT:
13911 CASE_CFN_POW10:
13912 CASE_CFN_FFS:
13913 CASE_CFN_PARITY:
13914 CASE_CFN_POPCOUNT:
13915 CASE_CFN_CLZ:
13916 CASE_CFN_CLRSB:
13917 case CFN_BUILT_IN_BSWAP16:
13918 case CFN_BUILT_IN_BSWAP32:
13919 case CFN_BUILT_IN_BSWAP64:
13920 case CFN_BUILT_IN_BSWAP128:
13921 /* Always true. */
13922 return true;
13923
13924 CASE_CFN_SQRT:
13925 CASE_CFN_SQRT_FN:
13926 /* sqrt(-0.0) is -0.0. */
13927 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13928 return true;
13929 return RECURSE (arg0);
13930
13931 CASE_CFN_ASINH:
13932 CASE_CFN_ATAN:
13933 CASE_CFN_ATANH:
13934 CASE_CFN_CBRT:
13935 CASE_CFN_CEIL:
13936 CASE_CFN_CEIL_FN:
13937 CASE_CFN_ERF:
13938 CASE_CFN_EXPM1:
13939 CASE_CFN_FLOOR:
13940 CASE_CFN_FLOOR_FN:
13941 CASE_CFN_FMOD:
13942 CASE_CFN_FREXP:
13943 CASE_CFN_ICEIL:
13944 CASE_CFN_IFLOOR:
13945 CASE_CFN_IRINT:
13946 CASE_CFN_IROUND:
13947 CASE_CFN_LCEIL:
13948 CASE_CFN_LDEXP:
13949 CASE_CFN_LFLOOR:
13950 CASE_CFN_LLCEIL:
13951 CASE_CFN_LLFLOOR:
13952 CASE_CFN_LLRINT:
13953 CASE_CFN_LLROUND:
13954 CASE_CFN_LRINT:
13955 CASE_CFN_LROUND:
13956 CASE_CFN_MODF:
13957 CASE_CFN_NEARBYINT:
13958 CASE_CFN_NEARBYINT_FN:
13959 CASE_CFN_RINT:
13960 CASE_CFN_RINT_FN:
13961 CASE_CFN_ROUND:
13962 CASE_CFN_ROUND_FN:
13963 CASE_CFN_ROUNDEVEN:
13964 CASE_CFN_ROUNDEVEN_FN:
13965 CASE_CFN_SCALB:
13966 CASE_CFN_SCALBLN:
13967 CASE_CFN_SCALBN:
13968 CASE_CFN_SIGNBIT:
13969 CASE_CFN_SIGNIFICAND:
13970 CASE_CFN_SINH:
13971 CASE_CFN_TANH:
13972 CASE_CFN_TRUNC:
13973 CASE_CFN_TRUNC_FN:
13974 /* True if the 1st argument is nonnegative. */
13975 return RECURSE (arg0);
13976
13977 CASE_CFN_FMAX:
13978 CASE_CFN_FMAX_FN:
13979 /* True if the 1st OR 2nd arguments are nonnegative. */
13980 return RECURSE (arg0) || RECURSE (arg1);
13981
13982 CASE_CFN_FMIN:
13983 CASE_CFN_FMIN_FN:
13984 /* True if the 1st AND 2nd arguments are nonnegative. */
13985 return RECURSE (arg0) && RECURSE (arg1);
13986
13987 CASE_CFN_COPYSIGN:
13988 CASE_CFN_COPYSIGN_FN:
13989 /* True if the 2nd argument is nonnegative. */
13990 return RECURSE (arg1);
13991
13992 CASE_CFN_POWI:
13993 /* True if the 1st argument is nonnegative or the second
13994 argument is an even integer. */
13995 if (TREE_CODE (arg1) == INTEGER_CST
13996 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13997 return true;
13998 return RECURSE (arg0);
13999
14000 CASE_CFN_POW:
14001 /* True if the 1st argument is nonnegative or the second
14002 argument is an even integer valued real. */
14003 if (TREE_CODE (arg1) == REAL_CST)
14004 {
14005 REAL_VALUE_TYPE c;
14006 HOST_WIDE_INT n;
14007
14008 c = TREE_REAL_CST (arg1);
14009 n = real_to_integer (&c);
14010 if ((n & 1) == 0)
14011 {
14012 REAL_VALUE_TYPE cint;
14013 real_from_integer (&cint, VOIDmode, n, SIGNED);
14014 if (real_identical (&c, &cint))
14015 return true;
14016 }
14017 }
14018 return RECURSE (arg0);
14019
14020 default:
14021 break;
14022 }
14023 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
14024 }
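
/* A worked example, illustrative only: for a call pow (x, 2.0), the
   CASE_CFN_POW arm reads ARG1 = 2.0, real_to_integer yields N = 2,
   the low bit of N is clear, and 2.0 round-trips through
   real_from_integer / real_identical, so the result is known
   non-negative whatever the sign of X.  */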
14025
14026 /* Return true if T is known to be non-negative. If the return
14027 value is based on the assumption that signed overflow is undefined,
14028 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14029 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14030
14031 static bool
14032 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14033 {
14034 enum tree_code code = TREE_CODE (t);
14035 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14036 return true;
14037
14038 switch (code)
14039 {
14040 case TARGET_EXPR:
14041 {
14042 tree temp = TARGET_EXPR_SLOT (t);
14043 t = TARGET_EXPR_INITIAL (t);
14044
14045 /* If the initializer is non-void, then it's a normal expression
14046 that will be assigned to the slot. */
14047 if (!VOID_TYPE_P (t))
14048 return RECURSE (t);
14049
14050 /* Otherwise, the initializer sets the slot in some way. One common
14051 way is an assignment statement at the end of the initializer. */
14052 while (1)
14053 {
14054 if (TREE_CODE (t) == BIND_EXPR)
14055 t = expr_last (BIND_EXPR_BODY (t));
14056 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14057 || TREE_CODE (t) == TRY_CATCH_EXPR)
14058 t = expr_last (TREE_OPERAND (t, 0));
14059 else if (TREE_CODE (t) == STATEMENT_LIST)
14060 t = expr_last (t);
14061 else
14062 break;
14063 }
14064 if (TREE_CODE (t) == MODIFY_EXPR
14065 && TREE_OPERAND (t, 0) == temp)
14066 return RECURSE (TREE_OPERAND (t, 1));
14067
14068 return false;
14069 }
14070
14071 case CALL_EXPR:
14072 {
14073 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14074 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14075
14076 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14077 get_call_combined_fn (t),
14078 arg0,
14079 arg1,
14080 strict_overflow_p, depth);
14081 }
14082 case COMPOUND_EXPR:
14083 case MODIFY_EXPR:
14084 return RECURSE (TREE_OPERAND (t, 1));
14085
14086 case BIND_EXPR:
14087 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
14088
14089 case SAVE_EXPR:
14090 return RECURSE (TREE_OPERAND (t, 0));
14091
14092 default:
14093 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14094 }
14095 }
14096
14097 #undef RECURSE
14098 #undef tree_expr_nonnegative_warnv_p
14099
14100 /* Return true if T is known to be non-negative. If the return
14101 value is based on the assumption that signed overflow is undefined,
14102 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14103 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14104
14105 bool
14106 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14107 {
14108 enum tree_code code;
14109 if (t == error_mark_node)
14110 return false;
14111
14112 code = TREE_CODE (t);
14113 switch (TREE_CODE_CLASS (code))
14114 {
14115 case tcc_binary:
14116 case tcc_comparison:
14117 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14118 TREE_TYPE (t),
14119 TREE_OPERAND (t, 0),
14120 TREE_OPERAND (t, 1),
14121 strict_overflow_p, depth);
14122
14123 case tcc_unary:
14124 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14125 TREE_TYPE (t),
14126 TREE_OPERAND (t, 0),
14127 strict_overflow_p, depth);
14128
14129 case tcc_constant:
14130 case tcc_declaration:
14131 case tcc_reference:
14132 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14133
14134 default:
14135 break;
14136 }
14137
14138 switch (code)
14139 {
14140 case TRUTH_AND_EXPR:
14141 case TRUTH_OR_EXPR:
14142 case TRUTH_XOR_EXPR:
14143 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14144 TREE_TYPE (t),
14145 TREE_OPERAND (t, 0),
14146 TREE_OPERAND (t, 1),
14147 strict_overflow_p, depth);
14148 case TRUTH_NOT_EXPR:
14149 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14150 TREE_TYPE (t),
14151 TREE_OPERAND (t, 0),
14152 strict_overflow_p, depth);
14153
14154 case COND_EXPR:
14155 case CONSTRUCTOR:
14156 case OBJ_TYPE_REF:
14157 case ASSERT_EXPR:
14158 case ADDR_EXPR:
14159 case WITH_SIZE_EXPR:
14160 case SSA_NAME:
14161 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14162
14163 default:
14164 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
14165 }
14166 }
14167
14168 /* Return true if `t' is known to be non-negative. Handle warnings
14169 about undefined signed overflow. */
14170
14171 bool
14172 tree_expr_nonnegative_p (tree t)
14173 {
14174 bool ret, strict_overflow_p;
14175
14176 strict_overflow_p = false;
14177 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14178 if (strict_overflow_p)
14179 fold_overflow_warning (("assuming signed overflow does not occur when "
14180 "determining that expression is always "
14181 "non-negative"),
14182 WARN_STRICT_OVERFLOW_MISC);
14183 return ret;
14184 }
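
/* A usage sketch, illustrative only, with a hypothetical constant:

     tree t = build_int_cst (integer_type_node, -5);
     tree_expr_nonnegative_p (t);   => false

   via the tcc_constant case of the worker above; the overflow
   warning fires only when the answer relied on the assumption that
   signed overflow is undefined.  */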
14185
14186
14187 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14188 For floating point we further ensure that T is not denormal.
14189 Similar logic is present in nonzero_address_p in rtlanal.c.
14190
14191 If the return value is based on the assumption that signed overflow
14192 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14193 change *STRICT_OVERFLOW_P. */
14194
14195 bool
14196 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14197 bool *strict_overflow_p)
14198 {
14199 switch (code)
14200 {
14201 case ABS_EXPR:
14202 return tree_expr_nonzero_warnv_p (op0,
14203 strict_overflow_p);
14204
14205 case NOP_EXPR:
14206 {
14207 tree inner_type = TREE_TYPE (op0);
14208 tree outer_type = type;
14209
14210 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14211 && tree_expr_nonzero_warnv_p (op0,
14212 strict_overflow_p));
14213 }
14214 break;
14215
14216 case NON_LVALUE_EXPR:
14217 return tree_expr_nonzero_warnv_p (op0,
14218 strict_overflow_p);
14219
14220 default:
14221 break;
14222 }
14223
14224 return false;
14225 }
14226
14227 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14228 For floating point we further ensure that T is not denormal.
14229 Similar logic is present in nonzero_address_p in rtlanal.c.
14230
14231 If the return value is based on the assumption that signed overflow
14232 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14233 change *STRICT_OVERFLOW_P. */
14234
14235 bool
14236 tree_binary_nonzero_warnv_p (enum tree_code code,
14237 tree type,
14238 tree op0,
14239 tree op1, bool *strict_overflow_p)
14240 {
14241 bool sub_strict_overflow_p;
14242 switch (code)
14243 {
14244 case POINTER_PLUS_EXPR:
14245 case PLUS_EXPR:
14246 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
14247 {
14248 /* In the presence of negative values it is hard
14249 to say anything definite. */
14250 sub_strict_overflow_p = false;
14251 if (!tree_expr_nonnegative_warnv_p (op0,
14252 &sub_strict_overflow_p)
14253 || !tree_expr_nonnegative_warnv_p (op1,
14254 &sub_strict_overflow_p))
14255 return false;
14256 /* One of operands must be positive and the other non-negative. */
14257 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14258 overflows, on a twos-complement machine the sum of two
14259 nonnegative numbers can never be zero. */
14260 return (tree_expr_nonzero_warnv_p (op0,
14261 strict_overflow_p)
14262 || tree_expr_nonzero_warnv_p (op1,
14263 strict_overflow_p));
14264 }
14265 break;
14266
14267 case MULT_EXPR:
14268 if (TYPE_OVERFLOW_UNDEFINED (type))
14269 {
14270 if (tree_expr_nonzero_warnv_p (op0,
14271 strict_overflow_p)
14272 && tree_expr_nonzero_warnv_p (op1,
14273 strict_overflow_p))
14274 {
14275 *strict_overflow_p = true;
14276 return true;
14277 }
14278 }
14279 break;
14280
14281 case MIN_EXPR:
14282 sub_strict_overflow_p = false;
14283 if (tree_expr_nonzero_warnv_p (op0,
14284 &sub_strict_overflow_p)
14285 && tree_expr_nonzero_warnv_p (op1,
14286 &sub_strict_overflow_p))
14287 {
14288 if (sub_strict_overflow_p)
14289 *strict_overflow_p = true;
/* The minimum of two nonzero operands is itself one of the
operands, so it must be nonzero as well. */
return true;
14290 }
14291 break;
14292
14293 case MAX_EXPR:
14294 sub_strict_overflow_p = false;
14295 if (tree_expr_nonzero_warnv_p (op0,
14296 &sub_strict_overflow_p))
14297 {
14298 if (sub_strict_overflow_p)
14299 *strict_overflow_p = true;
14300
14301 /* When both operands are nonzero, then MAX must be too. */
14302 if (tree_expr_nonzero_warnv_p (op1,
14303 strict_overflow_p))
14304 return true;
14305
14306 /* MAX where operand 0 is positive is positive. */
14307 return tree_expr_nonnegative_warnv_p (op0,
14308 strict_overflow_p);
14309 }
14310 /* MAX where operand 1 is positive is positive. */
14311 else if (tree_expr_nonzero_warnv_p (op1,
14312 &sub_strict_overflow_p)
14313 && tree_expr_nonnegative_warnv_p (op1,
14314 &sub_strict_overflow_p))
14315 {
14316 if (sub_strict_overflow_p)
14317 *strict_overflow_p = true;
14318 return true;
14319 }
14320 break;
14321
14322 case BIT_IOR_EXPR:
14323 return (tree_expr_nonzero_warnv_p (op1,
14324 strict_overflow_p)
14325 || tree_expr_nonzero_warnv_p (op0,
14326 strict_overflow_p));
14327
14328 default:
14329 break;
14330 }
14331
14332 return false;
14333 }
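
/* A worked example, illustrative only: for signed int X already known
   to be non-negative, X + 1 takes the PLUS_EXPR arm when signed
   overflow is undefined: both operands are non-negative and the
   constant 1 is nonzero, so the sum cannot be zero even if it wraps
   on a two's-complement target, which is why *STRICT_OVERFLOW_P is
   left untouched there.  */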
14334
14335 /* Return true when T is an address and is known to be nonzero.
14336 For floating point we further ensure that T is not denormal.
14337 Similar logic is present in nonzero_address_p in rtlanal.c.
14338
14339 If the return value is based on the assumption that signed overflow
14340 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14341 change *STRICT_OVERFLOW_P. */
14342
14343 bool
14344 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14345 {
14346 bool sub_strict_overflow_p;
14347 switch (TREE_CODE (t))
14348 {
14349 case INTEGER_CST:
14350 return !integer_zerop (t);
14351
14352 case ADDR_EXPR:
14353 {
14354 tree base = TREE_OPERAND (t, 0);
14355
14356 if (!DECL_P (base))
14357 base = get_base_address (base);
14358
14359 if (base && TREE_CODE (base) == TARGET_EXPR)
14360 base = TARGET_EXPR_SLOT (base);
14361
14362 if (!base)
14363 return false;
14364
14365 /* For objects in symbol table check if we know they are non-zero.
14366 Don't do anything for variables and functions before symtab is built;
14367 it is quite possible that they will be declared weak later. */
14368 int nonzero_addr = maybe_nonzero_address (base);
14369 if (nonzero_addr >= 0)
14370 return nonzero_addr;
14371
14372 /* Constants are never weak. */
14373 if (CONSTANT_CLASS_P (base))
14374 return true;
14375
14376 return false;
14377 }
14378
14379 case COND_EXPR:
14380 sub_strict_overflow_p = false;
14381 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14382 &sub_strict_overflow_p)
14383 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14384 &sub_strict_overflow_p))
14385 {
14386 if (sub_strict_overflow_p)
14387 *strict_overflow_p = true;
14388 return true;
14389 }
14390 break;
14391
14392 case SSA_NAME:
14393 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
14394 break;
14395 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
14396
14397 default:
14398 break;
14399 }
14400 return false;
14401 }
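
/* A worked example, illustrative only: for an integral SSA_NAME whose
   recorded value range excludes zero, say a name defined from X | 4,
   the SSA_NAME case above asks expr_not_equal_to whether the name can
   ever equal zero at the full precision of its type.  */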
14402
14403 #define integer_valued_real_p(X) \
14404 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14405
14406 #define RECURSE(X) \
14407 ((integer_valued_real_p) (X, depth + 1))
14408
14409 /* Return true if the floating point result of (CODE OP0) has an
14410 integer value. We also allow +Inf, -Inf and NaN to be considered
14411 integer values. Return false for signaling NaN.
14412
14413 DEPTH is the current nesting depth of the query. */
14414
14415 bool
14416 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
14417 {
14418 switch (code)
14419 {
14420 case FLOAT_EXPR:
14421 return true;
14422
14423 case ABS_EXPR:
14424 return RECURSE (op0);
14425
14426 CASE_CONVERT:
14427 {
14428 tree type = TREE_TYPE (op0);
14429 if (TREE_CODE (type) == INTEGER_TYPE)
14430 return true;
14431 if (TREE_CODE (type) == REAL_TYPE)
14432 return RECURSE (op0);
14433 break;
14434 }
14435
14436 default:
14437 break;
14438 }
14439 return false;
14440 }
14441
14442 /* Return true if the floating point result of (CODE OP0 OP1) has an
14443 integer value. We also allow +Inf, -Inf and NaN to be considered
14444 integer values. Return false for signaling NaN.
14445
14446 DEPTH is the current nesting depth of the query. */
14447
14448 bool
14449 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
14450 {
14451 switch (code)
14452 {
14453 case PLUS_EXPR:
14454 case MINUS_EXPR:
14455 case MULT_EXPR:
14456 case MIN_EXPR:
14457 case MAX_EXPR:
14458 return RECURSE (op0) && RECURSE (op1);
14459
14460 default:
14461 break;
14462 }
14463 return false;
14464 }
14465
14466 /* Return true if the floating point result of calling FN with arguments
14467 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
14468 considered integer values. Return false for signaling NaN. If FN
14469 takes fewer than 2 arguments, the remaining ARGn are null.
14470
14471 DEPTH is the current nesting depth of the query. */
14472
14473 bool
14474 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
14475 {
14476 switch (fn)
14477 {
14478 CASE_CFN_CEIL:
14479 CASE_CFN_CEIL_FN:
14480 CASE_CFN_FLOOR:
14481 CASE_CFN_FLOOR_FN:
14482 CASE_CFN_NEARBYINT:
14483 CASE_CFN_NEARBYINT_FN:
14484 CASE_CFN_RINT:
14485 CASE_CFN_RINT_FN:
14486 CASE_CFN_ROUND:
14487 CASE_CFN_ROUND_FN:
14488 CASE_CFN_ROUNDEVEN:
14489 CASE_CFN_ROUNDEVEN_FN:
14490 CASE_CFN_TRUNC:
14491 CASE_CFN_TRUNC_FN:
14492 return true;
14493
14494 CASE_CFN_FMIN:
14495 CASE_CFN_FMIN_FN:
14496 CASE_CFN_FMAX:
14497 CASE_CFN_FMAX_FN:
14498 return RECURSE (arg0) && RECURSE (arg1);
14499
14500 default:
14501 break;
14502 }
14503 return false;
14504 }
14505
14506 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
14507 has an integer value. We also allow +Inf, -Inf and NaN to be
14508 considered integer values. Return false for signaling NaN.
14509
14510 DEPTH is the current nesting depth of the query. */
14511
14512 bool
14513 integer_valued_real_single_p (tree t, int depth)
14514 {
14515 switch (TREE_CODE (t))
14516 {
14517 case REAL_CST:
14518 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
14519
14520 case COND_EXPR:
14521 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14522
14523 case SSA_NAME:
14524 /* Limit the depth of recursion to avoid quadratic behavior.
14525 This is expected to catch almost all occurrences in practice.
14526 If this code misses important cases that unbounded recursion
14527 would not, passes that need this information could be revised
14528 to provide it through dataflow propagation. */
14529 return (!name_registered_for_update_p (t)
14530 && depth < param_max_ssa_name_query_depth
14531 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
14532 depth));
14533
14534 default:
14535 break;
14536 }
14537 return false;
14538 }
14539
14540 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
14541 has an integer value. We also allow +Inf, -Inf and NaN to be
14542 considered integer values. Return false for signaling NaN.
14543
14544 DEPTH is the current nesting depth of the query. */
14545
14546 static bool
14547 integer_valued_real_invalid_p (tree t, int depth)
14548 {
14549 switch (TREE_CODE (t))
14550 {
14551 case COMPOUND_EXPR:
14552 case MODIFY_EXPR:
14553 case BIND_EXPR:
14554 return RECURSE (TREE_OPERAND (t, 1));
14555
14556 case SAVE_EXPR:
14557 return RECURSE (TREE_OPERAND (t, 0));
14558
14559 default:
14560 break;
14561 }
14562 return false;
14563 }
14564
14565 #undef RECURSE
14566 #undef integer_valued_real_p
14567
14568 /* Return true if the floating point expression T has an integer value.
14569 We also allow +Inf, -Inf and NaN to be considered integer values.
14570 Return false for signaling NaN.
14571
14572 DEPTH is the current nesting depth of the query. */
14573
14574 bool
14575 integer_valued_real_p (tree t, int depth)
14576 {
14577 if (t == error_mark_node)
14578 return false;
14579
14580 STRIP_ANY_LOCATION_WRAPPER (t);
14581
14582 tree_code code = TREE_CODE (t);
14583 switch (TREE_CODE_CLASS (code))
14584 {
14585 case tcc_binary:
14586 case tcc_comparison:
14587 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
14588 TREE_OPERAND (t, 1), depth);
14589
14590 case tcc_unary:
14591 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
14592
14593 case tcc_constant:
14594 case tcc_declaration:
14595 case tcc_reference:
14596 return integer_valued_real_single_p (t, depth);
14597
14598 default:
14599 break;
14600 }
14601
14602 switch (code)
14603 {
14604 case COND_EXPR:
14605 case SSA_NAME:
14606 return integer_valued_real_single_p (t, depth);
14607
14608 case CALL_EXPR:
14609 {
14610 tree arg0 = (call_expr_nargs (t) > 0
14611 ? CALL_EXPR_ARG (t, 0)
14612 : NULL_TREE);
14613 tree arg1 = (call_expr_nargs (t) > 1
14614 ? CALL_EXPR_ARG (t, 1)
14615 : NULL_TREE);
14616 return integer_valued_real_call_p (get_call_combined_fn (t),
14617 arg0, arg1, depth);
14618 }
14619
14620 default:
14621 return integer_valued_real_invalid_p (t, depth);
14622 }
14623 }
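
/* A worked example, illustrative only: for the hypothetical
   expression (double) i + trunc (x), the tcc_binary PLUS_EXPR case
   recurses into both operands; (double) i is integer valued via
   FLOAT_EXPR and trunc (x) via CASE_CFN_TRUNC, so the sum is
   integer valued as well.  */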
14624
14625 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14626 attempt to fold the expression to a constant without modifying TYPE,
14627 OP0 or OP1.
14628
14629 If the expression could be simplified to a constant, then return
14630 the constant. If the expression would not be simplified to a
14631 constant, then return NULL_TREE. */
14632
14633 tree
14634 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14635 {
14636 tree tem = fold_binary (code, type, op0, op1);
14637 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14638 }
14639
14640 /* Given the components of a unary expression CODE, TYPE and OP0,
14641 attempt to fold the expression to a constant without modifying
14642 TYPE or OP0.
14643
14644 If the expression could be simplified to a constant, then return
14645 the constant. If the expression would not be simplified to a
14646 constant, then return NULL_TREE. */
14647
14648 tree
14649 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14650 {
14651 tree tem = fold_unary (code, type, op0);
14652 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14653 }
14654
14655 /* If EXP represents referencing an element in a constant string
14656 (either via pointer arithmetic or array indexing), return the
14657 tree representing the value accessed, otherwise return NULL. */
14658
14659 tree
14660 fold_read_from_constant_string (tree exp)
14661 {
14662 if ((TREE_CODE (exp) == INDIRECT_REF
14663 || TREE_CODE (exp) == ARRAY_REF)
14664 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14665 {
14666 tree exp1 = TREE_OPERAND (exp, 0);
14667 tree index;
14668 tree string;
14669 location_t loc = EXPR_LOCATION (exp);
14670
14671 if (TREE_CODE (exp) == INDIRECT_REF)
14672 string = string_constant (exp1, &index, NULL, NULL);
14673 else
14674 {
14675 tree low_bound = array_ref_low_bound (exp);
14676 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14677
14678 /* Optimize the special-case of a zero lower bound.
14679
14680 We convert the low_bound to sizetype to avoid some problems
14681 with constant folding. (E.g. suppose the lower bound is 1,
14682 and its mode is QI. Without the conversion, (ARRAY
14683 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14684 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14685 if (! integer_zerop (low_bound))
14686 index = size_diffop_loc (loc, index,
14687 fold_convert_loc (loc, sizetype, low_bound));
14688
14689 string = exp1;
14690 }
14691
14692 scalar_int_mode char_mode;
14693 if (string
14694 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14695 && TREE_CODE (string) == STRING_CST
14696 && TREE_CODE (index) == INTEGER_CST
14697 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14698 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
14699 &char_mode)
14700 && GET_MODE_SIZE (char_mode) == 1)
14701 return build_int_cst_type (TREE_TYPE (exp),
14702 (TREE_STRING_POINTER (string)
14703 [TREE_INT_CST_LOW (index)]));
14704 }
14705 return NULL;
14706 }
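
/* A worked example, illustrative only: folding "abc"[1] reaches the
   ARRAY_REF path with a zero lower bound, STRING = "abc" and
   INDEX = 1; the index is a constant below TREE_STRING_LENGTH and
   the element mode is a one-byte integer mode, so the reference
   folds to build_int_cst_type of 'b'.  */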
14707
14708 /* Folds a read from vector element at IDX of vector ARG. */
14709
14710 tree
14711 fold_read_from_vector (tree arg, poly_uint64 idx)
14712 {
14713 unsigned HOST_WIDE_INT i;
14714 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
14715 && known_ge (idx, 0u)
14716 && idx.is_constant (&i))
14717 {
14718 if (TREE_CODE (arg) == VECTOR_CST)
14719 return VECTOR_CST_ELT (arg, i);
14720 else if (TREE_CODE (arg) == CONSTRUCTOR)
14721 {
14722 if (i >= CONSTRUCTOR_NELTS (arg))
14723 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
14724 return CONSTRUCTOR_ELT (arg, i)->value;
14725 }
14726 }
14727 return NULL_TREE;
14728 }
14729
14730 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14731 an integer constant, real, or fixed-point constant.
14732
14733 TYPE is the type of the result. */
14734
14735 static tree
14736 fold_negate_const (tree arg0, tree type)
14737 {
14738 tree t = NULL_TREE;
14739
14740 switch (TREE_CODE (arg0))
14741 {
14742 case REAL_CST:
14743 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14744 break;
14745
14746 case FIXED_CST:
14747 {
14748 FIXED_VALUE_TYPE f;
14749 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14750 &(TREE_FIXED_CST (arg0)), NULL,
14751 TYPE_SATURATING (type));
14752 t = build_fixed (type, f);
14753 /* Propagate overflow flags. */
14754 if (overflow_p | TREE_OVERFLOW (arg0))
14755 TREE_OVERFLOW (t) = 1;
14756 break;
14757 }
14758
14759 default:
14760 if (poly_int_tree_p (arg0))
14761 {
14762 wi::overflow_type overflow;
14763 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
14764 t = force_fit_type (type, res, 1,
14765 (overflow && ! TYPE_UNSIGNED (type))
14766 || TREE_OVERFLOW (arg0));
14767 break;
14768 }
14769
14770 gcc_unreachable ();
14771 }
14772
14773 return t;
14774 }
14775
14776 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14777 an integer constant or real constant.
14778
14779 TYPE is the type of the result. */
14780
14781 tree
14782 fold_abs_const (tree arg0, tree type)
14783 {
14784 tree t = NULL_TREE;
14785
14786 switch (TREE_CODE (arg0))
14787 {
14788 case INTEGER_CST:
14789 {
14790 /* If the value is unsigned or non-negative, then the absolute value
14791 is the same as the ordinary value. */
14792 wide_int val = wi::to_wide (arg0);
14793 wi::overflow_type overflow = wi::OVF_NONE;
14794 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
14795 ;
14796
14797 /* If the value is negative, then the absolute value is
14798 its negation. */
14799 else
14800 val = wi::neg (val, &overflow);
14801
14802 /* Force to the destination type, set TREE_OVERFLOW for signed
14803 TYPE only. */
14804 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
14805 }
14806 break;
14807
14808 case REAL_CST:
14809 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14810 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14811 else
14812 t = arg0;
14813 break;
14814
14815 default:
14816 gcc_unreachable ();
14817 }
14818
14819 return t;
14820 }
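
/* A worked example, illustrative only: for 32-bit int, folding
   ABS_EXPR <INT_MIN> negates the value, wi::neg reports overflow,
   and force_fit_type returns INT_MIN again with TREE_OVERFLOW set
   on the result, since no non-negative counterpart exists.  */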
14821
14822 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14823 constant. TYPE is the type of the result. */
14824
14825 static tree
14826 fold_not_const (const_tree arg0, tree type)
14827 {
14828 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14829
14830 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
14831 }
14832
14833 /* Given CODE, a relational operator, the target type, TYPE and two
14834 constant operands OP0 and OP1, return the result of the
14835 relational operation. If the result is not a compile time
14836 constant, then return NULL_TREE. */
14837
14838 static tree
14839 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14840 {
14841 int result, invert;
14842
14843 /* From here on, the only cases we handle are when the result is
14844 known to be a constant. */
14845
14846 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14847 {
14848 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14849 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14850
14851 /* Handle the cases where either operand is a NaN. */
14852 if (real_isnan (c0) || real_isnan (c1))
14853 {
14854 switch (code)
14855 {
14856 case EQ_EXPR:
14857 case ORDERED_EXPR:
14858 result = 0;
14859 break;
14860
14861 case NE_EXPR:
14862 case UNORDERED_EXPR:
14863 case UNLT_EXPR:
14864 case UNLE_EXPR:
14865 case UNGT_EXPR:
14866 case UNGE_EXPR:
14867 case UNEQ_EXPR:
14868 result = 1;
14869 break;
14870
14871 case LT_EXPR:
14872 case LE_EXPR:
14873 case GT_EXPR:
14874 case GE_EXPR:
14875 case LTGT_EXPR:
14876 if (flag_trapping_math)
14877 return NULL_TREE;
14878 result = 0;
14879 break;
14880
14881 default:
14882 gcc_unreachable ();
14883 }
14884
14885 return constant_boolean_node (result, type);
14886 }
14887
14888 return constant_boolean_node (real_compare (code, c0, c1), type);
14889 }
14890
14891 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14892 {
14893 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14894 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14895 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14896 }
14897
14898 /* Handle equality/inequality of complex constants. */
14899 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14900 {
14901 tree rcond = fold_relational_const (code, type,
14902 TREE_REALPART (op0),
14903 TREE_REALPART (op1));
14904 tree icond = fold_relational_const (code, type,
14905 TREE_IMAGPART (op0),
14906 TREE_IMAGPART (op1));
14907 if (code == EQ_EXPR)
14908 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14909 else if (code == NE_EXPR)
14910 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14911 else
14912 return NULL_TREE;
14913 }
14914
14915 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14916 {
14917 if (!VECTOR_TYPE_P (type))
14918 {
14919 /* Have vector comparison with scalar boolean result. */
14920 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14921 && known_eq (VECTOR_CST_NELTS (op0),
14922 VECTOR_CST_NELTS (op1)));
14923 unsigned HOST_WIDE_INT nunits;
14924 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14925 return NULL_TREE;
14926 for (unsigned i = 0; i < nunits; i++)
14927 {
14928 tree elem0 = VECTOR_CST_ELT (op0, i);
14929 tree elem1 = VECTOR_CST_ELT (op1, i);
14930 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
14931 if (tmp == NULL_TREE)
14932 return NULL_TREE;
14933 if (integer_zerop (tmp))
14934 return constant_boolean_node (code == NE_EXPR, type);
14935 }
14936 return constant_boolean_node (code == EQ_EXPR, type);
14937 }
14938 tree_vector_builder elts;
14939 if (!elts.new_binary_operation (type, op0, op1, false))
14940 return NULL_TREE;
14941 unsigned int count = elts.encoded_nelts ();
14942 for (unsigned i = 0; i < count; i++)
14943 {
14944 tree elem_type = TREE_TYPE (type);
14945 tree elem0 = VECTOR_CST_ELT (op0, i);
14946 tree elem1 = VECTOR_CST_ELT (op1, i);
14947
14948 tree tem = fold_relational_const (code, elem_type,
14949 elem0, elem1);
14950
14951 if (tem == NULL_TREE)
14952 return NULL_TREE;
14953
14954 elts.quick_push (build_int_cst (elem_type,
14955 integer_zerop (tem) ? 0 : -1));
14956 }
14957
14958 return elts.build ();
14959 }
14960
14961 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14962
14963 To compute GT, swap the arguments and do LT.
14964 To compute GE, do LT and invert the result.
14965 To compute LE, swap the arguments, do LT and invert the result.
14966 To compute NE, do EQ and invert the result.
14967
14968 Therefore, the code below must handle only EQ and LT. */
14969
14970 if (code == LE_EXPR || code == GT_EXPR)
14971 {
14972 std::swap (op0, op1);
14973 code = swap_tree_comparison (code);
14974 }
14975
14976 /* Note that it is safe to invert for real values here because we
14977 have already handled the one case that it matters. */
14978
14979 invert = 0;
14980 if (code == NE_EXPR || code == GE_EXPR)
14981 {
14982 invert = 1;
14983 code = invert_tree_comparison (code, false);
14984 }
14985
14986 /* Compute a result for LT or EQ if args permit;
14987 otherwise return NULL_TREE. */
14988 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14989 {
14990 if (code == EQ_EXPR)
14991 result = tree_int_cst_equal (op0, op1);
14992 else
14993 result = tree_int_cst_lt (op0, op1);
14994 }
14995 else
14996 return NULL_TREE;
14997
14998 if (invert)
14999 result ^= 1;
15000 return constant_boolean_node (result, type);
15001 }
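
/* A worked example, illustrative only: with C0 = 1.0 and C1 = NaN,
   EQ_EXPR folds to false and UNORDERED_EXPR to true, while LT_EXPR
   is left unfolded under flag_trapping_math because the ordered
   comparison could raise an exception at run time.  */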
15002
15003 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15004 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15005 itself. */
15006
15007 tree
15008 fold_build_cleanup_point_expr (tree type, tree expr)
15009 {
15010 /* If the expression does not have side effects then we don't have to wrap
15011 it with a cleanup point expression. */
15012 if (!TREE_SIDE_EFFECTS (expr))
15013 return expr;
15014
15015 /* If the expression is a return, check whether the expression inside the
15016 return, or the right-hand side of the modify expression inside the
15017 return, has side effects. If either has none, we don't need to wrap
15018 the expression in a cleanup point expression. Note we don't check the
15019 left-hand side of the modify because it should always be a return decl. */
15020 if (TREE_CODE (expr) == RETURN_EXPR)
15021 {
15022 tree op = TREE_OPERAND (expr, 0);
15023 if (!op || !TREE_SIDE_EFFECTS (op))
15024 return expr;
15025 op = TREE_OPERAND (op, 1);
15026 if (!TREE_SIDE_EFFECTS (op))
15027 return expr;
15028 }
15029
15030 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15031 }
15032
15033 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15034 of an indirection through OP0, or NULL_TREE if no simplification is
15035 possible. */
15036
15037 tree
15038 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15039 {
15040 tree sub = op0;
15041 tree subtype;
15042 poly_uint64 const_op01;
15043
15044 STRIP_NOPS (sub);
15045 subtype = TREE_TYPE (sub);
15046 if (!POINTER_TYPE_P (subtype)
15047 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15048 return NULL_TREE;
15049
15050 if (TREE_CODE (sub) == ADDR_EXPR)
15051 {
15052 tree op = TREE_OPERAND (sub, 0);
15053 tree optype = TREE_TYPE (op);
15054
15055 /* *&CONST_DECL -> to the value of the const decl. */
15056 if (TREE_CODE (op) == CONST_DECL)
15057 return DECL_INITIAL (op);
15058 /* *&p => p; make sure to handle *&"str"[cst] here. */
15059 if (type == optype)
15060 {
15061 tree fop = fold_read_from_constant_string (op);
15062 if (fop)
15063 return fop;
15064 else
15065 return op;
15066 }
15067 /* *(foo *)&fooarray => fooarray[0] */
15068 else if (TREE_CODE (optype) == ARRAY_TYPE
15069 && type == TREE_TYPE (optype)
15070 && (!in_gimple_form
15071 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15072 {
15073 tree type_domain = TYPE_DOMAIN (optype);
15074 tree min_val = size_zero_node;
15075 if (type_domain && TYPE_MIN_VALUE (type_domain))
15076 min_val = TYPE_MIN_VALUE (type_domain);
15077 if (in_gimple_form
15078 && TREE_CODE (min_val) != INTEGER_CST)
15079 return NULL_TREE;
15080 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15081 NULL_TREE, NULL_TREE);
15082 }
15083 /* *(foo *)&complexfoo => __real__ complexfoo */
15084 else if (TREE_CODE (optype) == COMPLEX_TYPE
15085 && type == TREE_TYPE (optype))
15086 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15087 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15088 else if (VECTOR_TYPE_P (optype)
15089 && type == TREE_TYPE (optype))
15090 {
15091 tree part_width = TYPE_SIZE (type);
15092 tree index = bitsize_int (0);
15093 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
15094 index);
15095 }
15096 }
15097
15098 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15099 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
15100 {
15101 tree op00 = TREE_OPERAND (sub, 0);
15102 tree op01 = TREE_OPERAND (sub, 1);
15103
15104 STRIP_NOPS (op00);
15105 if (TREE_CODE (op00) == ADDR_EXPR)
15106 {
15107 tree op00type;
15108 op00 = TREE_OPERAND (op00, 0);
15109 op00type = TREE_TYPE (op00);
15110
15111 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15112 if (VECTOR_TYPE_P (op00type)
15113 && type == TREE_TYPE (op00type)
15114 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15115 but we want to treat offsets with MSB set as negative.
15116 For the code below negative offsets are invalid and
15117 TYPE_SIZE of the element is something unsigned, so
15118 check whether op01 fits into poly_int64, which implies
15119 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15120 then just use poly_uint64 because we want to treat the
15121 value as unsigned. */
15122 && tree_fits_poly_int64_p (op01))
15123 {
15124 tree part_width = TYPE_SIZE (type);
15125 poly_uint64 max_offset
15126 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
15127 * TYPE_VECTOR_SUBPARTS (op00type));
15128 if (known_lt (const_op01, max_offset))
15129 {
15130 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
15131 return fold_build3_loc (loc,
15132 BIT_FIELD_REF, type, op00,
15133 part_width, index);
15134 }
15135 }
15136 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15137 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15138 && type == TREE_TYPE (op00type))
15139 {
15140 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
15141 const_op01))
15142 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15143 }
15144 /* ((foo *)&fooarray)[1] => fooarray[1] */
15145 else if (TREE_CODE (op00type) == ARRAY_TYPE
15146 && type == TREE_TYPE (op00type))
15147 {
15148 tree type_domain = TYPE_DOMAIN (op00type);
15149 tree min_val = size_zero_node;
15150 if (type_domain && TYPE_MIN_VALUE (type_domain))
15151 min_val = TYPE_MIN_VALUE (type_domain);
15152 poly_uint64 type_size, index;
15153 if (poly_int_tree_p (min_val)
15154 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
15155 && multiple_p (const_op01, type_size, &index))
15156 {
15157 poly_offset_int off = index + wi::to_poly_offset (min_val);
15158 op01 = wide_int_to_tree (sizetype, off);
15159 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15160 NULL_TREE, NULL_TREE);
15161 }
15162 }
15163 }
15164 }
15165
15166 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15167 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15168 && type == TREE_TYPE (TREE_TYPE (subtype))
15169 && (!in_gimple_form
15170 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15171 {
15172 tree type_domain;
15173 tree min_val = size_zero_node;
15174 sub = build_fold_indirect_ref_loc (loc, sub);
15175 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15176 if (type_domain && TYPE_MIN_VALUE (type_domain))
15177 min_val = TYPE_MIN_VALUE (type_domain);
15178 if (in_gimple_form
15179 && TREE_CODE (min_val) != INTEGER_CST)
15180 return NULL_TREE;
15181 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15182 NULL_TREE);
15183 }
15184
15185 return NULL_TREE;
15186 }
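
/* A worked example, illustrative only: for a hypothetical int a[4],
   simplifying *(int *) &a takes the ADDR_EXPR path above: OPTYPE is
   the array type, TYPE its element type, and the indirection is
   rewritten as the ARRAY_REF a[0] using the domain's minimum
   index.  */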
15187
15188 /* Builds an expression for an indirection through T, simplifying some
15189 cases. */
15190
15191 tree
15192 build_fold_indirect_ref_loc (location_t loc, tree t)
15193 {
15194 tree type = TREE_TYPE (TREE_TYPE (t));
15195 tree sub = fold_indirect_ref_1 (loc, type, t);
15196
15197 if (sub)
15198 return sub;
15199
15200 return build1_loc (loc, INDIRECT_REF, type, t);
15201 }
15202
15203 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15204
15205 tree
15206 fold_indirect_ref_loc (location_t loc, tree t)
15207 {
15208 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15209
15210 if (sub)
15211 return sub;
15212 else
15213 return t;
15214 }
15215
15216 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15217 whose result is ignored. The type of the returned tree need not be
15218 the same as the original expression. */
15219
15220 tree
15221 fold_ignored_result (tree t)
15222 {
15223 if (!TREE_SIDE_EFFECTS (t))
15224 return integer_zero_node;
15225
15226 for (;;)
15227 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15228 {
15229 case tcc_unary:
15230 t = TREE_OPERAND (t, 0);
15231 break;
15232
15233 case tcc_binary:
15234 case tcc_comparison:
15235 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15236 t = TREE_OPERAND (t, 0);
15237 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15238 t = TREE_OPERAND (t, 1);
15239 else
15240 return t;
15241 break;
15242
15243 case tcc_expression:
15244 switch (TREE_CODE (t))
15245 {
15246 case COMPOUND_EXPR:
15247 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15248 return t;
15249 t = TREE_OPERAND (t, 0);
15250 break;
15251
15252 case COND_EXPR:
15253 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15254 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15255 return t;
15256 t = TREE_OPERAND (t, 0);
15257 break;
15258
15259 default:
15260 return t;
15261 }
15262 break;
15263
15264 default:
15265 return t;
15266 }
15267 }
15268
15269 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15270
15271 tree
15272 round_up_loc (location_t loc, tree value, unsigned int divisor)
15273 {
15274 tree div = NULL_TREE;
15275
15276 if (divisor == 1)
15277 return value;
15278
15279 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15280 have to do anything. Only do this when we are not given a const,
15281 because in that case, this check is more expensive than just
15282 doing it. */
15283 if (TREE_CODE (value) != INTEGER_CST)
15284 {
15285 div = build_int_cst (TREE_TYPE (value), divisor);
15286
15287 if (multiple_of_p (TREE_TYPE (value), value, div))
15288 return value;
15289 }
15290
15291 /* If divisor is a power of two, simplify this to bit manipulation. */
15292 if (pow2_or_zerop (divisor))
15293 {
15294 if (TREE_CODE (value) == INTEGER_CST)
15295 {
15296 wide_int val = wi::to_wide (value);
15297 bool overflow_p;
15298
15299 if ((val & (divisor - 1)) == 0)
15300 return value;
15301
15302 overflow_p = TREE_OVERFLOW (value);
15303 val += divisor - 1;
15304 val &= (int) -divisor;
15305 if (val == 0)
15306 overflow_p = true;
15307
15308 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15309 }
15310 else
15311 {
15312 tree t;
15313
15314 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15315 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15316 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
15317 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15318 }
15319 }
15320 else
15321 {
15322 if (!div)
15323 div = build_int_cst (TREE_TYPE (value), divisor);
15324 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15325 value = size_binop_loc (loc, MULT_EXPR, value, div);
15326 }
15327
15328 return value;
15329 }
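
/* A worked arithmetic example, illustrative only: rounding
   VALUE = 13 up to a multiple of DIVISOR = 8 takes the power-of-two
   branch: (13 + 7) & -8 == 20 & ~7 == 16.  The rounding-down
   counterpart below computes 13 & -8 == 8 instead.  */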
15330
15331 /* Likewise, but round down. */
15332
15333 tree
15334 round_down_loc (location_t loc, tree value, int divisor)
15335 {
15336 tree div = NULL_TREE;
15337
15338 gcc_assert (divisor > 0);
15339 if (divisor == 1)
15340 return value;
15341
15342 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15343 have to do anything. Only do this when we are not given a const,
15344 because in that case, this check is more expensive than just
15345 doing it. */
15346 if (TREE_CODE (value) != INTEGER_CST)
15347 {
15348 div = build_int_cst (TREE_TYPE (value), divisor);
15349
15350 if (multiple_of_p (TREE_TYPE (value), value, div))
15351 return value;
15352 }
15353
15354 /* If divisor is a power of two, simplify this to bit manipulation. */
15355 if (pow2_or_zerop (divisor))
15356 {
15357 tree t;
15358
15359 t = build_int_cst (TREE_TYPE (value), -divisor);
15360 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15361 }
15362 else
15363 {
15364 if (!div)
15365 div = build_int_cst (TREE_TYPE (value), divisor);
15366 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15367 value = size_binop_loc (loc, MULT_EXPR, value, div);
15368 }
15369
15370 return value;
15371 }
15372
15373 /* Returns the pointer to the base of the object addressed by EXP and
15374 extracts the information about the offset of the access, storing it
15375 to PBITPOS and POFFSET. */
15376
15377 static tree
15378 split_address_to_core_and_offset (tree exp,
15379 poly_int64_pod *pbitpos, tree *poffset)
15380 {
15381 tree core;
15382 machine_mode mode;
15383 int unsignedp, reversep, volatilep;
15384 poly_int64 bitsize;
15385 location_t loc = EXPR_LOCATION (exp);
15386
15387 if (TREE_CODE (exp) == ADDR_EXPR)
15388 {
15389 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15390 poffset, &mode, &unsignedp, &reversep,
15391 &volatilep);
15392 core = build_fold_addr_expr_loc (loc, core);
15393 }
15394 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
15395 {
15396 core = TREE_OPERAND (exp, 0);
15397 STRIP_NOPS (core);
15398 *pbitpos = 0;
15399 *poffset = TREE_OPERAND (exp, 1);
15400 if (poly_int_tree_p (*poffset))
15401 {
15402 poly_offset_int tem
15403 = wi::sext (wi::to_poly_offset (*poffset),
15404 TYPE_PRECISION (TREE_TYPE (*poffset)));
15405 tem <<= LOG2_BITS_PER_UNIT;
15406 if (tem.to_shwi (pbitpos))
15407 *poffset = NULL_TREE;
15408 }
15409 }
15410 else
15411 {
15412 core = exp;
15413 *pbitpos = 0;
15414 *poffset = NULL_TREE;
15415 }
15416
15417 return core;
15418 }
15419
15420 /* Returns true if addresses of E1 and E2 differ by a constant, false
15421 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15422
15423 bool
15424 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
15425 {
15426 tree core1, core2;
15427 poly_int64 bitpos1, bitpos2;
15428 tree toffset1, toffset2, tdiff, type;
15429
15430 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15431 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15432
15433 poly_int64 bytepos1, bytepos2;
15434 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
15435 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
15436 || !operand_equal_p (core1, core2, 0))
15437 return false;
15438
15439 if (toffset1 && toffset2)
15440 {
15441 type = TREE_TYPE (toffset1);
15442 if (type != TREE_TYPE (toffset2))
15443 toffset2 = fold_convert (type, toffset2);
15444
15445 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15446 if (!cst_and_fits_in_hwi (tdiff))
15447 return false;
15448
15449 *diff = int_cst_value (tdiff);
15450 }
15451 else if (toffset1 || toffset2)
15452 {
15453 /* If only one of the offsets is non-constant, the difference cannot
15454 be a constant. */
15455 return false;
15456 }
15457 else
15458 *diff = 0;
15459
15460 *diff += bytepos1 - bytepos2;
15461 return true;
15462 }
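
/* A worked example, illustrative only, assuming 4-byte int: for a
   hypothetical int a[8], ptr_difference_const (&a[3], &a[1], &diff)
   splits both addresses over the common core &a with bit positions
   96 and 32, so the byte positions are 12 and 4 and *DIFF is set
   to 8.  */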
15463
15464 /* Return OFF converted to a pointer offset type suitable as offset for
15465 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15466 tree
15467 convert_to_ptrofftype_loc (location_t loc, tree off)
15468 {
15469 return fold_convert_loc (loc, sizetype, off);
15470 }
15471
15472 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15473 tree
15474 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
15475 {
15476 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15477 ptr, convert_to_ptrofftype_loc (loc, off));
15478 }
15479
15480 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15481 tree
15482 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
15483 {
15484 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15485 ptr, size_int (off));
15486 }
15487
15488 /* Return a pointer P to a NUL-terminated string containing the sequence
15489 of bytes corresponding to the representation of the object referred to
15490 by SRC (or a subsequence of such bytes within it if SRC is a reference
15491 to an initialized constant array plus some constant offset).
15492 If STRSIZE is non-null, store the number of bytes in the constant
15493 sequence including the terminating NUL byte. *STRSIZE is equal to
15494 sizeof(A) - OFFSET where A is the array that stores the constant
15495 sequence that SRC points to and OFFSET is the byte offset of SRC from
15496 the beginning of A. SRC need not point to a string or even an array
15497 of characters but may point to an object of any type. */
15498
15499 const char *
15500 c_getstr (tree src, unsigned HOST_WIDE_INT *strsize /* = NULL */)
15501 {
15502 /* The offset into the array A storing the string, and A's byte size. */
15503 tree offset_node;
15504 tree mem_size;
15505
15506 if (strsize)
15507 *strsize = 0;
15508
15509 src = string_constant (src, &offset_node, &mem_size, NULL);
15510 if (!src)
15511 return NULL;
15512
15513 unsigned HOST_WIDE_INT offset = 0;
15514 if (offset_node != NULL_TREE)
15515 {
15516 if (!tree_fits_uhwi_p (offset_node))
15517 return NULL;
15518 else
15519 offset = tree_to_uhwi (offset_node);
15520 }
15521
15522 if (!tree_fits_uhwi_p (mem_size))
15523 return NULL;
15524
15525 /* ARRAY_SIZE is the byte size of the array the constant sequence
15526 is stored in and equal to sizeof A. INIT_BYTES is the number
15527 of bytes in the constant sequence used to initialize the array,
15528 including any embedded NULs as well as the terminating NUL (for
15529 strings), but not including any trailing zeros/NULs past
15530 the terminating one appended implicitly to a string literal to
15531 zero out the remainder of the array it's stored in. For example,
15532 given:
15533 const char a[7] = "abc\0d";
15534 n = strlen (a + 1);
15535 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
15536 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
15537 is equal to strlen (A) + 1. */
15538 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
15539 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
15540
15541 /* Ideally this would turn into a gcc_checking_assert over time. */
15542 if (init_bytes > array_size)
15543 init_bytes = array_size;
15544
15545 const char *string = TREE_STRING_POINTER (src);
15546
15551 if (init_bytes == 0 || offset >= array_size)
15552 return NULL;
15553
15554 if (strsize)
15555 {
15556 /* Compute and store the number of characters from the beginning
15557 of the substring at OFFSET to the end, including the terminating
15558 nul.  Offsets past the initialized bytes refer to the empty string.  */
15559 if (offset < init_bytes)
15560 *strsize = init_bytes - offset;
15561 else
15562 *strsize = 1;
15563 }
15564 else
15565 {
15566 tree eltype = TREE_TYPE (TREE_TYPE (src));
15567 /* Support only properly NUL-terminated single byte strings. */
15568 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
15569 return NULL;
15570 if (string[init_bytes - 1] != '\0')
15571 return NULL;
15572 }
15573
15574 return offset < init_bytes ? string + offset : "";
15575 }
15576
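/* Example (editor's sketch): with the array from the comment above,

     const char a[7] = "abc\0d";

   c_getstr (&a[1], &n) returns a pointer reading "bc" as a string and
   sets N to 5 (INIT_BYTES - OFFSET == 6 - 1), while c_getstr (&a[6], &n)
   returns "" with N == 1, because the offset lies past the initialized
   bytes but still inside the array.  */
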
15577 /* Given a tree T, compute which bits in T may be nonzero. */
15578
15579 wide_int
15580 tree_nonzero_bits (const_tree t)
15581 {
15582 switch (TREE_CODE (t))
15583 {
15584 case INTEGER_CST:
15585 return wi::to_wide (t);
15586 case SSA_NAME:
15587 return get_nonzero_bits (t);
15588 case NON_LVALUE_EXPR:
15589 case SAVE_EXPR:
15590 return tree_nonzero_bits (TREE_OPERAND (t, 0));
15591 case BIT_AND_EXPR:
15592 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15593 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15594 case BIT_IOR_EXPR:
15595 case BIT_XOR_EXPR:
15596 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15597 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15598 case COND_EXPR:
15599 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
15600 tree_nonzero_bits (TREE_OPERAND (t, 2)));
15601 CASE_CONVERT:
15602 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15603 TYPE_PRECISION (TREE_TYPE (t)),
15604 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
15605 case PLUS_EXPR:
15606 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
15607 {
15608 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
15609 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
15610 if (wi::bit_and (nzbits1, nzbits2) == 0)
15611 return wi::bit_or (nzbits1, nzbits2);
15612 }
15613 break;
15614 case LSHIFT_EXPR:
15615 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15616 {
15617 tree type = TREE_TYPE (t);
15618 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15619 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15620 TYPE_PRECISION (type));
15621 return wi::neg_p (arg1)
15622 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
15623 : wi::lshift (nzbits, arg1);
15624 }
15625 break;
15626 case RSHIFT_EXPR:
15627 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15628 {
15629 tree type = TREE_TYPE (t);
15630 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15631 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15632 TYPE_PRECISION (type));
15633 return wi::neg_p (arg1)
15634 ? wi::lshift (nzbits, -arg1)
15635 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
15636 }
15637 break;
15638 default:
15639 break;
15640 }
15641
15642 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
15643 }
15644
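/* Example (editor's sketch): for the tree (x & 0xf0) << 1, the
   BIT_AND_EXPR case intersects the unknown bits of X (all ones) with
   0xf0, and the LSHIFT_EXPR case then shifts that mask left, giving
   0x1e0: only bits 5-8 of the result can ever be nonzero.  Any tree
   code not handled above falls through to the conservative all-ones
   answer.  */
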
15645 #if CHECKING_P
15646
15647 namespace selftest {
15648
15649 /* Helper functions for writing tests of folding trees. */
15650
15651 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
15652
15653 static void
15654 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
15655 tree constant)
15656 {
15657 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
15658 }
15659
15660 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
15661 wrapping WRAPPED_EXPR. */
15662
15663 static void
15664 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
15665 tree wrapped_expr)
15666 {
15667 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
15668 ASSERT_NE (wrapped_expr, result);
15669 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
15670 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
15671 }
15672
15673 /* Verify that various arithmetic binary operations are folded
15674 correctly. */
15675
15676 static void
15677 test_arithmetic_folding ()
15678 {
15679 tree type = integer_type_node;
15680 tree x = create_tmp_var_raw (type, "x");
15681 tree zero = build_zero_cst (type);
15682 tree one = build_int_cst (type, 1);
15683
15684 /* Addition. */
15685 /* 1 <-- (0 + 1) */
15686 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
15687 one);
15688 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
15689 one);
15690
15691 /* (nonlvalue)x <-- (x + 0) */
15692 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
15693 x);
15694
15695 /* Subtraction. */
15696 /* 0 <-- (x - x) */
15697 assert_binop_folds_to_const (x, MINUS_EXPR, x,
15698 zero);
15699 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
15700 x);
15701
15702 /* Multiplication. */
15703 /* 0 <-- (x * 0) */
15704 assert_binop_folds_to_const (x, MULT_EXPR, zero,
15705 zero);
15706
15707 /* (nonlvalue)x <-- (x * 1) */
15708 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
15709 x);
15710 }
15711
15712 /* Verify that various binary operations on vectors are folded
15713 correctly. */
15714
15715 static void
15716 test_vector_folding ()
15717 {
15718 tree inner_type = integer_type_node;
15719 tree type = build_vector_type (inner_type, 4);
15720 tree zero = build_zero_cst (type);
15721 tree one = build_one_cst (type);
15722 tree index = build_index_vector (type, 0, 1);
15723
15724 /* Verify equality tests that return a scalar boolean result. */
15725 tree res_type = boolean_type_node;
15726 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
15727 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
15728 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
15729 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
15730 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
15731 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15732 index, one)));
15733 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
15734 index, index)));
15735 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15736 index, index)));
15737 }
15738
15739 /* Verify folding of VEC_DUPLICATE_EXPRs. */
15740
15741 static void
15742 test_vec_duplicate_folding ()
15743 {
15744 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
15745 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
15746 /* This will be 1 if VEC_MODE isn't a vector mode. */
15747 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
15748
15749 tree type = build_vector_type (ssizetype, nunits);
15750 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
15751 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
15752 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
15753 }
15754
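/* Editor's sketch (not part of the original suite): verify a couple of
   simple cases of tree_nonzero_bits, following directly from its
   INTEGER_CST and BIT_AND_EXPR cases.  The function name and the choice
   of trees are this sketch's own; it is registered at the end of
   fold_const_c_tests below.  */

static void
test_nonzero_bits_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree mask = build_int_cst (type, 0xff);

  /* For a constant, the nonzero bits are exactly its own bits.  */
  ASSERT_TRUE (wi::eq_p (tree_nonzero_bits (mask), wi::to_wide (mask)));

  /* X has unknown bits, so X & 0xff is bounded by the mask.  */
  tree and_expr = build2 (BIT_AND_EXPR, type, x, mask);
  ASSERT_TRUE (wi::eq_p (tree_nonzero_bits (and_expr), wi::to_wide (mask)));
}
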
15755 /* Run all of the selftests within this file. */
15756
15757 void
15758 fold_const_c_tests ()
15759 {
15760 test_arithmetic_folding ();
15761 test_vector_folding ();
15762 test_vec_duplicate_folding ();
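  /* Editor's sketch added above.  */
  test_nonzero_bits_folding ();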
15763 }
15764
15765 } // namespace selftest
15766
15767 #endif /* CHECKING_P */