New POINTER_DIFF_EXPR
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "params.h"
75 #include "tree-into-ssa.h"
76 #include "md5.h"
77 #include "case-cfn-macros.h"
78 #include "stringpool.h"
79 #include "tree-vrp.h"
80 #include "tree-ssanames.h"
81 #include "selftest.h"
82 #include "stringpool.h"
83 #include "attribs.h"
84
85 /* Nonzero if we are folding constants inside an initializer; zero
86 otherwise. */
87 int folding_initializer = 0;
88
89 /* The following constants represent a bit based encoding of GCC's
90 comparison operators. This encoding simplifies transformations
91 on relational comparison operators, such as AND and OR. */
92 enum comparison_code {
93 COMPCODE_FALSE = 0,
94 COMPCODE_LT = 1,
95 COMPCODE_EQ = 2,
96 COMPCODE_LE = 3,
97 COMPCODE_GT = 4,
98 COMPCODE_LTGT = 5,
99 COMPCODE_GE = 6,
100 COMPCODE_ORD = 7,
101 COMPCODE_UNORD = 8,
102 COMPCODE_UNLT = 9,
103 COMPCODE_UNEQ = 10,
104 COMPCODE_UNLE = 11,
105 COMPCODE_UNGT = 12,
106 COMPCODE_NE = 13,
107 COMPCODE_UNGE = 14,
108 COMPCODE_TRUE = 15
109 };
110
111 static bool negate_expr_p (tree);
112 static tree negate_expr (tree);
113 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
114 static enum comparison_code comparison_to_compcode (enum tree_code);
115 static enum tree_code compcode_to_comparison (enum comparison_code);
116 static int twoval_comparison_p (tree, tree *, tree *, int *);
117 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
118 static tree optimize_bit_field_compare (location_t, enum tree_code,
119 tree, tree, tree);
120 static int simple_operand_p (const_tree);
121 static bool simple_operand_p_2 (tree);
122 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
123 static tree range_predecessor (tree);
124 static tree range_successor (tree);
125 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
126 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
127 static tree unextend (tree, int, int, tree);
128 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
129 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
130 static tree fold_binary_op_with_conditional_arg (location_t,
131 enum tree_code, tree,
132 tree, tree,
133 tree, tree, int);
134 static tree fold_negate_const (tree, tree);
135 static tree fold_not_const (const_tree, tree);
136 static tree fold_relational_const (enum tree_code, tree, tree, tree);
137 static tree fold_convert_const (enum tree_code, tree, tree);
138 static tree fold_view_convert_expr (tree, tree);
139 static tree fold_negate_expr (location_t, tree);
140
141
142 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
143 Otherwise, return LOC. */
144
145 static location_t
146 expr_location_or (tree t, location_t loc)
147 {
148 location_t tloc = EXPR_LOCATION (t);
149 return tloc == UNKNOWN_LOCATION ? loc : tloc;
150 }
151
152 /* Similar to protected_set_expr_location, but never modify x in place,
153 if location can and needs to be set, unshare it. */
154
155 static inline tree
156 protected_set_expr_location_unshare (tree x, location_t loc)
157 {
158 if (CAN_HAVE_LOCATION_P (x)
159 && EXPR_LOCATION (x) != loc
160 && !(TREE_CODE (x) == SAVE_EXPR
161 || TREE_CODE (x) == TARGET_EXPR
162 || TREE_CODE (x) == BIND_EXPR))
163 {
164 x = copy_node (x);
165 SET_EXPR_LOCATION (x, loc);
166 }
167 return x;
168 }
169 \f
170 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
171 division and returns the quotient. Otherwise returns
172 NULL_TREE. */
173
174 tree
175 div_if_zero_remainder (const_tree arg1, const_tree arg2)
176 {
177 widest_int quo;
178
179 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
180 SIGNED, &quo))
181 return wide_int_to_tree (TREE_TYPE (arg1), quo);
182
183 return NULL_TREE;
184 }
185 \f
186 /* This is nonzero if we should defer warnings about undefined
187 overflow. This facility exists because these warnings are a
188 special case. The code to estimate loop iterations does not want
189 to issue any warnings, since it works with expressions which do not
190 occur in user code. Various bits of cleanup code call fold(), but
191 only use the result if it has certain characteristics (e.g., is a
192 constant); that code only wants to issue a warning if the result is
193 used. */
194
195 static int fold_deferring_overflow_warnings;
196
197 /* If a warning about undefined overflow is deferred, this is the
198 warning. Note that this may cause us to turn two warnings into
199 one, but that is fine since it is sufficient to only give one
200 warning per expression. */
201
202 static const char* fold_deferred_overflow_warning;
203
204 /* If a warning about undefined overflow is deferred, this is the
205 level at which the warning should be emitted. */
206
207 static enum warn_strict_overflow_code fold_deferred_overflow_code;
208
209 /* Start deferring overflow warnings. We could use a stack here to
210 permit nested calls, but at present it is not necessary. */
211
212 void
213 fold_defer_overflow_warnings (void)
214 {
215 ++fold_deferring_overflow_warnings;
216 }
217
218 /* Stop deferring overflow warnings. If there is a pending warning,
219 and ISSUE is true, then issue the warning if appropriate. STMT is
220 the statement with which the warning should be associated (used for
221 location information); STMT may be NULL. CODE is the level of the
222 warning--a warn_strict_overflow_code value. This function will use
223 the smaller of CODE and the deferred code when deciding whether to
224 issue the warning. CODE may be zero to mean to always use the
225 deferred code. */
226
227 void
228 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
229 {
230 const char *warnmsg;
231 location_t locus;
232
233 gcc_assert (fold_deferring_overflow_warnings > 0);
234 --fold_deferring_overflow_warnings;
235 if (fold_deferring_overflow_warnings > 0)
236 {
237 if (fold_deferred_overflow_warning != NULL
238 && code != 0
239 && code < (int) fold_deferred_overflow_code)
240 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
241 return;
242 }
243
244 warnmsg = fold_deferred_overflow_warning;
245 fold_deferred_overflow_warning = NULL;
246
247 if (!issue || warnmsg == NULL)
248 return;
249
250 if (gimple_no_warning_p (stmt))
251 return;
252
253 /* Use the smallest code level when deciding to issue the
254 warning. */
255 if (code == 0 || code > (int) fold_deferred_overflow_code)
256 code = fold_deferred_overflow_code;
257
258 if (!issue_strict_overflow_warning (code))
259 return;
260
261 if (stmt == NULL)
262 locus = input_location;
263 else
264 locus = gimple_location (stmt);
265 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
266 }
267
268 /* Stop deferring overflow warnings, ignoring any deferred
269 warnings. */
270
271 void
272 fold_undefer_and_ignore_overflow_warnings (void)
273 {
274 fold_undefer_overflow_warnings (false, NULL, 0);
275 }
276
277 /* Whether we are deferring overflow warnings. */
278
279 bool
280 fold_deferring_overflow_warnings_p (void)
281 {
282 return fold_deferring_overflow_warnings > 0;
283 }
284
285 /* This is called when we fold something based on the fact that signed
286 overflow is undefined. */
287
288 void
289 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
290 {
291 if (fold_deferring_overflow_warnings > 0)
292 {
293 if (fold_deferred_overflow_warning == NULL
294 || wc < fold_deferred_overflow_code)
295 {
296 fold_deferred_overflow_warning = gmsgid;
297 fold_deferred_overflow_code = wc;
298 }
299 }
300 else if (issue_strict_overflow_warning (wc))
301 warning (OPT_Wstrict_overflow, gmsgid);
302 }
303 \f
304 /* Return true if the built-in mathematical function specified by CODE
305 is odd, i.e. -f(x) == f(-x). */
306
307 bool
308 negate_mathfn_p (combined_fn fn)
309 {
310 switch (fn)
311 {
312 CASE_CFN_ASIN:
313 CASE_CFN_ASINH:
314 CASE_CFN_ATAN:
315 CASE_CFN_ATANH:
316 CASE_CFN_CASIN:
317 CASE_CFN_CASINH:
318 CASE_CFN_CATAN:
319 CASE_CFN_CATANH:
320 CASE_CFN_CBRT:
321 CASE_CFN_CPROJ:
322 CASE_CFN_CSIN:
323 CASE_CFN_CSINH:
324 CASE_CFN_CTAN:
325 CASE_CFN_CTANH:
326 CASE_CFN_ERF:
327 CASE_CFN_LLROUND:
328 CASE_CFN_LROUND:
329 CASE_CFN_ROUND:
330 CASE_CFN_SIN:
331 CASE_CFN_SINH:
332 CASE_CFN_TAN:
333 CASE_CFN_TANH:
334 CASE_CFN_TRUNC:
335 return true;
336
337 CASE_CFN_LLRINT:
338 CASE_CFN_LRINT:
339 CASE_CFN_NEARBYINT:
340 CASE_CFN_RINT:
341 return !flag_rounding_math;
342
343 default:
344 break;
345 }
346 return false;
347 }
348
349 /* Check whether we may negate an integer constant T without causing
350 overflow. */
351
352 bool
353 may_negate_without_overflow_p (const_tree t)
354 {
355 tree type;
356
357 gcc_assert (TREE_CODE (t) == INTEGER_CST);
358
359 type = TREE_TYPE (t);
360 if (TYPE_UNSIGNED (type))
361 return false;
362
363 return !wi::only_sign_bit_p (wi::to_wide (t));
364 }
365
366 /* Determine whether an expression T can be cheaply negated using
367 the function negate_expr without introducing undefined overflow. */
368
369 static bool
370 negate_expr_p (tree t)
371 {
372 tree type;
373
374 if (t == 0)
375 return false;
376
377 type = TREE_TYPE (t);
378
379 STRIP_SIGN_NOPS (t);
380 switch (TREE_CODE (t))
381 {
382 case INTEGER_CST:
383 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
384 return true;
385
386 /* Check that -CST will not overflow type. */
387 return may_negate_without_overflow_p (t);
388 case BIT_NOT_EXPR:
389 return (INTEGRAL_TYPE_P (type)
390 && TYPE_OVERFLOW_WRAPS (type));
391
392 case FIXED_CST:
393 return true;
394
395 case NEGATE_EXPR:
396 return !TYPE_OVERFLOW_SANITIZED (type);
397
398 case REAL_CST:
399 /* We want to canonicalize to positive real constants. Pretend
400 that only negative ones can be easily negated. */
401 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
402
403 case COMPLEX_CST:
404 return negate_expr_p (TREE_REALPART (t))
405 && negate_expr_p (TREE_IMAGPART (t));
406
407 case VECTOR_CST:
408 {
409 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
410 return true;
411
412 int count = VECTOR_CST_NELTS (t), i;
413
414 for (i = 0; i < count; i++)
415 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
416 return false;
417
418 return true;
419 }
420
421 case COMPLEX_EXPR:
422 return negate_expr_p (TREE_OPERAND (t, 0))
423 && negate_expr_p (TREE_OPERAND (t, 1));
424
425 case CONJ_EXPR:
426 return negate_expr_p (TREE_OPERAND (t, 0));
427
428 case PLUS_EXPR:
429 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
430 || HONOR_SIGNED_ZEROS (element_mode (type))
431 || (ANY_INTEGRAL_TYPE_P (type)
432 && ! TYPE_OVERFLOW_WRAPS (type)))
433 return false;
434 /* -(A + B) -> (-B) - A. */
435 if (negate_expr_p (TREE_OPERAND (t, 1)))
436 return true;
437 /* -(A + B) -> (-A) - B. */
438 return negate_expr_p (TREE_OPERAND (t, 0));
439
440 case MINUS_EXPR:
441 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
442 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
443 && !HONOR_SIGNED_ZEROS (element_mode (type))
444 && (! ANY_INTEGRAL_TYPE_P (type)
445 || TYPE_OVERFLOW_WRAPS (type));
446
447 case MULT_EXPR:
448 if (TYPE_UNSIGNED (type))
449 break;
450 /* INT_MIN/n * n doesn't overflow while negating one operand it does
451 if n is a (negative) power of two. */
452 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
453 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
454 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
455 && (wi::popcount
456 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
457 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
458 && (wi::popcount
459 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
460 break;
461
462 /* Fall through. */
463
464 case RDIV_EXPR:
465 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
466 return negate_expr_p (TREE_OPERAND (t, 1))
467 || negate_expr_p (TREE_OPERAND (t, 0));
468 break;
469
470 case TRUNC_DIV_EXPR:
471 case ROUND_DIV_EXPR:
472 case EXACT_DIV_EXPR:
473 if (TYPE_UNSIGNED (type))
474 break;
475 if (negate_expr_p (TREE_OPERAND (t, 0)))
476 return true;
477 /* In general we can't negate B in A / B, because if A is INT_MIN and
478 B is 1, we may turn this into INT_MIN / -1 which is undefined
479 and actually traps on some architectures. */
480 if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
481 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
482 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
483 && ! integer_onep (TREE_OPERAND (t, 1))))
484 return negate_expr_p (TREE_OPERAND (t, 1));
485 break;
486
487 case NOP_EXPR:
488 /* Negate -((double)float) as (double)(-float). */
489 if (TREE_CODE (type) == REAL_TYPE)
490 {
491 tree tem = strip_float_extensions (t);
492 if (tem != t)
493 return negate_expr_p (tem);
494 }
495 break;
496
497 case CALL_EXPR:
498 /* Negate -f(x) as f(-x). */
499 if (negate_mathfn_p (get_call_combined_fn (t)))
500 return negate_expr_p (CALL_EXPR_ARG (t, 0));
501 break;
502
503 case RSHIFT_EXPR:
504 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
505 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
506 {
507 tree op1 = TREE_OPERAND (t, 1);
508 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
509 return true;
510 }
511 break;
512
513 default:
514 break;
515 }
516 return false;
517 }
518
519 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
520 simplification is possible.
521 If negate_expr_p would return true for T, NULL_TREE will never be
522 returned. */
523
524 static tree
525 fold_negate_expr_1 (location_t loc, tree t)
526 {
527 tree type = TREE_TYPE (t);
528 tree tem;
529
530 switch (TREE_CODE (t))
531 {
532 /* Convert - (~A) to A + 1. */
533 case BIT_NOT_EXPR:
534 if (INTEGRAL_TYPE_P (type))
535 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
536 build_one_cst (type));
537 break;
538
539 case INTEGER_CST:
540 tem = fold_negate_const (t, type);
541 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
542 || (ANY_INTEGRAL_TYPE_P (type)
543 && !TYPE_OVERFLOW_TRAPS (type)
544 && TYPE_OVERFLOW_WRAPS (type))
545 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
546 return tem;
547 break;
548
549 case REAL_CST:
550 tem = fold_negate_const (t, type);
551 return tem;
552
553 case FIXED_CST:
554 tem = fold_negate_const (t, type);
555 return tem;
556
557 case COMPLEX_CST:
558 {
559 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
560 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
561 if (rpart && ipart)
562 return build_complex (type, rpart, ipart);
563 }
564 break;
565
566 case VECTOR_CST:
567 {
568 int count = VECTOR_CST_NELTS (t), i;
569
570 auto_vec<tree, 32> elts (count);
571 for (i = 0; i < count; i++)
572 {
573 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
574 if (elt == NULL_TREE)
575 return NULL_TREE;
576 elts.quick_push (elt);
577 }
578
579 return build_vector (type, elts);
580 }
581
582 case COMPLEX_EXPR:
583 if (negate_expr_p (t))
584 return fold_build2_loc (loc, COMPLEX_EXPR, type,
585 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
586 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
587 break;
588
589 case CONJ_EXPR:
590 if (negate_expr_p (t))
591 return fold_build1_loc (loc, CONJ_EXPR, type,
592 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
593 break;
594
595 case NEGATE_EXPR:
596 if (!TYPE_OVERFLOW_SANITIZED (type))
597 return TREE_OPERAND (t, 0);
598 break;
599
600 case PLUS_EXPR:
601 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
602 && !HONOR_SIGNED_ZEROS (element_mode (type)))
603 {
604 /* -(A + B) -> (-B) - A. */
605 if (negate_expr_p (TREE_OPERAND (t, 1)))
606 {
607 tem = negate_expr (TREE_OPERAND (t, 1));
608 return fold_build2_loc (loc, MINUS_EXPR, type,
609 tem, TREE_OPERAND (t, 0));
610 }
611
612 /* -(A + B) -> (-A) - B. */
613 if (negate_expr_p (TREE_OPERAND (t, 0)))
614 {
615 tem = negate_expr (TREE_OPERAND (t, 0));
616 return fold_build2_loc (loc, MINUS_EXPR, type,
617 tem, TREE_OPERAND (t, 1));
618 }
619 }
620 break;
621
622 case MINUS_EXPR:
623 /* - (A - B) -> B - A */
624 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
625 && !HONOR_SIGNED_ZEROS (element_mode (type)))
626 return fold_build2_loc (loc, MINUS_EXPR, type,
627 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
628 break;
629
630 case MULT_EXPR:
631 if (TYPE_UNSIGNED (type))
632 break;
633
634 /* Fall through. */
635
636 case RDIV_EXPR:
637 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
638 {
639 tem = TREE_OPERAND (t, 1);
640 if (negate_expr_p (tem))
641 return fold_build2_loc (loc, TREE_CODE (t), type,
642 TREE_OPERAND (t, 0), negate_expr (tem));
643 tem = TREE_OPERAND (t, 0);
644 if (negate_expr_p (tem))
645 return fold_build2_loc (loc, TREE_CODE (t), type,
646 negate_expr (tem), TREE_OPERAND (t, 1));
647 }
648 break;
649
650 case TRUNC_DIV_EXPR:
651 case ROUND_DIV_EXPR:
652 case EXACT_DIV_EXPR:
653 if (TYPE_UNSIGNED (type))
654 break;
655 if (negate_expr_p (TREE_OPERAND (t, 0)))
656 return fold_build2_loc (loc, TREE_CODE (t), type,
657 negate_expr (TREE_OPERAND (t, 0)),
658 TREE_OPERAND (t, 1));
659 /* In general we can't negate B in A / B, because if A is INT_MIN and
660 B is 1, we may turn this into INT_MIN / -1 which is undefined
661 and actually traps on some architectures. */
662 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
663 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
664 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
665 && ! integer_onep (TREE_OPERAND (t, 1))))
666 && negate_expr_p (TREE_OPERAND (t, 1)))
667 return fold_build2_loc (loc, TREE_CODE (t), type,
668 TREE_OPERAND (t, 0),
669 negate_expr (TREE_OPERAND (t, 1)));
670 break;
671
672 case NOP_EXPR:
673 /* Convert -((double)float) into (double)(-float). */
674 if (TREE_CODE (type) == REAL_TYPE)
675 {
676 tem = strip_float_extensions (t);
677 if (tem != t && negate_expr_p (tem))
678 return fold_convert_loc (loc, type, negate_expr (tem));
679 }
680 break;
681
682 case CALL_EXPR:
683 /* Negate -f(x) as f(-x). */
684 if (negate_mathfn_p (get_call_combined_fn (t))
685 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
686 {
687 tree fndecl, arg;
688
689 fndecl = get_callee_fndecl (t);
690 arg = negate_expr (CALL_EXPR_ARG (t, 0));
691 return build_call_expr_loc (loc, fndecl, 1, arg);
692 }
693 break;
694
695 case RSHIFT_EXPR:
696 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
697 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
698 {
699 tree op1 = TREE_OPERAND (t, 1);
700 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
701 {
702 tree ntype = TYPE_UNSIGNED (type)
703 ? signed_type_for (type)
704 : unsigned_type_for (type);
705 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
706 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
707 return fold_convert_loc (loc, type, temp);
708 }
709 }
710 break;
711
712 default:
713 break;
714 }
715
716 return NULL_TREE;
717 }
718
719 /* A wrapper for fold_negate_expr_1. */
720
721 static tree
722 fold_negate_expr (location_t loc, tree t)
723 {
724 tree type = TREE_TYPE (t);
725 STRIP_SIGN_NOPS (t);
726 tree tem = fold_negate_expr_1 (loc, t);
727 if (tem == NULL_TREE)
728 return NULL_TREE;
729 return fold_convert_loc (loc, type, tem);
730 }
731
732 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
733 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
734 return NULL_TREE. */
735
736 static tree
737 negate_expr (tree t)
738 {
739 tree type, tem;
740 location_t loc;
741
742 if (t == NULL_TREE)
743 return NULL_TREE;
744
745 loc = EXPR_LOCATION (t);
746 type = TREE_TYPE (t);
747 STRIP_SIGN_NOPS (t);
748
749 tem = fold_negate_expr (loc, t);
750 if (!tem)
751 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
752 return fold_convert_loc (loc, type, tem);
753 }
754 \f
755 /* Split a tree IN into a constant, literal and variable parts that could be
756 combined with CODE to make IN. "constant" means an expression with
757 TREE_CONSTANT but that isn't an actual constant. CODE must be a
758 commutative arithmetic operation. Store the constant part into *CONP,
759 the literal in *LITP and return the variable part. If a part isn't
760 present, set it to null. If the tree does not decompose in this way,
761 return the entire tree as the variable part and the other parts as null.
762
763 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
764 case, we negate an operand that was subtracted. Except if it is a
765 literal for which we use *MINUS_LITP instead.
766
767 If NEGATE_P is true, we are negating all of IN, again except a literal
768 for which we use *MINUS_LITP instead. If a variable part is of pointer
769 type, it is negated after converting to TYPE. This prevents us from
770 generating illegal MINUS pointer expression. LOC is the location of
771 the converted variable part.
772
773 If IN is itself a literal or constant, return it as appropriate.
774
775 Note that we do not guarantee that any of the three values will be the
776 same type as IN, but they will have the same signedness and mode. */
777
778 static tree
779 split_tree (tree in, tree type, enum tree_code code,
780 tree *minus_varp, tree *conp, tree *minus_conp,
781 tree *litp, tree *minus_litp, int negate_p)
782 {
783 tree var = 0;
784 *minus_varp = 0;
785 *conp = 0;
786 *minus_conp = 0;
787 *litp = 0;
788 *minus_litp = 0;
789
790 /* Strip any conversions that don't change the machine mode or signedness. */
791 STRIP_SIGN_NOPS (in);
792
793 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
794 || TREE_CODE (in) == FIXED_CST)
795 *litp = in;
796 else if (TREE_CODE (in) == code
797 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
798 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
799 /* We can associate addition and subtraction together (even
800 though the C standard doesn't say so) for integers because
801 the value is not affected. For reals, the value might be
802 affected, so we can't. */
803 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
804 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
805 || (code == MINUS_EXPR
806 && (TREE_CODE (in) == PLUS_EXPR
807 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
808 {
809 tree op0 = TREE_OPERAND (in, 0);
810 tree op1 = TREE_OPERAND (in, 1);
811 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
812 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
813
814 /* First see if either of the operands is a literal, then a constant. */
815 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
816 || TREE_CODE (op0) == FIXED_CST)
817 *litp = op0, op0 = 0;
818 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
819 || TREE_CODE (op1) == FIXED_CST)
820 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
821
822 if (op0 != 0 && TREE_CONSTANT (op0))
823 *conp = op0, op0 = 0;
824 else if (op1 != 0 && TREE_CONSTANT (op1))
825 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
826
827 /* If we haven't dealt with either operand, this is not a case we can
828 decompose. Otherwise, VAR is either of the ones remaining, if any. */
829 if (op0 != 0 && op1 != 0)
830 var = in;
831 else if (op0 != 0)
832 var = op0;
833 else
834 var = op1, neg_var_p = neg1_p;
835
836 /* Now do any needed negations. */
837 if (neg_litp_p)
838 *minus_litp = *litp, *litp = 0;
839 if (neg_conp_p && *conp)
840 *minus_conp = *conp, *conp = 0;
841 if (neg_var_p && var)
842 *minus_varp = var, var = 0;
843 }
844 else if (TREE_CONSTANT (in))
845 *conp = in;
846 else if (TREE_CODE (in) == BIT_NOT_EXPR
847 && code == PLUS_EXPR)
848 {
849 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
850 when IN is constant. */
851 *litp = build_minus_one_cst (type);
852 *minus_varp = TREE_OPERAND (in, 0);
853 }
854 else
855 var = in;
856
857 if (negate_p)
858 {
859 if (*litp)
860 *minus_litp = *litp, *litp = 0;
861 else if (*minus_litp)
862 *litp = *minus_litp, *minus_litp = 0;
863 if (*conp)
864 *minus_conp = *conp, *conp = 0;
865 else if (*minus_conp)
866 *conp = *minus_conp, *minus_conp = 0;
867 if (var)
868 *minus_varp = var, var = 0;
869 else if (*minus_varp)
870 var = *minus_varp, *minus_varp = 0;
871 }
872
873 if (*litp
874 && TREE_OVERFLOW_P (*litp))
875 *litp = drop_tree_overflow (*litp);
876 if (*minus_litp
877 && TREE_OVERFLOW_P (*minus_litp))
878 *minus_litp = drop_tree_overflow (*minus_litp);
879
880 return var;
881 }
882
883 /* Re-associate trees split by the above function. T1 and T2 are
884 either expressions to associate or null. Return the new
885 expression, if any. LOC is the location of the new expression. If
886 we build an operation, do it in TYPE and with CODE. */
887
888 static tree
889 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
890 {
891 if (t1 == 0)
892 {
893 gcc_assert (t2 == 0 || code != MINUS_EXPR);
894 return t2;
895 }
896 else if (t2 == 0)
897 return t1;
898
899 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
900 try to fold this since we will have infinite recursion. But do
901 deal with any NEGATE_EXPRs. */
902 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
903 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
904 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
905 {
906 if (code == PLUS_EXPR)
907 {
908 if (TREE_CODE (t1) == NEGATE_EXPR)
909 return build2_loc (loc, MINUS_EXPR, type,
910 fold_convert_loc (loc, type, t2),
911 fold_convert_loc (loc, type,
912 TREE_OPERAND (t1, 0)));
913 else if (TREE_CODE (t2) == NEGATE_EXPR)
914 return build2_loc (loc, MINUS_EXPR, type,
915 fold_convert_loc (loc, type, t1),
916 fold_convert_loc (loc, type,
917 TREE_OPERAND (t2, 0)));
918 else if (integer_zerop (t2))
919 return fold_convert_loc (loc, type, t1);
920 }
921 else if (code == MINUS_EXPR)
922 {
923 if (integer_zerop (t2))
924 return fold_convert_loc (loc, type, t1);
925 }
926
927 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
928 fold_convert_loc (loc, type, t2));
929 }
930
931 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
932 fold_convert_loc (loc, type, t2));
933 }
934 \f
935 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
936 for use in int_const_binop, size_binop and size_diffop. */
937
938 static bool
939 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
940 {
941 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
942 return false;
943 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
944 return false;
945
946 switch (code)
947 {
948 case LSHIFT_EXPR:
949 case RSHIFT_EXPR:
950 case LROTATE_EXPR:
951 case RROTATE_EXPR:
952 return true;
953
954 default:
955 break;
956 }
957
958 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
959 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
960 && TYPE_MODE (type1) == TYPE_MODE (type2);
961 }
962
963
964 /* Combine two integer constants PARG1 and PARG2 under operation CODE
965 to produce a new constant. Return NULL_TREE if we don't know how
966 to evaluate CODE at compile-time. */
967
968 static tree
969 int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2,
970 int overflowable)
971 {
972 wide_int res;
973 tree t;
974 tree type = TREE_TYPE (parg1);
975 signop sign = TYPE_SIGN (type);
976 bool overflow = false;
977
978 wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
979 wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));
980
981 switch (code)
982 {
983 case BIT_IOR_EXPR:
984 res = wi::bit_or (arg1, arg2);
985 break;
986
987 case BIT_XOR_EXPR:
988 res = wi::bit_xor (arg1, arg2);
989 break;
990
991 case BIT_AND_EXPR:
992 res = wi::bit_and (arg1, arg2);
993 break;
994
995 case RSHIFT_EXPR:
996 case LSHIFT_EXPR:
997 if (wi::neg_p (arg2))
998 {
999 arg2 = -arg2;
1000 if (code == RSHIFT_EXPR)
1001 code = LSHIFT_EXPR;
1002 else
1003 code = RSHIFT_EXPR;
1004 }
1005
1006 if (code == RSHIFT_EXPR)
1007 /* It's unclear from the C standard whether shifts can overflow.
1008 The following code ignores overflow; perhaps a C standard
1009 interpretation ruling is needed. */
1010 res = wi::rshift (arg1, arg2, sign);
1011 else
1012 res = wi::lshift (arg1, arg2);
1013 break;
1014
1015 case RROTATE_EXPR:
1016 case LROTATE_EXPR:
1017 if (wi::neg_p (arg2))
1018 {
1019 arg2 = -arg2;
1020 if (code == RROTATE_EXPR)
1021 code = LROTATE_EXPR;
1022 else
1023 code = RROTATE_EXPR;
1024 }
1025
1026 if (code == RROTATE_EXPR)
1027 res = wi::rrotate (arg1, arg2);
1028 else
1029 res = wi::lrotate (arg1, arg2);
1030 break;
1031
1032 case PLUS_EXPR:
1033 res = wi::add (arg1, arg2, sign, &overflow);
1034 break;
1035
1036 case MINUS_EXPR:
1037 res = wi::sub (arg1, arg2, sign, &overflow);
1038 break;
1039
1040 case MULT_EXPR:
1041 res = wi::mul (arg1, arg2, sign, &overflow);
1042 break;
1043
1044 case MULT_HIGHPART_EXPR:
1045 res = wi::mul_high (arg1, arg2, sign);
1046 break;
1047
1048 case TRUNC_DIV_EXPR:
1049 case EXACT_DIV_EXPR:
1050 if (arg2 == 0)
1051 return NULL_TREE;
1052 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1053 break;
1054
1055 case FLOOR_DIV_EXPR:
1056 if (arg2 == 0)
1057 return NULL_TREE;
1058 res = wi::div_floor (arg1, arg2, sign, &overflow);
1059 break;
1060
1061 case CEIL_DIV_EXPR:
1062 if (arg2 == 0)
1063 return NULL_TREE;
1064 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1065 break;
1066
1067 case ROUND_DIV_EXPR:
1068 if (arg2 == 0)
1069 return NULL_TREE;
1070 res = wi::div_round (arg1, arg2, sign, &overflow);
1071 break;
1072
1073 case TRUNC_MOD_EXPR:
1074 if (arg2 == 0)
1075 return NULL_TREE;
1076 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1077 break;
1078
1079 case FLOOR_MOD_EXPR:
1080 if (arg2 == 0)
1081 return NULL_TREE;
1082 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1083 break;
1084
1085 case CEIL_MOD_EXPR:
1086 if (arg2 == 0)
1087 return NULL_TREE;
1088 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1089 break;
1090
1091 case ROUND_MOD_EXPR:
1092 if (arg2 == 0)
1093 return NULL_TREE;
1094 res = wi::mod_round (arg1, arg2, sign, &overflow);
1095 break;
1096
1097 case MIN_EXPR:
1098 res = wi::min (arg1, arg2, sign);
1099 break;
1100
1101 case MAX_EXPR:
1102 res = wi::max (arg1, arg2, sign);
1103 break;
1104
1105 default:
1106 return NULL_TREE;
1107 }
1108
1109 t = force_fit_type (type, res, overflowable,
1110 (((sign == SIGNED || overflowable == -1)
1111 && overflow)
1112 | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));
1113
1114 return t;
1115 }
1116
1117 tree
1118 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1119 {
1120 return int_const_binop_1 (code, arg1, arg2, 1);
1121 }
1122
1123 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1124 constant. We assume ARG1 and ARG2 have the same data type, or at least
1125 are the same kind of constant and the same machine mode. Return zero if
1126 combining the constants is not allowed in the current operating mode. */
1127
1128 static tree
1129 const_binop (enum tree_code code, tree arg1, tree arg2)
1130 {
1131 /* Sanity check for the recursive cases. */
1132 if (!arg1 || !arg2)
1133 return NULL_TREE;
1134
1135 STRIP_NOPS (arg1);
1136 STRIP_NOPS (arg2);
1137
1138 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1139 {
1140 if (code == POINTER_PLUS_EXPR)
1141 return int_const_binop (PLUS_EXPR,
1142 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1143
1144 return int_const_binop (code, arg1, arg2);
1145 }
1146
1147 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1148 {
1149 machine_mode mode;
1150 REAL_VALUE_TYPE d1;
1151 REAL_VALUE_TYPE d2;
1152 REAL_VALUE_TYPE value;
1153 REAL_VALUE_TYPE result;
1154 bool inexact;
1155 tree t, type;
1156
1157 /* The following codes are handled by real_arithmetic. */
1158 switch (code)
1159 {
1160 case PLUS_EXPR:
1161 case MINUS_EXPR:
1162 case MULT_EXPR:
1163 case RDIV_EXPR:
1164 case MIN_EXPR:
1165 case MAX_EXPR:
1166 break;
1167
1168 default:
1169 return NULL_TREE;
1170 }
1171
1172 d1 = TREE_REAL_CST (arg1);
1173 d2 = TREE_REAL_CST (arg2);
1174
1175 type = TREE_TYPE (arg1);
1176 mode = TYPE_MODE (type);
1177
1178 /* Don't perform operation if we honor signaling NaNs and
1179 either operand is a signaling NaN. */
1180 if (HONOR_SNANS (mode)
1181 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1182 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1183 return NULL_TREE;
1184
1185 /* Don't perform operation if it would raise a division
1186 by zero exception. */
1187 if (code == RDIV_EXPR
1188 && real_equal (&d2, &dconst0)
1189 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1190 return NULL_TREE;
1191
1192 /* If either operand is a NaN, just return it. Otherwise, set up
1193 for floating-point trap; we return an overflow. */
1194 if (REAL_VALUE_ISNAN (d1))
1195 {
1196 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1197 is off. */
1198 d1.signalling = 0;
1199 t = build_real (type, d1);
1200 return t;
1201 }
1202 else if (REAL_VALUE_ISNAN (d2))
1203 {
1204 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1205 is off. */
1206 d2.signalling = 0;
1207 t = build_real (type, d2);
1208 return t;
1209 }
1210
1211 inexact = real_arithmetic (&value, code, &d1, &d2);
1212 real_convert (&result, mode, &value);
1213
1214 /* Don't constant fold this floating point operation if
1215 the result has overflowed and flag_trapping_math. */
1216 if (flag_trapping_math
1217 && MODE_HAS_INFINITIES (mode)
1218 && REAL_VALUE_ISINF (result)
1219 && !REAL_VALUE_ISINF (d1)
1220 && !REAL_VALUE_ISINF (d2))
1221 return NULL_TREE;
1222
1223 /* Don't constant fold this floating point operation if the
1224 result may dependent upon the run-time rounding mode and
1225 flag_rounding_math is set, or if GCC's software emulation
1226 is unable to accurately represent the result. */
1227 if ((flag_rounding_math
1228 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1229 && (inexact || !real_identical (&result, &value)))
1230 return NULL_TREE;
1231
1232 t = build_real (type, result);
1233
1234 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1235 return t;
1236 }
1237
1238 if (TREE_CODE (arg1) == FIXED_CST)
1239 {
1240 FIXED_VALUE_TYPE f1;
1241 FIXED_VALUE_TYPE f2;
1242 FIXED_VALUE_TYPE result;
1243 tree t, type;
1244 int sat_p;
1245 bool overflow_p;
1246
1247 /* The following codes are handled by fixed_arithmetic. */
1248 switch (code)
1249 {
1250 case PLUS_EXPR:
1251 case MINUS_EXPR:
1252 case MULT_EXPR:
1253 case TRUNC_DIV_EXPR:
1254 if (TREE_CODE (arg2) != FIXED_CST)
1255 return NULL_TREE;
1256 f2 = TREE_FIXED_CST (arg2);
1257 break;
1258
1259 case LSHIFT_EXPR:
1260 case RSHIFT_EXPR:
1261 {
1262 if (TREE_CODE (arg2) != INTEGER_CST)
1263 return NULL_TREE;
1264 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1265 f2.data.high = w2.elt (1);
1266 f2.data.low = w2.ulow ();
1267 f2.mode = SImode;
1268 }
1269 break;
1270
1271 default:
1272 return NULL_TREE;
1273 }
1274
1275 f1 = TREE_FIXED_CST (arg1);
1276 type = TREE_TYPE (arg1);
1277 sat_p = TYPE_SATURATING (type);
1278 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1279 t = build_fixed (type, result);
1280 /* Propagate overflow flags. */
1281 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1282 TREE_OVERFLOW (t) = 1;
1283 return t;
1284 }
1285
1286 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1287 {
1288 tree type = TREE_TYPE (arg1);
1289 tree r1 = TREE_REALPART (arg1);
1290 tree i1 = TREE_IMAGPART (arg1);
1291 tree r2 = TREE_REALPART (arg2);
1292 tree i2 = TREE_IMAGPART (arg2);
1293 tree real, imag;
1294
1295 switch (code)
1296 {
1297 case PLUS_EXPR:
1298 case MINUS_EXPR:
1299 real = const_binop (code, r1, r2);
1300 imag = const_binop (code, i1, i2);
1301 break;
1302
1303 case MULT_EXPR:
1304 if (COMPLEX_FLOAT_TYPE_P (type))
1305 return do_mpc_arg2 (arg1, arg2, type,
1306 /* do_nonfinite= */ folding_initializer,
1307 mpc_mul);
1308
1309 real = const_binop (MINUS_EXPR,
1310 const_binop (MULT_EXPR, r1, r2),
1311 const_binop (MULT_EXPR, i1, i2));
1312 imag = const_binop (PLUS_EXPR,
1313 const_binop (MULT_EXPR, r1, i2),
1314 const_binop (MULT_EXPR, i1, r2));
1315 break;
1316
1317 case RDIV_EXPR:
1318 if (COMPLEX_FLOAT_TYPE_P (type))
1319 return do_mpc_arg2 (arg1, arg2, type,
1320 /* do_nonfinite= */ folding_initializer,
1321 mpc_div);
1322 /* Fallthru. */
1323 case TRUNC_DIV_EXPR:
1324 case CEIL_DIV_EXPR:
1325 case FLOOR_DIV_EXPR:
1326 case ROUND_DIV_EXPR:
1327 if (flag_complex_method == 0)
1328 {
1329 /* Keep this algorithm in sync with
1330 tree-complex.c:expand_complex_div_straight().
1331
1332 Expand complex division to scalars, straightforward algorithm.
1333 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1334 t = br*br + bi*bi
1335 */
1336 tree magsquared
1337 = const_binop (PLUS_EXPR,
1338 const_binop (MULT_EXPR, r2, r2),
1339 const_binop (MULT_EXPR, i2, i2));
1340 tree t1
1341 = const_binop (PLUS_EXPR,
1342 const_binop (MULT_EXPR, r1, r2),
1343 const_binop (MULT_EXPR, i1, i2));
1344 tree t2
1345 = const_binop (MINUS_EXPR,
1346 const_binop (MULT_EXPR, i1, r2),
1347 const_binop (MULT_EXPR, r1, i2));
1348
1349 real = const_binop (code, t1, magsquared);
1350 imag = const_binop (code, t2, magsquared);
1351 }
1352 else
1353 {
1354 /* Keep this algorithm in sync with
1355 tree-complex.c:expand_complex_div_wide().
1356
1357 Expand complex division to scalars, modified algorithm to minimize
1358 overflow with wide input ranges. */
1359 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1360 fold_abs_const (r2, TREE_TYPE (type)),
1361 fold_abs_const (i2, TREE_TYPE (type)));
1362
1363 if (integer_nonzerop (compare))
1364 {
1365 /* In the TRUE branch, we compute
1366 ratio = br/bi;
1367 div = (br * ratio) + bi;
1368 tr = (ar * ratio) + ai;
1369 ti = (ai * ratio) - ar;
1370 tr = tr / div;
1371 ti = ti / div; */
1372 tree ratio = const_binop (code, r2, i2);
1373 tree div = const_binop (PLUS_EXPR, i2,
1374 const_binop (MULT_EXPR, r2, ratio));
1375 real = const_binop (MULT_EXPR, r1, ratio);
1376 real = const_binop (PLUS_EXPR, real, i1);
1377 real = const_binop (code, real, div);
1378
1379 imag = const_binop (MULT_EXPR, i1, ratio);
1380 imag = const_binop (MINUS_EXPR, imag, r1);
1381 imag = const_binop (code, imag, div);
1382 }
1383 else
1384 {
1385 /* In the FALSE branch, we compute
1386 ratio = d/c;
1387 divisor = (d * ratio) + c;
1388 tr = (b * ratio) + a;
1389 ti = b - (a * ratio);
1390 tr = tr / div;
1391 ti = ti / div; */
1392 tree ratio = const_binop (code, i2, r2);
1393 tree div = const_binop (PLUS_EXPR, r2,
1394 const_binop (MULT_EXPR, i2, ratio));
1395
1396 real = const_binop (MULT_EXPR, i1, ratio);
1397 real = const_binop (PLUS_EXPR, real, r1);
1398 real = const_binop (code, real, div);
1399
1400 imag = const_binop (MULT_EXPR, r1, ratio);
1401 imag = const_binop (MINUS_EXPR, i1, imag);
1402 imag = const_binop (code, imag, div);
1403 }
1404 }
1405 break;
1406
1407 default:
1408 return NULL_TREE;
1409 }
1410
1411 if (real && imag)
1412 return build_complex (type, real, imag);
1413 }
1414
1415 if (TREE_CODE (arg1) == VECTOR_CST
1416 && TREE_CODE (arg2) == VECTOR_CST)
1417 {
1418 tree type = TREE_TYPE (arg1);
1419 int count = VECTOR_CST_NELTS (arg1), i;
1420
1421 auto_vec<tree, 32> elts (count);
1422 for (i = 0; i < count; i++)
1423 {
1424 tree elem1 = VECTOR_CST_ELT (arg1, i);
1425 tree elem2 = VECTOR_CST_ELT (arg2, i);
1426
1427 tree elt = const_binop (code, elem1, elem2);
1428
1429 /* It is possible that const_binop cannot handle the given
1430 code and return NULL_TREE */
1431 if (elt == NULL_TREE)
1432 return NULL_TREE;
1433 elts.quick_push (elt);
1434 }
1435
1436 return build_vector (type, elts);
1437 }
1438
1439 /* Shifts allow a scalar offset for a vector. */
1440 if (TREE_CODE (arg1) == VECTOR_CST
1441 && TREE_CODE (arg2) == INTEGER_CST)
1442 {
1443 tree type = TREE_TYPE (arg1);
1444 int count = VECTOR_CST_NELTS (arg1), i;
1445
1446 auto_vec<tree, 32> elts (count);
1447 for (i = 0; i < count; i++)
1448 {
1449 tree elem1 = VECTOR_CST_ELT (arg1, i);
1450
1451 tree elt = const_binop (code, elem1, arg2);
1452
1453 /* It is possible that const_binop cannot handle the given
1454 code and return NULL_TREE. */
1455 if (elt == NULL_TREE)
1456 return NULL_TREE;
1457 elts.quick_push (elt);
1458 }
1459
1460 return build_vector (type, elts);
1461 }
1462 return NULL_TREE;
1463 }
1464
1465 /* Overload that adds a TYPE parameter to be able to dispatch
1466 to fold_relational_const. */
1467
1468 tree
1469 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1470 {
1471 if (TREE_CODE_CLASS (code) == tcc_comparison)
1472 return fold_relational_const (code, type, arg1, arg2);
1473
1474 /* ??? Until we make the const_binop worker take the type of the
1475 result as argument put those cases that need it here. */
1476 switch (code)
1477 {
1478 case COMPLEX_EXPR:
1479 if ((TREE_CODE (arg1) == REAL_CST
1480 && TREE_CODE (arg2) == REAL_CST)
1481 || (TREE_CODE (arg1) == INTEGER_CST
1482 && TREE_CODE (arg2) == INTEGER_CST))
1483 return build_complex (type, arg1, arg2);
1484 return NULL_TREE;
1485
1486 case POINTER_DIFF_EXPR:
1487 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1488 {
1489 offset_int res = wi::sub (wi::to_offset (arg1),
1490 wi::to_offset (arg2));
1491 return force_fit_type (type, res, 1,
1492 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1493 }
1494 return NULL_TREE;
1495
1496 case VEC_PACK_TRUNC_EXPR:
1497 case VEC_PACK_FIX_TRUNC_EXPR:
1498 {
1499 unsigned int out_nelts, in_nelts, i;
1500
1501 if (TREE_CODE (arg1) != VECTOR_CST
1502 || TREE_CODE (arg2) != VECTOR_CST)
1503 return NULL_TREE;
1504
1505 in_nelts = VECTOR_CST_NELTS (arg1);
1506 out_nelts = in_nelts * 2;
1507 gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
1508 && out_nelts == TYPE_VECTOR_SUBPARTS (type));
1509
1510 auto_vec<tree, 32> elts (out_nelts);
1511 for (i = 0; i < out_nelts; i++)
1512 {
1513 tree elt = (i < in_nelts
1514 ? VECTOR_CST_ELT (arg1, i)
1515 : VECTOR_CST_ELT (arg2, i - in_nelts));
1516 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1517 ? NOP_EXPR : FIX_TRUNC_EXPR,
1518 TREE_TYPE (type), elt);
1519 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1520 return NULL_TREE;
1521 elts.quick_push (elt);
1522 }
1523
1524 return build_vector (type, elts);
1525 }
1526
1527 case VEC_WIDEN_MULT_LO_EXPR:
1528 case VEC_WIDEN_MULT_HI_EXPR:
1529 case VEC_WIDEN_MULT_EVEN_EXPR:
1530 case VEC_WIDEN_MULT_ODD_EXPR:
1531 {
1532 unsigned int out_nelts, in_nelts, out, ofs, scale;
1533
1534 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1535 return NULL_TREE;
1536
1537 in_nelts = VECTOR_CST_NELTS (arg1);
1538 out_nelts = in_nelts / 2;
1539 gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
1540 && out_nelts == TYPE_VECTOR_SUBPARTS (type));
1541
1542 if (code == VEC_WIDEN_MULT_LO_EXPR)
1543 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1544 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1545 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1546 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1547 scale = 1, ofs = 0;
1548 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1549 scale = 1, ofs = 1;
1550
1551 auto_vec<tree, 32> elts (out_nelts);
1552 for (out = 0; out < out_nelts; out++)
1553 {
1554 unsigned int in = (out << scale) + ofs;
1555 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1556 VECTOR_CST_ELT (arg1, in));
1557 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1558 VECTOR_CST_ELT (arg2, in));
1559
1560 if (t1 == NULL_TREE || t2 == NULL_TREE)
1561 return NULL_TREE;
1562 tree elt = const_binop (MULT_EXPR, t1, t2);
1563 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1564 return NULL_TREE;
1565 elts.quick_push (elt);
1566 }
1567
1568 return build_vector (type, elts);
1569 }
1570
1571 default:;
1572 }
1573
1574 if (TREE_CODE_CLASS (code) != tcc_binary)
1575 return NULL_TREE;
1576
1577 /* Make sure type and arg0 have the same saturating flag. */
1578 gcc_checking_assert (TYPE_SATURATING (type)
1579 == TYPE_SATURATING (TREE_TYPE (arg1)));
1580
1581 return const_binop (code, arg1, arg2);
1582 }
1583
1584 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1585 Return zero if computing the constants is not possible. */
1586
1587 tree
1588 const_unop (enum tree_code code, tree type, tree arg0)
1589 {
1590 /* Don't perform the operation, other than NEGATE and ABS, if
1591 flag_signaling_nans is on and the operand is a signaling NaN. */
1592 if (TREE_CODE (arg0) == REAL_CST
1593 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1594 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1595 && code != NEGATE_EXPR
1596 && code != ABS_EXPR)
1597 return NULL_TREE;
1598
1599 switch (code)
1600 {
1601 CASE_CONVERT:
1602 case FLOAT_EXPR:
1603 case FIX_TRUNC_EXPR:
1604 case FIXED_CONVERT_EXPR:
1605 return fold_convert_const (code, type, arg0);
1606
1607 case ADDR_SPACE_CONVERT_EXPR:
1608 /* If the source address is 0, and the source address space
1609 cannot have a valid object at 0, fold to dest type null. */
1610 if (integer_zerop (arg0)
1611 && !(targetm.addr_space.zero_address_valid
1612 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1613 return fold_convert_const (code, type, arg0);
1614 break;
1615
1616 case VIEW_CONVERT_EXPR:
1617 return fold_view_convert_expr (type, arg0);
1618
1619 case NEGATE_EXPR:
1620 {
1621 /* Can't call fold_negate_const directly here as that doesn't
1622 handle all cases and we might not be able to negate some
1623 constants. */
1624 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1625 if (tem && CONSTANT_CLASS_P (tem))
1626 return tem;
1627 break;
1628 }
1629
1630 case ABS_EXPR:
1631 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1632 return fold_abs_const (arg0, type);
1633 break;
1634
1635 case CONJ_EXPR:
1636 if (TREE_CODE (arg0) == COMPLEX_CST)
1637 {
1638 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1639 TREE_TYPE (type));
1640 return build_complex (type, TREE_REALPART (arg0), ipart);
1641 }
1642 break;
1643
1644 case BIT_NOT_EXPR:
1645 if (TREE_CODE (arg0) == INTEGER_CST)
1646 return fold_not_const (arg0, type);
1647 /* Perform BIT_NOT_EXPR on each element individually. */
1648 else if (TREE_CODE (arg0) == VECTOR_CST)
1649 {
1650 tree elem;
1651 unsigned count = VECTOR_CST_NELTS (arg0), i;
1652
1653 auto_vec<tree, 32> elements (count);
1654 for (i = 0; i < count; i++)
1655 {
1656 elem = VECTOR_CST_ELT (arg0, i);
1657 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1658 if (elem == NULL_TREE)
1659 break;
1660 elements.quick_push (elem);
1661 }
1662 if (i == count)
1663 return build_vector (type, elements);
1664 }
1665 break;
1666
1667 case TRUTH_NOT_EXPR:
1668 if (TREE_CODE (arg0) == INTEGER_CST)
1669 return constant_boolean_node (integer_zerop (arg0), type);
1670 break;
1671
1672 case REALPART_EXPR:
1673 if (TREE_CODE (arg0) == COMPLEX_CST)
1674 return fold_convert (type, TREE_REALPART (arg0));
1675 break;
1676
1677 case IMAGPART_EXPR:
1678 if (TREE_CODE (arg0) == COMPLEX_CST)
1679 return fold_convert (type, TREE_IMAGPART (arg0));
1680 break;
1681
1682 case VEC_UNPACK_LO_EXPR:
1683 case VEC_UNPACK_HI_EXPR:
1684 case VEC_UNPACK_FLOAT_LO_EXPR:
1685 case VEC_UNPACK_FLOAT_HI_EXPR:
1686 {
1687 unsigned int out_nelts, in_nelts, i;
1688 enum tree_code subcode;
1689
1690 if (TREE_CODE (arg0) != VECTOR_CST)
1691 return NULL_TREE;
1692
1693 in_nelts = VECTOR_CST_NELTS (arg0);
1694 out_nelts = in_nelts / 2;
1695 gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type));
1696
1697 unsigned int offset = 0;
1698 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1699 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1700 offset = out_nelts;
1701
1702 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1703 subcode = NOP_EXPR;
1704 else
1705 subcode = FLOAT_EXPR;
1706
1707 auto_vec<tree, 32> elts (out_nelts);
1708 for (i = 0; i < out_nelts; i++)
1709 {
1710 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1711 VECTOR_CST_ELT (arg0, i + offset));
1712 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1713 return NULL_TREE;
1714 elts.quick_push (elt);
1715 }
1716
1717 return build_vector (type, elts);
1718 }
1719
1720 case REDUC_MIN_EXPR:
1721 case REDUC_MAX_EXPR:
1722 case REDUC_PLUS_EXPR:
1723 {
1724 unsigned int nelts, i;
1725 enum tree_code subcode;
1726
1727 if (TREE_CODE (arg0) != VECTOR_CST)
1728 return NULL_TREE;
1729 nelts = VECTOR_CST_NELTS (arg0);
1730
1731 switch (code)
1732 {
1733 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1734 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1735 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1736 default: gcc_unreachable ();
1737 }
1738
1739 tree res = VECTOR_CST_ELT (arg0, 0);
1740 for (i = 1; i < nelts; i++)
1741 {
1742 res = const_binop (subcode, res, VECTOR_CST_ELT (arg0, i));
1743 if (res == NULL_TREE || !CONSTANT_CLASS_P (res))
1744 return NULL_TREE;
1745 }
1746
1747 return res;
1748 }
1749
1750 default:
1751 break;
1752 }
1753
1754 return NULL_TREE;
1755 }
1756
1757 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1758 indicates which particular sizetype to create. */
1759
1760 tree
1761 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1762 {
1763 return build_int_cst (sizetype_tab[(int) kind], number);
1764 }
1765 \f
1766 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1767 is a tree code. The type of the result is taken from the operands.
1768 Both must be equivalent integer types, ala int_binop_types_match_p.
1769 If the operands are constant, so is the result. */
1770
1771 tree
1772 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1773 {
1774 tree type = TREE_TYPE (arg0);
1775
1776 if (arg0 == error_mark_node || arg1 == error_mark_node)
1777 return error_mark_node;
1778
1779 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1780 TREE_TYPE (arg1)));
1781
1782 /* Handle the special case of two integer constants faster. */
1783 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1784 {
1785 /* And some specific cases even faster than that. */
1786 if (code == PLUS_EXPR)
1787 {
1788 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1789 return arg1;
1790 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1791 return arg0;
1792 }
1793 else if (code == MINUS_EXPR)
1794 {
1795 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1796 return arg0;
1797 }
1798 else if (code == MULT_EXPR)
1799 {
1800 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1801 return arg1;
1802 }
1803
1804 /* Handle general case of two integer constants. For sizetype
1805 constant calculations we always want to know about overflow,
1806 even in the unsigned case. */
1807 return int_const_binop_1 (code, arg0, arg1, -1);
1808 }
1809
1810 return fold_build2_loc (loc, code, type, arg0, arg1);
1811 }
1812
1813 /* Given two values, either both of sizetype or both of bitsizetype,
1814 compute the difference between the two values. Return the value
1815 in signed type corresponding to the type of the operands. */
1816
1817 tree
1818 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1819 {
1820 tree type = TREE_TYPE (arg0);
1821 tree ctype;
1822
1823 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1824 TREE_TYPE (arg1)));
1825
1826 /* If the type is already signed, just do the simple thing. */
1827 if (!TYPE_UNSIGNED (type))
1828 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1829
1830 if (type == sizetype)
1831 ctype = ssizetype;
1832 else if (type == bitsizetype)
1833 ctype = sbitsizetype;
1834 else
1835 ctype = signed_type_for (type);
1836
1837 /* If either operand is not a constant, do the conversions to the signed
1838 type and subtract. The hardware will do the right thing with any
1839 overflow in the subtraction. */
1840 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1841 return size_binop_loc (loc, MINUS_EXPR,
1842 fold_convert_loc (loc, ctype, arg0),
1843 fold_convert_loc (loc, ctype, arg1));
1844
1845 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1846 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1847 overflow) and negate (which can't either). Special-case a result
1848 of zero while we're here. */
1849 if (tree_int_cst_equal (arg0, arg1))
1850 return build_int_cst (ctype, 0);
1851 else if (tree_int_cst_lt (arg1, arg0))
1852 return fold_convert_loc (loc, ctype,
1853 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1854 else
1855 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1856 fold_convert_loc (loc, ctype,
1857 size_binop_loc (loc,
1858 MINUS_EXPR,
1859 arg1, arg0)));
1860 }
1861 \f
1862 /* A subroutine of fold_convert_const handling conversions of an
1863 INTEGER_CST to another integer type. */
1864
1865 static tree
1866 fold_convert_const_int_from_int (tree type, const_tree arg1)
1867 {
1868 /* Given an integer constant, make new constant with new type,
1869 appropriately sign-extended or truncated. Use widest_int
1870 so that any extension is done according ARG1's type. */
1871 return force_fit_type (type, wi::to_widest (arg1),
1872 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1873 TREE_OVERFLOW (arg1));
1874 }
1875
1876 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1877 to an integer type. */
1878
1879 static tree
1880 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1881 {
1882 bool overflow = false;
1883 tree t;
1884
1885 /* The following code implements the floating point to integer
1886 conversion rules required by the Java Language Specification,
1887 that IEEE NaNs are mapped to zero and values that overflow
1888 the target precision saturate, i.e. values greater than
1889 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1890 are mapped to INT_MIN. These semantics are allowed by the
1891 C and C++ standards that simply state that the behavior of
1892 FP-to-integer conversion is unspecified upon overflow. */
1893
1894 wide_int val;
1895 REAL_VALUE_TYPE r;
1896 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1897
1898 switch (code)
1899 {
1900 case FIX_TRUNC_EXPR:
1901 real_trunc (&r, VOIDmode, &x);
1902 break;
1903
1904 default:
1905 gcc_unreachable ();
1906 }
1907
1908 /* If R is NaN, return zero and show we have an overflow. */
1909 if (REAL_VALUE_ISNAN (r))
1910 {
1911 overflow = true;
1912 val = wi::zero (TYPE_PRECISION (type));
1913 }
1914
1915 /* See if R is less than the lower bound or greater than the
1916 upper bound. */
1917
1918 if (! overflow)
1919 {
1920 tree lt = TYPE_MIN_VALUE (type);
1921 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1922 if (real_less (&r, &l))
1923 {
1924 overflow = true;
1925 val = wi::to_wide (lt);
1926 }
1927 }
1928
1929 if (! overflow)
1930 {
1931 tree ut = TYPE_MAX_VALUE (type);
1932 if (ut)
1933 {
1934 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1935 if (real_less (&u, &r))
1936 {
1937 overflow = true;
1938 val = wi::to_wide (ut);
1939 }
1940 }
1941 }
1942
1943 if (! overflow)
1944 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1945
1946 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1947 return t;
1948 }
1949
1950 /* A subroutine of fold_convert_const handling conversions of a
1951 FIXED_CST to an integer type. */
1952
1953 static tree
1954 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1955 {
1956 tree t;
1957 double_int temp, temp_trunc;
1958 scalar_mode mode;
1959
1960 /* Right shift FIXED_CST to temp by fbit. */
1961 temp = TREE_FIXED_CST (arg1).data;
1962 mode = TREE_FIXED_CST (arg1).mode;
1963 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1964 {
1965 temp = temp.rshift (GET_MODE_FBIT (mode),
1966 HOST_BITS_PER_DOUBLE_INT,
1967 SIGNED_FIXED_POINT_MODE_P (mode));
1968
1969 /* Left shift temp to temp_trunc by fbit. */
1970 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1971 HOST_BITS_PER_DOUBLE_INT,
1972 SIGNED_FIXED_POINT_MODE_P (mode));
1973 }
1974 else
1975 {
1976 temp = double_int_zero;
1977 temp_trunc = double_int_zero;
1978 }
1979
1980 /* If FIXED_CST is negative, we need to round the value toward 0.
1981 By checking if the fractional bits are not zero to add 1 to temp. */
1982 if (SIGNED_FIXED_POINT_MODE_P (mode)
1983 && temp_trunc.is_negative ()
1984 && TREE_FIXED_CST (arg1).data != temp_trunc)
1985 temp += double_int_one;
1986
1987 /* Given a fixed-point constant, make new constant with new type,
1988 appropriately sign-extended or truncated. */
1989 t = force_fit_type (type, temp, -1,
1990 (temp.is_negative ()
1991 && (TYPE_UNSIGNED (type)
1992 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1993 | TREE_OVERFLOW (arg1));
1994
1995 return t;
1996 }
1997
1998 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1999 to another floating point type. */
2000
2001 static tree
2002 fold_convert_const_real_from_real (tree type, const_tree arg1)
2003 {
2004 REAL_VALUE_TYPE value;
2005 tree t;
2006
2007 /* Don't perform the operation if flag_signaling_nans is on
2008 and the operand is a signaling NaN. */
2009 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2010 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2011 return NULL_TREE;
2012
2013 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2014 t = build_real (type, value);
2015
2016 /* If converting an infinity or NAN to a representation that doesn't
2017 have one, set the overflow bit so that we can produce some kind of
2018 error message at the appropriate point if necessary. It's not the
2019 most user-friendly message, but it's better than nothing. */
2020 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2021 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2022 TREE_OVERFLOW (t) = 1;
2023 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2024 && !MODE_HAS_NANS (TYPE_MODE (type)))
2025 TREE_OVERFLOW (t) = 1;
2026 /* Regular overflow, conversion produced an infinity in a mode that
2027 can't represent them. */
2028 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2029 && REAL_VALUE_ISINF (value)
2030 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2031 TREE_OVERFLOW (t) = 1;
2032 else
2033 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2034 return t;
2035 }
2036
2037 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2038 to a floating point type. */
2039
2040 static tree
2041 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2042 {
2043 REAL_VALUE_TYPE value;
2044 tree t;
2045
2046 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2047 &TREE_FIXED_CST (arg1));
2048 t = build_real (type, value);
2049
2050 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2051 return t;
2052 }
2053
2054 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2055 to another fixed-point type. */
2056
2057 static tree
2058 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2059 {
2060 FIXED_VALUE_TYPE value;
2061 tree t;
2062 bool overflow_p;
2063
2064 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2065 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2066 t = build_fixed (type, value);
2067
2068 /* Propagate overflow flags. */
2069 if (overflow_p | TREE_OVERFLOW (arg1))
2070 TREE_OVERFLOW (t) = 1;
2071 return t;
2072 }
2073
2074 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2075 to a fixed-point type. */
2076
2077 static tree
2078 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2079 {
2080 FIXED_VALUE_TYPE value;
2081 tree t;
2082 bool overflow_p;
2083 double_int di;
2084
2085 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2086
2087 di.low = TREE_INT_CST_ELT (arg1, 0);
2088 if (TREE_INT_CST_NUNITS (arg1) == 1)
2089 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2090 else
2091 di.high = TREE_INT_CST_ELT (arg1, 1);
2092
2093 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2094 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2095 TYPE_SATURATING (type));
2096 t = build_fixed (type, value);
2097
2098 /* Propagate overflow flags. */
2099 if (overflow_p | TREE_OVERFLOW (arg1))
2100 TREE_OVERFLOW (t) = 1;
2101 return t;
2102 }
2103
2104 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2105 to a fixed-point type. */
2106
2107 static tree
2108 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2109 {
2110 FIXED_VALUE_TYPE value;
2111 tree t;
2112 bool overflow_p;
2113
2114 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2115 &TREE_REAL_CST (arg1),
2116 TYPE_SATURATING (type));
2117 t = build_fixed (type, value);
2118
2119 /* Propagate overflow flags. */
2120 if (overflow_p | TREE_OVERFLOW (arg1))
2121 TREE_OVERFLOW (t) = 1;
2122 return t;
2123 }
2124
2125 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2126 type TYPE. If no simplification can be done return NULL_TREE. */
2127
2128 static tree
2129 fold_convert_const (enum tree_code code, tree type, tree arg1)
2130 {
2131 if (TREE_TYPE (arg1) == type)
2132 return arg1;
2133
2134 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2135 || TREE_CODE (type) == OFFSET_TYPE)
2136 {
2137 if (TREE_CODE (arg1) == INTEGER_CST)
2138 return fold_convert_const_int_from_int (type, arg1);
2139 else if (TREE_CODE (arg1) == REAL_CST)
2140 return fold_convert_const_int_from_real (code, type, arg1);
2141 else if (TREE_CODE (arg1) == FIXED_CST)
2142 return fold_convert_const_int_from_fixed (type, arg1);
2143 }
2144 else if (TREE_CODE (type) == REAL_TYPE)
2145 {
2146 if (TREE_CODE (arg1) == INTEGER_CST)
2147 return build_real_from_int_cst (type, arg1);
2148 else if (TREE_CODE (arg1) == REAL_CST)
2149 return fold_convert_const_real_from_real (type, arg1);
2150 else if (TREE_CODE (arg1) == FIXED_CST)
2151 return fold_convert_const_real_from_fixed (type, arg1);
2152 }
2153 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2154 {
2155 if (TREE_CODE (arg1) == FIXED_CST)
2156 return fold_convert_const_fixed_from_fixed (type, arg1);
2157 else if (TREE_CODE (arg1) == INTEGER_CST)
2158 return fold_convert_const_fixed_from_int (type, arg1);
2159 else if (TREE_CODE (arg1) == REAL_CST)
2160 return fold_convert_const_fixed_from_real (type, arg1);
2161 }
2162 else if (TREE_CODE (type) == VECTOR_TYPE)
2163 {
2164 if (TREE_CODE (arg1) == VECTOR_CST
2165 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2166 {
2167 int len = VECTOR_CST_NELTS (arg1);
2168 tree elttype = TREE_TYPE (type);
2169 auto_vec<tree, 32> v (len);
2170 for (int i = 0; i < len; ++i)
2171 {
2172 tree elt = VECTOR_CST_ELT (arg1, i);
2173 tree cvt = fold_convert_const (code, elttype, elt);
2174 if (cvt == NULL_TREE)
2175 return NULL_TREE;
2176 v.quick_push (cvt);
2177 }
2178 return build_vector (type, v);
2179 }
2180 }
2181 return NULL_TREE;
2182 }
2183
2184 /* Construct a vector of zero elements of vector type TYPE. */
2185
2186 static tree
2187 build_zero_vector (tree type)
2188 {
2189 tree t;
2190
2191 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2192 return build_vector_from_val (type, t);
2193 }
2194
2195 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2196
2197 bool
2198 fold_convertible_p (const_tree type, const_tree arg)
2199 {
2200 tree orig = TREE_TYPE (arg);
2201
2202 if (type == orig)
2203 return true;
2204
2205 if (TREE_CODE (arg) == ERROR_MARK
2206 || TREE_CODE (type) == ERROR_MARK
2207 || TREE_CODE (orig) == ERROR_MARK)
2208 return false;
2209
2210 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2211 return true;
2212
2213 switch (TREE_CODE (type))
2214 {
2215 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2216 case POINTER_TYPE: case REFERENCE_TYPE:
2217 case OFFSET_TYPE:
2218 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2219 || TREE_CODE (orig) == OFFSET_TYPE);
2220
2221 case REAL_TYPE:
2222 case FIXED_POINT_TYPE:
2223 case VECTOR_TYPE:
2224 case VOID_TYPE:
2225 return TREE_CODE (type) == TREE_CODE (orig);
2226
2227 default:
2228 return false;
2229 }
2230 }
2231
2232 /* Convert expression ARG to type TYPE. Used by the middle-end for
2233 simple conversions in preference to calling the front-end's convert. */
2234
2235 tree
2236 fold_convert_loc (location_t loc, tree type, tree arg)
2237 {
2238 tree orig = TREE_TYPE (arg);
2239 tree tem;
2240
2241 if (type == orig)
2242 return arg;
2243
2244 if (TREE_CODE (arg) == ERROR_MARK
2245 || TREE_CODE (type) == ERROR_MARK
2246 || TREE_CODE (orig) == ERROR_MARK)
2247 return error_mark_node;
2248
2249 switch (TREE_CODE (type))
2250 {
2251 case POINTER_TYPE:
2252 case REFERENCE_TYPE:
2253 /* Handle conversions between pointers to different address spaces. */
2254 if (POINTER_TYPE_P (orig)
2255 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2256 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2257 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2258 /* fall through */
2259
2260 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2261 case OFFSET_TYPE:
2262 if (TREE_CODE (arg) == INTEGER_CST)
2263 {
2264 tem = fold_convert_const (NOP_EXPR, type, arg);
2265 if (tem != NULL_TREE)
2266 return tem;
2267 }
2268 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2269 || TREE_CODE (orig) == OFFSET_TYPE)
2270 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2271 if (TREE_CODE (orig) == COMPLEX_TYPE)
2272 return fold_convert_loc (loc, type,
2273 fold_build1_loc (loc, REALPART_EXPR,
2274 TREE_TYPE (orig), arg));
2275 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2276 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2277 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2278
2279 case REAL_TYPE:
2280 if (TREE_CODE (arg) == INTEGER_CST)
2281 {
2282 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2283 if (tem != NULL_TREE)
2284 return tem;
2285 }
2286 else if (TREE_CODE (arg) == REAL_CST)
2287 {
2288 tem = fold_convert_const (NOP_EXPR, type, arg);
2289 if (tem != NULL_TREE)
2290 return tem;
2291 }
2292 else if (TREE_CODE (arg) == FIXED_CST)
2293 {
2294 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2295 if (tem != NULL_TREE)
2296 return tem;
2297 }
2298
2299 switch (TREE_CODE (orig))
2300 {
2301 case INTEGER_TYPE:
2302 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2303 case POINTER_TYPE: case REFERENCE_TYPE:
2304 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2305
2306 case REAL_TYPE:
2307 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2308
2309 case FIXED_POINT_TYPE:
2310 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2311
2312 case COMPLEX_TYPE:
2313 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2314 return fold_convert_loc (loc, type, tem);
2315
2316 default:
2317 gcc_unreachable ();
2318 }
2319
2320 case FIXED_POINT_TYPE:
2321 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2322 || TREE_CODE (arg) == REAL_CST)
2323 {
2324 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2325 if (tem != NULL_TREE)
2326 goto fold_convert_exit;
2327 }
2328
2329 switch (TREE_CODE (orig))
2330 {
2331 case FIXED_POINT_TYPE:
2332 case INTEGER_TYPE:
2333 case ENUMERAL_TYPE:
2334 case BOOLEAN_TYPE:
2335 case REAL_TYPE:
2336 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2337
2338 case COMPLEX_TYPE:
2339 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2340 return fold_convert_loc (loc, type, tem);
2341
2342 default:
2343 gcc_unreachable ();
2344 }
2345
2346 case COMPLEX_TYPE:
2347 switch (TREE_CODE (orig))
2348 {
2349 case INTEGER_TYPE:
2350 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2351 case POINTER_TYPE: case REFERENCE_TYPE:
2352 case REAL_TYPE:
2353 case FIXED_POINT_TYPE:
2354 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2355 fold_convert_loc (loc, TREE_TYPE (type), arg),
2356 fold_convert_loc (loc, TREE_TYPE (type),
2357 integer_zero_node));
2358 case COMPLEX_TYPE:
2359 {
2360 tree rpart, ipart;
2361
2362 if (TREE_CODE (arg) == COMPLEX_EXPR)
2363 {
2364 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2365 TREE_OPERAND (arg, 0));
2366 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2367 TREE_OPERAND (arg, 1));
2368 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2369 }
2370
2371 arg = save_expr (arg);
2372 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2373 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2374 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2375 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2376 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2377 }
2378
2379 default:
2380 gcc_unreachable ();
2381 }
2382
2383 case VECTOR_TYPE:
2384 if (integer_zerop (arg))
2385 return build_zero_vector (type);
2386 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2387 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2388 || TREE_CODE (orig) == VECTOR_TYPE);
2389 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2390
2391 case VOID_TYPE:
2392 tem = fold_ignored_result (arg);
2393 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2394
2395 default:
2396 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2397 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2398 gcc_unreachable ();
2399 }
2400 fold_convert_exit:
2401 protected_set_expr_location_unshare (tem, loc);
2402 return tem;
2403 }
2404 \f
2405 /* Return false if expr can be assumed not to be an lvalue, true
2406 otherwise. */
2407
2408 static bool
2409 maybe_lvalue_p (const_tree x)
2410 {
2411 /* We only need to wrap lvalue tree codes. */
2412 switch (TREE_CODE (x))
2413 {
2414 case VAR_DECL:
2415 case PARM_DECL:
2416 case RESULT_DECL:
2417 case LABEL_DECL:
2418 case FUNCTION_DECL:
2419 case SSA_NAME:
2420
2421 case COMPONENT_REF:
2422 case MEM_REF:
2423 case INDIRECT_REF:
2424 case ARRAY_REF:
2425 case ARRAY_RANGE_REF:
2426 case BIT_FIELD_REF:
2427 case OBJ_TYPE_REF:
2428
2429 case REALPART_EXPR:
2430 case IMAGPART_EXPR:
2431 case PREINCREMENT_EXPR:
2432 case PREDECREMENT_EXPR:
2433 case SAVE_EXPR:
2434 case TRY_CATCH_EXPR:
2435 case WITH_CLEANUP_EXPR:
2436 case COMPOUND_EXPR:
2437 case MODIFY_EXPR:
2438 case TARGET_EXPR:
2439 case COND_EXPR:
2440 case BIND_EXPR:
2441 break;
2442
2443 default:
2444 /* Assume the worst for front-end tree codes. */
2445 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2446 break;
2447 return false;
2448 }
2449
2450 return true;
2451 }
2452
2453 /* Return an expr equal to X but certainly not valid as an lvalue. */
2454
2455 tree
2456 non_lvalue_loc (location_t loc, tree x)
2457 {
2458 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2459 us. */
2460 if (in_gimple_form)
2461 return x;
2462
2463 if (! maybe_lvalue_p (x))
2464 return x;
2465 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2466 }
2467
2468 /* When pedantic, return an expr equal to X but certainly not valid as a
2469 pedantic lvalue. Otherwise, return X. */
2470
2471 static tree
2472 pedantic_non_lvalue_loc (location_t loc, tree x)
2473 {
2474 return protected_set_expr_location_unshare (x, loc);
2475 }
2476 \f
2477 /* Given a tree comparison code, return the code that is the logical inverse.
2478 It is generally not safe to do this for floating-point comparisons, except
2479 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2480 ERROR_MARK in this case. */
2481
2482 enum tree_code
2483 invert_tree_comparison (enum tree_code code, bool honor_nans)
2484 {
2485 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2486 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2487 return ERROR_MARK;
2488
2489 switch (code)
2490 {
2491 case EQ_EXPR:
2492 return NE_EXPR;
2493 case NE_EXPR:
2494 return EQ_EXPR;
2495 case GT_EXPR:
2496 return honor_nans ? UNLE_EXPR : LE_EXPR;
2497 case GE_EXPR:
2498 return honor_nans ? UNLT_EXPR : LT_EXPR;
2499 case LT_EXPR:
2500 return honor_nans ? UNGE_EXPR : GE_EXPR;
2501 case LE_EXPR:
2502 return honor_nans ? UNGT_EXPR : GT_EXPR;
2503 case LTGT_EXPR:
2504 return UNEQ_EXPR;
2505 case UNEQ_EXPR:
2506 return LTGT_EXPR;
2507 case UNGT_EXPR:
2508 return LE_EXPR;
2509 case UNGE_EXPR:
2510 return LT_EXPR;
2511 case UNLT_EXPR:
2512 return GE_EXPR;
2513 case UNLE_EXPR:
2514 return GT_EXPR;
2515 case ORDERED_EXPR:
2516 return UNORDERED_EXPR;
2517 case UNORDERED_EXPR:
2518 return ORDERED_EXPR;
2519 default:
2520 gcc_unreachable ();
2521 }
2522 }
2523
2524 /* Similar, but return the comparison that results if the operands are
2525 swapped. This is safe for floating-point. */
2526
2527 enum tree_code
2528 swap_tree_comparison (enum tree_code code)
2529 {
2530 switch (code)
2531 {
2532 case EQ_EXPR:
2533 case NE_EXPR:
2534 case ORDERED_EXPR:
2535 case UNORDERED_EXPR:
2536 case LTGT_EXPR:
2537 case UNEQ_EXPR:
2538 return code;
2539 case GT_EXPR:
2540 return LT_EXPR;
2541 case GE_EXPR:
2542 return LE_EXPR;
2543 case LT_EXPR:
2544 return GT_EXPR;
2545 case LE_EXPR:
2546 return GE_EXPR;
2547 case UNGT_EXPR:
2548 return UNLT_EXPR;
2549 case UNGE_EXPR:
2550 return UNLE_EXPR;
2551 case UNLT_EXPR:
2552 return UNGT_EXPR;
2553 case UNLE_EXPR:
2554 return UNGE_EXPR;
2555 default:
2556 gcc_unreachable ();
2557 }
2558 }
2559
2560
2561 /* Convert a comparison tree code from an enum tree_code representation
2562 into a compcode bit-based encoding. This function is the inverse of
2563 compcode_to_comparison. */
2564
2565 static enum comparison_code
2566 comparison_to_compcode (enum tree_code code)
2567 {
2568 switch (code)
2569 {
2570 case LT_EXPR:
2571 return COMPCODE_LT;
2572 case EQ_EXPR:
2573 return COMPCODE_EQ;
2574 case LE_EXPR:
2575 return COMPCODE_LE;
2576 case GT_EXPR:
2577 return COMPCODE_GT;
2578 case NE_EXPR:
2579 return COMPCODE_NE;
2580 case GE_EXPR:
2581 return COMPCODE_GE;
2582 case ORDERED_EXPR:
2583 return COMPCODE_ORD;
2584 case UNORDERED_EXPR:
2585 return COMPCODE_UNORD;
2586 case UNLT_EXPR:
2587 return COMPCODE_UNLT;
2588 case UNEQ_EXPR:
2589 return COMPCODE_UNEQ;
2590 case UNLE_EXPR:
2591 return COMPCODE_UNLE;
2592 case UNGT_EXPR:
2593 return COMPCODE_UNGT;
2594 case LTGT_EXPR:
2595 return COMPCODE_LTGT;
2596 case UNGE_EXPR:
2597 return COMPCODE_UNGE;
2598 default:
2599 gcc_unreachable ();
2600 }
2601 }
2602
2603 /* Convert a compcode bit-based encoding of a comparison operator back
2604 to GCC's enum tree_code representation. This function is the
2605 inverse of comparison_to_compcode. */
2606
2607 static enum tree_code
2608 compcode_to_comparison (enum comparison_code code)
2609 {
2610 switch (code)
2611 {
2612 case COMPCODE_LT:
2613 return LT_EXPR;
2614 case COMPCODE_EQ:
2615 return EQ_EXPR;
2616 case COMPCODE_LE:
2617 return LE_EXPR;
2618 case COMPCODE_GT:
2619 return GT_EXPR;
2620 case COMPCODE_NE:
2621 return NE_EXPR;
2622 case COMPCODE_GE:
2623 return GE_EXPR;
2624 case COMPCODE_ORD:
2625 return ORDERED_EXPR;
2626 case COMPCODE_UNORD:
2627 return UNORDERED_EXPR;
2628 case COMPCODE_UNLT:
2629 return UNLT_EXPR;
2630 case COMPCODE_UNEQ:
2631 return UNEQ_EXPR;
2632 case COMPCODE_UNLE:
2633 return UNLE_EXPR;
2634 case COMPCODE_UNGT:
2635 return UNGT_EXPR;
2636 case COMPCODE_LTGT:
2637 return LTGT_EXPR;
2638 case COMPCODE_UNGE:
2639 return UNGE_EXPR;
2640 default:
2641 gcc_unreachable ();
2642 }
2643 }
2644
2645 /* Return a tree for the comparison which is the combination of
2646 doing the AND or OR (depending on CODE) of the two operations LCODE
2647 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2648 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2649 if this makes the transformation invalid. */
2650
2651 tree
2652 combine_comparisons (location_t loc,
2653 enum tree_code code, enum tree_code lcode,
2654 enum tree_code rcode, tree truth_type,
2655 tree ll_arg, tree lr_arg)
2656 {
2657 bool honor_nans = HONOR_NANS (ll_arg);
2658 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2659 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2660 int compcode;
2661
2662 switch (code)
2663 {
2664 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2665 compcode = lcompcode & rcompcode;
2666 break;
2667
2668 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2669 compcode = lcompcode | rcompcode;
2670 break;
2671
2672 default:
2673 return NULL_TREE;
2674 }
2675
2676 if (!honor_nans)
2677 {
2678 /* Eliminate unordered comparisons, as well as LTGT and ORD
2679 which are not used unless the mode has NaNs. */
2680 compcode &= ~COMPCODE_UNORD;
2681 if (compcode == COMPCODE_LTGT)
2682 compcode = COMPCODE_NE;
2683 else if (compcode == COMPCODE_ORD)
2684 compcode = COMPCODE_TRUE;
2685 }
2686 else if (flag_trapping_math)
2687 {
2688 /* Check that the original operation and the optimized ones will trap
2689 under the same condition. */
2690 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2691 && (lcompcode != COMPCODE_EQ)
2692 && (lcompcode != COMPCODE_ORD);
2693 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2694 && (rcompcode != COMPCODE_EQ)
2695 && (rcompcode != COMPCODE_ORD);
2696 bool trap = (compcode & COMPCODE_UNORD) == 0
2697 && (compcode != COMPCODE_EQ)
2698 && (compcode != COMPCODE_ORD);
2699
2700 /* In a short-circuited boolean expression the LHS might be
2701 such that the RHS, if evaluated, will never trap. For
2702 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2703 if neither x nor y is NaN. (This is a mixed blessing: for
2704 example, the expression above will never trap, hence
2705 optimizing it to x < y would be invalid). */
2706 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2707 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2708 rtrap = false;
2709
2710 /* If the comparison was short-circuited, and only the RHS
2711 trapped, we may now generate a spurious trap. */
2712 if (rtrap && !ltrap
2713 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2714 return NULL_TREE;
2715
2716 /* If we changed the conditions that cause a trap, we lose. */
2717 if ((ltrap || rtrap) != trap)
2718 return NULL_TREE;
2719 }
2720
2721 if (compcode == COMPCODE_TRUE)
2722 return constant_boolean_node (true, truth_type);
2723 else if (compcode == COMPCODE_FALSE)
2724 return constant_boolean_node (false, truth_type);
2725 else
2726 {
2727 enum tree_code tcode;
2728
2729 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2730 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2731 }
2732 }
2733 \f
2734 /* Return nonzero if two operands (typically of the same tree node)
2735 are necessarily equal. FLAGS modifies behavior as follows:
2736
2737 If OEP_ONLY_CONST is set, only return nonzero for constants.
2738 This function tests whether the operands are indistinguishable;
2739 it does not test whether they are equal using C's == operation.
2740 The distinction is important for IEEE floating point, because
2741 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2742 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2743
2744 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2745 even though it may hold multiple values during a function.
2746 This is because a GCC tree node guarantees that nothing else is
2747 executed between the evaluation of its "operands" (which may often
2748 be evaluated in arbitrary order). Hence if the operands themselves
2749 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2750 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2751 unset means assuming isochronic (or instantaneous) tree equivalence.
2752 Unless comparing arbitrary expression trees, such as from different
2753 statements, this flag can usually be left unset.
2754
2755 If OEP_PURE_SAME is set, then pure functions with identical arguments
2756 are considered the same. It is used when the caller has other ways
2757 to ensure that global memory is unchanged in between.
2758
2759 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2760 not values of expressions.
2761
2762 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2763 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2764
2765 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2766 any operand with side effect. This is unnecesarily conservative in the
2767 case we know that arg0 and arg1 are in disjoint code paths (such as in
2768 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2769 addresses with TREE_CONSTANT flag set so we know that &var == &var
2770 even if var is volatile. */
2771
2772 int
2773 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2774 {
2775 /* When checking, verify at the outermost operand_equal_p call that
2776 if operand_equal_p returns non-zero then ARG0 and ARG1 has the same
2777 hash value. */
2778 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2779 {
2780 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2781 {
2782 if (arg0 != arg1)
2783 {
2784 inchash::hash hstate0 (0), hstate1 (0);
2785 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2786 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2787 hashval_t h0 = hstate0.end ();
2788 hashval_t h1 = hstate1.end ();
2789 gcc_assert (h0 == h1);
2790 }
2791 return 1;
2792 }
2793 else
2794 return 0;
2795 }
2796
2797 /* If either is ERROR_MARK, they aren't equal. */
2798 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2799 || TREE_TYPE (arg0) == error_mark_node
2800 || TREE_TYPE (arg1) == error_mark_node)
2801 return 0;
2802
2803 /* Similar, if either does not have a type (like a released SSA name),
2804 they aren't equal. */
2805 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2806 return 0;
2807
2808 /* We cannot consider pointers to different address space equal. */
2809 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2810 && POINTER_TYPE_P (TREE_TYPE (arg1))
2811 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2812 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2813 return 0;
2814
2815 /* Check equality of integer constants before bailing out due to
2816 precision differences. */
2817 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2818 {
2819 /* Address of INTEGER_CST is not defined; check that we did not forget
2820 to drop the OEP_ADDRESS_OF flags. */
2821 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2822 return tree_int_cst_equal (arg0, arg1);
2823 }
2824
2825 if (!(flags & OEP_ADDRESS_OF))
2826 {
2827 /* If both types don't have the same signedness, then we can't consider
2828 them equal. We must check this before the STRIP_NOPS calls
2829 because they may change the signedness of the arguments. As pointers
2830 strictly don't have a signedness, require either two pointers or
2831 two non-pointers as well. */
2832 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2833 || POINTER_TYPE_P (TREE_TYPE (arg0))
2834 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2835 return 0;
2836
2837 /* If both types don't have the same precision, then it is not safe
2838 to strip NOPs. */
2839 if (element_precision (TREE_TYPE (arg0))
2840 != element_precision (TREE_TYPE (arg1)))
2841 return 0;
2842
2843 STRIP_NOPS (arg0);
2844 STRIP_NOPS (arg1);
2845 }
2846 #if 0
2847 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2848 sanity check once the issue is solved. */
2849 else
2850 /* Addresses of conversions and SSA_NAMEs (and many other things)
2851 are not defined. Check that we did not forget to drop the
2852 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2853 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2854 && TREE_CODE (arg0) != SSA_NAME);
2855 #endif
2856
2857 /* In case both args are comparisons but with different comparison
2858 code, try to swap the comparison operands of one arg to produce
2859 a match and compare that variant. */
2860 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2861 && COMPARISON_CLASS_P (arg0)
2862 && COMPARISON_CLASS_P (arg1))
2863 {
2864 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2865
2866 if (TREE_CODE (arg0) == swap_code)
2867 return operand_equal_p (TREE_OPERAND (arg0, 0),
2868 TREE_OPERAND (arg1, 1), flags)
2869 && operand_equal_p (TREE_OPERAND (arg0, 1),
2870 TREE_OPERAND (arg1, 0), flags);
2871 }
2872
2873 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2874 {
2875 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2876 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2877 ;
2878 else if (flags & OEP_ADDRESS_OF)
2879 {
2880 /* If we are interested in comparing addresses ignore
2881 MEM_REF wrappings of the base that can appear just for
2882 TBAA reasons. */
2883 if (TREE_CODE (arg0) == MEM_REF
2884 && DECL_P (arg1)
2885 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2886 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2887 && integer_zerop (TREE_OPERAND (arg0, 1)))
2888 return 1;
2889 else if (TREE_CODE (arg1) == MEM_REF
2890 && DECL_P (arg0)
2891 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2892 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2893 && integer_zerop (TREE_OPERAND (arg1, 1)))
2894 return 1;
2895 return 0;
2896 }
2897 else
2898 return 0;
2899 }
2900
2901 /* When not checking adddresses, this is needed for conversions and for
2902 COMPONENT_REF. Might as well play it safe and always test this. */
2903 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2904 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2905 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2906 && !(flags & OEP_ADDRESS_OF)))
2907 return 0;
2908
2909 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2910 We don't care about side effects in that case because the SAVE_EXPR
2911 takes care of that for us. In all other cases, two expressions are
2912 equal if they have no side effects. If we have two identical
2913 expressions with side effects that should be treated the same due
2914 to the only side effects being identical SAVE_EXPR's, that will
2915 be detected in the recursive calls below.
2916 If we are taking an invariant address of two identical objects
2917 they are necessarily equal as well. */
2918 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2919 && (TREE_CODE (arg0) == SAVE_EXPR
2920 || (flags & OEP_MATCH_SIDE_EFFECTS)
2921 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2922 return 1;
2923
2924 /* Next handle constant cases, those for which we can return 1 even
2925 if ONLY_CONST is set. */
2926 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2927 switch (TREE_CODE (arg0))
2928 {
2929 case INTEGER_CST:
2930 return tree_int_cst_equal (arg0, arg1);
2931
2932 case FIXED_CST:
2933 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2934 TREE_FIXED_CST (arg1));
2935
2936 case REAL_CST:
2937 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2938 return 1;
2939
2940
2941 if (!HONOR_SIGNED_ZEROS (arg0))
2942 {
2943 /* If we do not distinguish between signed and unsigned zero,
2944 consider them equal. */
2945 if (real_zerop (arg0) && real_zerop (arg1))
2946 return 1;
2947 }
2948 return 0;
2949
2950 case VECTOR_CST:
2951 {
2952 unsigned i;
2953
2954 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2955 return 0;
2956
2957 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2958 {
2959 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2960 VECTOR_CST_ELT (arg1, i), flags))
2961 return 0;
2962 }
2963 return 1;
2964 }
2965
2966 case COMPLEX_CST:
2967 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2968 flags)
2969 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2970 flags));
2971
2972 case STRING_CST:
2973 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2974 && ! memcmp (TREE_STRING_POINTER (arg0),
2975 TREE_STRING_POINTER (arg1),
2976 TREE_STRING_LENGTH (arg0)));
2977
2978 case ADDR_EXPR:
2979 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2980 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2981 flags | OEP_ADDRESS_OF
2982 | OEP_MATCH_SIDE_EFFECTS);
2983 case CONSTRUCTOR:
2984 /* In GIMPLE empty constructors are allowed in initializers of
2985 aggregates. */
2986 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
2987 default:
2988 break;
2989 }
2990
2991 if (flags & OEP_ONLY_CONST)
2992 return 0;
2993
2994 /* Define macros to test an operand from arg0 and arg1 for equality and a
2995 variant that allows null and views null as being different from any
2996 non-null value. In the latter case, if either is null, the both
2997 must be; otherwise, do the normal comparison. */
2998 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2999 TREE_OPERAND (arg1, N), flags)
3000
3001 #define OP_SAME_WITH_NULL(N) \
3002 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3003 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3004
3005 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3006 {
3007 case tcc_unary:
3008 /* Two conversions are equal only if signedness and modes match. */
3009 switch (TREE_CODE (arg0))
3010 {
3011 CASE_CONVERT:
3012 case FIX_TRUNC_EXPR:
3013 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3014 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3015 return 0;
3016 break;
3017 default:
3018 break;
3019 }
3020
3021 return OP_SAME (0);
3022
3023
3024 case tcc_comparison:
3025 case tcc_binary:
3026 if (OP_SAME (0) && OP_SAME (1))
3027 return 1;
3028
3029 /* For commutative ops, allow the other order. */
3030 return (commutative_tree_code (TREE_CODE (arg0))
3031 && operand_equal_p (TREE_OPERAND (arg0, 0),
3032 TREE_OPERAND (arg1, 1), flags)
3033 && operand_equal_p (TREE_OPERAND (arg0, 1),
3034 TREE_OPERAND (arg1, 0), flags));
3035
3036 case tcc_reference:
3037 /* If either of the pointer (or reference) expressions we are
3038 dereferencing contain a side effect, these cannot be equal,
3039 but their addresses can be. */
3040 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3041 && (TREE_SIDE_EFFECTS (arg0)
3042 || TREE_SIDE_EFFECTS (arg1)))
3043 return 0;
3044
3045 switch (TREE_CODE (arg0))
3046 {
3047 case INDIRECT_REF:
3048 if (!(flags & OEP_ADDRESS_OF)
3049 && (TYPE_ALIGN (TREE_TYPE (arg0))
3050 != TYPE_ALIGN (TREE_TYPE (arg1))))
3051 return 0;
3052 flags &= ~OEP_ADDRESS_OF;
3053 return OP_SAME (0);
3054
3055 case IMAGPART_EXPR:
3056 /* Require the same offset. */
3057 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3058 TYPE_SIZE (TREE_TYPE (arg1)),
3059 flags & ~OEP_ADDRESS_OF))
3060 return 0;
3061
3062 /* Fallthru. */
3063 case REALPART_EXPR:
3064 case VIEW_CONVERT_EXPR:
3065 return OP_SAME (0);
3066
3067 case TARGET_MEM_REF:
3068 case MEM_REF:
3069 if (!(flags & OEP_ADDRESS_OF))
3070 {
3071 /* Require equal access sizes */
3072 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3073 && (!TYPE_SIZE (TREE_TYPE (arg0))
3074 || !TYPE_SIZE (TREE_TYPE (arg1))
3075 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3076 TYPE_SIZE (TREE_TYPE (arg1)),
3077 flags)))
3078 return 0;
3079 /* Verify that access happens in similar types. */
3080 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3081 return 0;
3082 /* Verify that accesses are TBAA compatible. */
3083 if (!alias_ptr_types_compatible_p
3084 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3085 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3086 || (MR_DEPENDENCE_CLIQUE (arg0)
3087 != MR_DEPENDENCE_CLIQUE (arg1))
3088 || (MR_DEPENDENCE_BASE (arg0)
3089 != MR_DEPENDENCE_BASE (arg1)))
3090 return 0;
3091 /* Verify that alignment is compatible. */
3092 if (TYPE_ALIGN (TREE_TYPE (arg0))
3093 != TYPE_ALIGN (TREE_TYPE (arg1)))
3094 return 0;
3095 }
3096 flags &= ~OEP_ADDRESS_OF;
3097 return (OP_SAME (0) && OP_SAME (1)
3098 /* TARGET_MEM_REF require equal extra operands. */
3099 && (TREE_CODE (arg0) != TARGET_MEM_REF
3100 || (OP_SAME_WITH_NULL (2)
3101 && OP_SAME_WITH_NULL (3)
3102 && OP_SAME_WITH_NULL (4))));
3103
3104 case ARRAY_REF:
3105 case ARRAY_RANGE_REF:
3106 if (!OP_SAME (0))
3107 return 0;
3108 flags &= ~OEP_ADDRESS_OF;
3109 /* Compare the array index by value if it is constant first as we
3110 may have different types but same value here. */
3111 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3112 TREE_OPERAND (arg1, 1))
3113 || OP_SAME (1))
3114 && OP_SAME_WITH_NULL (2)
3115 && OP_SAME_WITH_NULL (3)
3116 /* Compare low bound and element size as with OEP_ADDRESS_OF
3117 we have to account for the offset of the ref. */
3118 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3119 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3120 || (operand_equal_p (array_ref_low_bound
3121 (CONST_CAST_TREE (arg0)),
3122 array_ref_low_bound
3123 (CONST_CAST_TREE (arg1)), flags)
3124 && operand_equal_p (array_ref_element_size
3125 (CONST_CAST_TREE (arg0)),
3126 array_ref_element_size
3127 (CONST_CAST_TREE (arg1)),
3128 flags))));
3129
3130 case COMPONENT_REF:
3131 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3132 may be NULL when we're called to compare MEM_EXPRs. */
3133 if (!OP_SAME_WITH_NULL (0)
3134 || !OP_SAME (1))
3135 return 0;
3136 flags &= ~OEP_ADDRESS_OF;
3137 return OP_SAME_WITH_NULL (2);
3138
3139 case BIT_FIELD_REF:
3140 if (!OP_SAME (0))
3141 return 0;
3142 flags &= ~OEP_ADDRESS_OF;
3143 return OP_SAME (1) && OP_SAME (2);
3144
3145 default:
3146 return 0;
3147 }
3148
3149 case tcc_expression:
3150 switch (TREE_CODE (arg0))
3151 {
3152 case ADDR_EXPR:
3153 /* Be sure we pass right ADDRESS_OF flag. */
3154 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3155 return operand_equal_p (TREE_OPERAND (arg0, 0),
3156 TREE_OPERAND (arg1, 0),
3157 flags | OEP_ADDRESS_OF);
3158
3159 case TRUTH_NOT_EXPR:
3160 return OP_SAME (0);
3161
3162 case TRUTH_ANDIF_EXPR:
3163 case TRUTH_ORIF_EXPR:
3164 return OP_SAME (0) && OP_SAME (1);
3165
3166 case FMA_EXPR:
3167 case WIDEN_MULT_PLUS_EXPR:
3168 case WIDEN_MULT_MINUS_EXPR:
3169 if (!OP_SAME (2))
3170 return 0;
3171 /* The multiplcation operands are commutative. */
3172 /* FALLTHRU */
3173
3174 case TRUTH_AND_EXPR:
3175 case TRUTH_OR_EXPR:
3176 case TRUTH_XOR_EXPR:
3177 if (OP_SAME (0) && OP_SAME (1))
3178 return 1;
3179
3180 /* Otherwise take into account this is a commutative operation. */
3181 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3182 TREE_OPERAND (arg1, 1), flags)
3183 && operand_equal_p (TREE_OPERAND (arg0, 1),
3184 TREE_OPERAND (arg1, 0), flags));
3185
3186 case COND_EXPR:
3187 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3188 return 0;
3189 flags &= ~OEP_ADDRESS_OF;
3190 return OP_SAME (0);
3191
3192 case BIT_INSERT_EXPR:
3193 /* BIT_INSERT_EXPR has an implict operand as the type precision
3194 of op1. Need to check to make sure they are the same. */
3195 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3196 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3197 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3198 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3199 return false;
3200 /* FALLTHRU */
3201
3202 case VEC_COND_EXPR:
3203 case DOT_PROD_EXPR:
3204 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3205
3206 case MODIFY_EXPR:
3207 case INIT_EXPR:
3208 case COMPOUND_EXPR:
3209 case PREDECREMENT_EXPR:
3210 case PREINCREMENT_EXPR:
3211 case POSTDECREMENT_EXPR:
3212 case POSTINCREMENT_EXPR:
3213 if (flags & OEP_LEXICOGRAPHIC)
3214 return OP_SAME (0) && OP_SAME (1);
3215 return 0;
3216
3217 case CLEANUP_POINT_EXPR:
3218 case EXPR_STMT:
3219 if (flags & OEP_LEXICOGRAPHIC)
3220 return OP_SAME (0);
3221 return 0;
3222
3223 default:
3224 return 0;
3225 }
3226
3227 case tcc_vl_exp:
3228 switch (TREE_CODE (arg0))
3229 {
3230 case CALL_EXPR:
3231 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3232 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3233 /* If not both CALL_EXPRs are either internal or normal function
3234 functions, then they are not equal. */
3235 return 0;
3236 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3237 {
3238 /* If the CALL_EXPRs call different internal functions, then they
3239 are not equal. */
3240 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3241 return 0;
3242 }
3243 else
3244 {
3245 /* If the CALL_EXPRs call different functions, then they are not
3246 equal. */
3247 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3248 flags))
3249 return 0;
3250 }
3251
3252 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3253 {
3254 unsigned int cef = call_expr_flags (arg0);
3255 if (flags & OEP_PURE_SAME)
3256 cef &= ECF_CONST | ECF_PURE;
3257 else
3258 cef &= ECF_CONST;
3259 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3260 return 0;
3261 }
3262
3263 /* Now see if all the arguments are the same. */
3264 {
3265 const_call_expr_arg_iterator iter0, iter1;
3266 const_tree a0, a1;
3267 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3268 a1 = first_const_call_expr_arg (arg1, &iter1);
3269 a0 && a1;
3270 a0 = next_const_call_expr_arg (&iter0),
3271 a1 = next_const_call_expr_arg (&iter1))
3272 if (! operand_equal_p (a0, a1, flags))
3273 return 0;
3274
3275 /* If we get here and both argument lists are exhausted
3276 then the CALL_EXPRs are equal. */
3277 return ! (a0 || a1);
3278 }
3279 default:
3280 return 0;
3281 }
3282
3283 case tcc_declaration:
3284 /* Consider __builtin_sqrt equal to sqrt. */
3285 return (TREE_CODE (arg0) == FUNCTION_DECL
3286 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3287 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3288 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3289
3290 case tcc_exceptional:
3291 if (TREE_CODE (arg0) == CONSTRUCTOR)
3292 {
3293 /* In GIMPLE constructors are used only to build vectors from
3294 elements. Individual elements in the constructor must be
3295 indexed in increasing order and form an initial sequence.
3296
3297 We make no effort to compare constructors in generic.
3298 (see sem_variable::equals in ipa-icf which can do so for
3299 constants). */
3300 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3301 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3302 return 0;
3303
3304 /* Be sure that vectors constructed have the same representation.
3305 We only tested element precision and modes to match.
3306 Vectors may be BLKmode and thus also check that the number of
3307 parts match. */
3308 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3309 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3310 return 0;
3311
3312 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3313 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3314 unsigned int len = vec_safe_length (v0);
3315
3316 if (len != vec_safe_length (v1))
3317 return 0;
3318
3319 for (unsigned int i = 0; i < len; i++)
3320 {
3321 constructor_elt *c0 = &(*v0)[i];
3322 constructor_elt *c1 = &(*v1)[i];
3323
3324 if (!operand_equal_p (c0->value, c1->value, flags)
3325 /* In GIMPLE the indexes can be either NULL or matching i.
3326 Double check this so we won't get false
3327 positives for GENERIC. */
3328 || (c0->index
3329 && (TREE_CODE (c0->index) != INTEGER_CST
3330 || !compare_tree_int (c0->index, i)))
3331 || (c1->index
3332 && (TREE_CODE (c1->index) != INTEGER_CST
3333 || !compare_tree_int (c1->index, i))))
3334 return 0;
3335 }
3336 return 1;
3337 }
3338 else if (TREE_CODE (arg0) == STATEMENT_LIST
3339 && (flags & OEP_LEXICOGRAPHIC))
3340 {
3341 /* Compare the STATEMENT_LISTs. */
3342 tree_stmt_iterator tsi1, tsi2;
3343 tree body1 = CONST_CAST_TREE (arg0);
3344 tree body2 = CONST_CAST_TREE (arg1);
3345 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3346 tsi_next (&tsi1), tsi_next (&tsi2))
3347 {
3348 /* The lists don't have the same number of statements. */
3349 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3350 return 0;
3351 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3352 return 1;
3353 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3354 OEP_LEXICOGRAPHIC))
3355 return 0;
3356 }
3357 }
3358 return 0;
3359
3360 case tcc_statement:
3361 switch (TREE_CODE (arg0))
3362 {
3363 case RETURN_EXPR:
3364 if (flags & OEP_LEXICOGRAPHIC)
3365 return OP_SAME_WITH_NULL (0);
3366 return 0;
3367 default:
3368 return 0;
3369 }
3370
3371 default:
3372 return 0;
3373 }
3374
3375 #undef OP_SAME
3376 #undef OP_SAME_WITH_NULL
3377 }
3378 \f
3379 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3380 with a different signedness or a narrower precision. */
3381
3382 static bool
3383 operand_equal_for_comparison_p (tree arg0, tree arg1)
3384 {
3385 if (operand_equal_p (arg0, arg1, 0))
3386 return true;
3387
3388 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3389 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3390 return false;
3391
3392 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3393 and see if the inner values are the same. This removes any
3394 signedness comparison, which doesn't matter here. */
3395 tree op0 = arg0;
3396 tree op1 = arg1;
3397 STRIP_NOPS (op0);
3398 STRIP_NOPS (op1);
3399 if (operand_equal_p (op0, op1, 0))
3400 return true;
3401
3402 /* Discard a single widening conversion from ARG1 and see if the inner
3403 value is the same as ARG0. */
3404 if (CONVERT_EXPR_P (arg1)
3405 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3406 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3407 < TYPE_PRECISION (TREE_TYPE (arg1))
3408 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3409 return true;
3410
3411 return false;
3412 }
3413 \f
3414 /* See if ARG is an expression that is either a comparison or is performing
3415 arithmetic on comparisons. The comparisons must only be comparing
3416 two different values, which will be stored in *CVAL1 and *CVAL2; if
3417 they are nonzero it means that some operands have already been found.
3418 No variables may be used anywhere else in the expression except in the
3419 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3420 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3421
3422 If this is true, return 1. Otherwise, return zero. */
3423
3424 static int
3425 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3426 {
3427 enum tree_code code = TREE_CODE (arg);
3428 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3429
3430 /* We can handle some of the tcc_expression cases here. */
3431 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3432 tclass = tcc_unary;
3433 else if (tclass == tcc_expression
3434 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3435 || code == COMPOUND_EXPR))
3436 tclass = tcc_binary;
3437
3438 else if (tclass == tcc_expression && code == SAVE_EXPR
3439 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3440 {
3441 /* If we've already found a CVAL1 or CVAL2, this expression is
3442 two complex to handle. */
3443 if (*cval1 || *cval2)
3444 return 0;
3445
3446 tclass = tcc_unary;
3447 *save_p = 1;
3448 }
3449
3450 switch (tclass)
3451 {
3452 case tcc_unary:
3453 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3454
3455 case tcc_binary:
3456 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3457 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3458 cval1, cval2, save_p));
3459
3460 case tcc_constant:
3461 return 1;
3462
3463 case tcc_expression:
3464 if (code == COND_EXPR)
3465 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3466 cval1, cval2, save_p)
3467 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3468 cval1, cval2, save_p)
3469 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3470 cval1, cval2, save_p));
3471 return 0;
3472
3473 case tcc_comparison:
3474 /* First see if we can handle the first operand, then the second. For
3475 the second operand, we know *CVAL1 can't be zero. It must be that
3476 one side of the comparison is each of the values; test for the
3477 case where this isn't true by failing if the two operands
3478 are the same. */
3479
3480 if (operand_equal_p (TREE_OPERAND (arg, 0),
3481 TREE_OPERAND (arg, 1), 0))
3482 return 0;
3483
3484 if (*cval1 == 0)
3485 *cval1 = TREE_OPERAND (arg, 0);
3486 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3487 ;
3488 else if (*cval2 == 0)
3489 *cval2 = TREE_OPERAND (arg, 0);
3490 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3491 ;
3492 else
3493 return 0;
3494
3495 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3496 ;
3497 else if (*cval2 == 0)
3498 *cval2 = TREE_OPERAND (arg, 1);
3499 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3500 ;
3501 else
3502 return 0;
3503
3504 return 1;
3505
3506 default:
3507 return 0;
3508 }
3509 }
3510 \f
3511 /* ARG is a tree that is known to contain just arithmetic operations and
3512 comparisons. Evaluate the operations in the tree substituting NEW0 for
3513 any occurrence of OLD0 as an operand of a comparison and likewise for
3514 NEW1 and OLD1. */
3515
3516 static tree
3517 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3518 tree old1, tree new1)
3519 {
3520 tree type = TREE_TYPE (arg);
3521 enum tree_code code = TREE_CODE (arg);
3522 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3523
3524 /* We can handle some of the tcc_expression cases here. */
3525 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3526 tclass = tcc_unary;
3527 else if (tclass == tcc_expression
3528 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3529 tclass = tcc_binary;
3530
3531 switch (tclass)
3532 {
3533 case tcc_unary:
3534 return fold_build1_loc (loc, code, type,
3535 eval_subst (loc, TREE_OPERAND (arg, 0),
3536 old0, new0, old1, new1));
3537
3538 case tcc_binary:
3539 return fold_build2_loc (loc, code, type,
3540 eval_subst (loc, TREE_OPERAND (arg, 0),
3541 old0, new0, old1, new1),
3542 eval_subst (loc, TREE_OPERAND (arg, 1),
3543 old0, new0, old1, new1));
3544
3545 case tcc_expression:
3546 switch (code)
3547 {
3548 case SAVE_EXPR:
3549 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3550 old1, new1);
3551
3552 case COMPOUND_EXPR:
3553 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3554 old1, new1);
3555
3556 case COND_EXPR:
3557 return fold_build3_loc (loc, code, type,
3558 eval_subst (loc, TREE_OPERAND (arg, 0),
3559 old0, new0, old1, new1),
3560 eval_subst (loc, TREE_OPERAND (arg, 1),
3561 old0, new0, old1, new1),
3562 eval_subst (loc, TREE_OPERAND (arg, 2),
3563 old0, new0, old1, new1));
3564 default:
3565 break;
3566 }
3567 /* Fall through - ??? */
3568
3569 case tcc_comparison:
3570 {
3571 tree arg0 = TREE_OPERAND (arg, 0);
3572 tree arg1 = TREE_OPERAND (arg, 1);
3573
3574 /* We need to check both for exact equality and tree equality. The
3575 former will be true if the operand has a side-effect. In that
3576 case, we know the operand occurred exactly once. */
3577
3578 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3579 arg0 = new0;
3580 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3581 arg0 = new1;
3582
3583 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3584 arg1 = new0;
3585 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3586 arg1 = new1;
3587
3588 return fold_build2_loc (loc, code, type, arg0, arg1);
3589 }
3590
3591 default:
3592 return arg;
3593 }
3594 }
3595 \f
3596 /* Return a tree for the case when the result of an expression is RESULT
3597 converted to TYPE and OMITTED was previously an operand of the expression
3598 but is now not needed (e.g., we folded OMITTED * 0).
3599
3600 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3601 the conversion of RESULT to TYPE. */
3602
3603 tree
3604 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3605 {
3606 tree t = fold_convert_loc (loc, type, result);
3607
3608 /* If the resulting operand is an empty statement, just return the omitted
3609 statement casted to void. */
3610 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3611 return build1_loc (loc, NOP_EXPR, void_type_node,
3612 fold_ignored_result (omitted));
3613
3614 if (TREE_SIDE_EFFECTS (omitted))
3615 return build2_loc (loc, COMPOUND_EXPR, type,
3616 fold_ignored_result (omitted), t);
3617
3618 return non_lvalue_loc (loc, t);
3619 }
3620
3621 /* Return a tree for the case when the result of an expression is RESULT
3622 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3623 of the expression but are now not needed.
3624
3625 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3626 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3627 evaluated before OMITTED2. Otherwise, if neither has side effects,
3628 just do the conversion of RESULT to TYPE. */
3629
3630 tree
3631 omit_two_operands_loc (location_t loc, tree type, tree result,
3632 tree omitted1, tree omitted2)
3633 {
3634 tree t = fold_convert_loc (loc, type, result);
3635
3636 if (TREE_SIDE_EFFECTS (omitted2))
3637 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3638 if (TREE_SIDE_EFFECTS (omitted1))
3639 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3640
3641 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3642 }
3643
3644 \f
3645 /* Return a simplified tree node for the truth-negation of ARG. This
3646 never alters ARG itself. We assume that ARG is an operation that
3647 returns a truth value (0 or 1).
3648
3649 FIXME: one would think we would fold the result, but it causes
3650 problems with the dominator optimizer. */
3651
3652 static tree
3653 fold_truth_not_expr (location_t loc, tree arg)
3654 {
3655 tree type = TREE_TYPE (arg);
3656 enum tree_code code = TREE_CODE (arg);
3657 location_t loc1, loc2;
3658
3659 /* If this is a comparison, we can simply invert it, except for
3660 floating-point non-equality comparisons, in which case we just
3661 enclose a TRUTH_NOT_EXPR around what we have. */
3662
3663 if (TREE_CODE_CLASS (code) == tcc_comparison)
3664 {
3665 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3666 if (FLOAT_TYPE_P (op_type)
3667 && flag_trapping_math
3668 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3669 && code != NE_EXPR && code != EQ_EXPR)
3670 return NULL_TREE;
3671
3672 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3673 if (code == ERROR_MARK)
3674 return NULL_TREE;
3675
3676 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3677 TREE_OPERAND (arg, 1));
3678 if (TREE_NO_WARNING (arg))
3679 TREE_NO_WARNING (ret) = 1;
3680 return ret;
3681 }
3682
3683 switch (code)
3684 {
3685 case INTEGER_CST:
3686 return constant_boolean_node (integer_zerop (arg), type);
3687
3688 case TRUTH_AND_EXPR:
3689 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3690 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3691 return build2_loc (loc, TRUTH_OR_EXPR, type,
3692 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3693 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3694
3695 case TRUTH_OR_EXPR:
3696 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3697 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3698 return build2_loc (loc, TRUTH_AND_EXPR, type,
3699 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3700 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3701
3702 case TRUTH_XOR_EXPR:
3703 /* Here we can invert either operand. We invert the first operand
3704 unless the second operand is a TRUTH_NOT_EXPR in which case our
3705 result is the XOR of the first operand with the inside of the
3706 negation of the second operand. */
3707
3708 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3709 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3710 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3711 else
3712 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3713 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3714 TREE_OPERAND (arg, 1));
3715
3716 case TRUTH_ANDIF_EXPR:
3717 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3718 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3719 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3720 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3721 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3722
3723 case TRUTH_ORIF_EXPR:
3724 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3725 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3726 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3727 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3728 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3729
3730 case TRUTH_NOT_EXPR:
3731 return TREE_OPERAND (arg, 0);
3732
3733 case COND_EXPR:
3734 {
3735 tree arg1 = TREE_OPERAND (arg, 1);
3736 tree arg2 = TREE_OPERAND (arg, 2);
3737
3738 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3739 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3740
3741 /* A COND_EXPR may have a throw as one operand, which
3742 then has void type. Just leave void operands
3743 as they are. */
3744 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3745 VOID_TYPE_P (TREE_TYPE (arg1))
3746 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3747 VOID_TYPE_P (TREE_TYPE (arg2))
3748 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3749 }
3750
3751 case COMPOUND_EXPR:
3752 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3753 return build2_loc (loc, COMPOUND_EXPR, type,
3754 TREE_OPERAND (arg, 0),
3755 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3756
3757 case NON_LVALUE_EXPR:
3758 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3759 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3760
3761 CASE_CONVERT:
3762 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3763 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3764
3765 /* fall through */
3766
3767 case FLOAT_EXPR:
3768 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3769 return build1_loc (loc, TREE_CODE (arg), type,
3770 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3771
3772 case BIT_AND_EXPR:
3773 if (!integer_onep (TREE_OPERAND (arg, 1)))
3774 return NULL_TREE;
3775 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3776
3777 case SAVE_EXPR:
3778 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3779
3780 case CLEANUP_POINT_EXPR:
3781 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3782 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3783 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3784
3785 default:
3786 return NULL_TREE;
3787 }
3788 }
3789
3790 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3791 assume that ARG is an operation that returns a truth value (0 or 1
3792 for scalars, 0 or -1 for vectors). Return the folded expression if
3793 folding is successful. Otherwise, return NULL_TREE. */
3794
3795 static tree
3796 fold_invert_truthvalue (location_t loc, tree arg)
3797 {
3798 tree type = TREE_TYPE (arg);
3799 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3800 ? BIT_NOT_EXPR
3801 : TRUTH_NOT_EXPR,
3802 type, arg);
3803 }
3804
3805 /* Return a simplified tree node for the truth-negation of ARG. This
3806 never alters ARG itself. We assume that ARG is an operation that
3807 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3808
3809 tree
3810 invert_truthvalue_loc (location_t loc, tree arg)
3811 {
3812 if (TREE_CODE (arg) == ERROR_MARK)
3813 return arg;
3814
3815 tree type = TREE_TYPE (arg);
3816 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3817 ? BIT_NOT_EXPR
3818 : TRUTH_NOT_EXPR,
3819 type, arg);
3820 }
3821 \f
3822 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3823 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3824 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3825 is the original memory reference used to preserve the alias set of
3826 the access. */
3827
3828 static tree
3829 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3830 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3831 int unsignedp, int reversep)
3832 {
3833 tree result, bftype;
3834
3835 /* Attempt not to lose the access path if possible. */
3836 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3837 {
3838 tree ninner = TREE_OPERAND (orig_inner, 0);
3839 machine_mode nmode;
3840 HOST_WIDE_INT nbitsize, nbitpos;
3841 tree noffset;
3842 int nunsignedp, nreversep, nvolatilep = 0;
3843 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3844 &noffset, &nmode, &nunsignedp,
3845 &nreversep, &nvolatilep);
3846 if (base == inner
3847 && noffset == NULL_TREE
3848 && nbitsize >= bitsize
3849 && nbitpos <= bitpos
3850 && bitpos + bitsize <= nbitpos + nbitsize
3851 && !reversep
3852 && !nreversep
3853 && !nvolatilep)
3854 {
3855 inner = ninner;
3856 bitpos -= nbitpos;
3857 }
3858 }
3859
3860 alias_set_type iset = get_alias_set (orig_inner);
3861 if (iset == 0 && get_alias_set (inner) != iset)
3862 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3863 build_fold_addr_expr (inner),
3864 build_int_cst (ptr_type_node, 0));
3865
3866 if (bitpos == 0 && !reversep)
3867 {
3868 tree size = TYPE_SIZE (TREE_TYPE (inner));
3869 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3870 || POINTER_TYPE_P (TREE_TYPE (inner)))
3871 && tree_fits_shwi_p (size)
3872 && tree_to_shwi (size) == bitsize)
3873 return fold_convert_loc (loc, type, inner);
3874 }
3875
3876 bftype = type;
3877 if (TYPE_PRECISION (bftype) != bitsize
3878 || TYPE_UNSIGNED (bftype) == !unsignedp)
3879 bftype = build_nonstandard_integer_type (bitsize, 0);
3880
3881 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3882 bitsize_int (bitsize), bitsize_int (bitpos));
3883 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3884
3885 if (bftype != type)
3886 result = fold_convert_loc (loc, type, result);
3887
3888 return result;
3889 }
3890
3891 /* Optimize a bit-field compare.
3892
3893 There are two cases: First is a compare against a constant and the
3894 second is a comparison of two items where the fields are at the same
3895 bit position relative to the start of a chunk (byte, halfword, word)
3896 large enough to contain it. In these cases we can avoid the shift
3897 implicit in bitfield extractions.
3898
3899 For constants, we emit a compare of the shifted constant with the
3900 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3901 compared. For two fields at the same position, we do the ANDs with the
3902 similar mask and compare the result of the ANDs.
3903
3904 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3905 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3906 are the left and right operands of the comparison, respectively.
3907
3908 If the optimization described above can be done, we return the resulting
3909 tree. Otherwise we return zero. */
3910
3911 static tree
3912 optimize_bit_field_compare (location_t loc, enum tree_code code,
3913 tree compare_type, tree lhs, tree rhs)
3914 {
3915 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3916 tree type = TREE_TYPE (lhs);
3917 tree unsigned_type;
3918 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3919 machine_mode lmode, rmode;
3920 scalar_int_mode nmode;
3921 int lunsignedp, runsignedp;
3922 int lreversep, rreversep;
3923 int lvolatilep = 0, rvolatilep = 0;
3924 tree linner, rinner = NULL_TREE;
3925 tree mask;
3926 tree offset;
3927
3928 /* Get all the information about the extractions being done. If the bit size
3929 if the same as the size of the underlying object, we aren't doing an
3930 extraction at all and so can do nothing. We also don't want to
3931 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3932 then will no longer be able to replace it. */
3933 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3934 &lunsignedp, &lreversep, &lvolatilep);
3935 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3936 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3937 return 0;
3938
3939 if (const_p)
3940 rreversep = lreversep;
3941 else
3942 {
3943 /* If this is not a constant, we can only do something if bit positions,
3944 sizes, signedness and storage order are the same. */
3945 rinner
3946 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3947 &runsignedp, &rreversep, &rvolatilep);
3948
3949 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3950 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3951 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3952 return 0;
3953 }
3954
3955 /* Honor the C++ memory model and mimic what RTL expansion does. */
3956 unsigned HOST_WIDE_INT bitstart = 0;
3957 unsigned HOST_WIDE_INT bitend = 0;
3958 if (TREE_CODE (lhs) == COMPONENT_REF)
3959 {
3960 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
3961 if (offset != NULL_TREE)
3962 return 0;
3963 }
3964
3965 /* See if we can find a mode to refer to this field. We should be able to,
3966 but fail if we can't. */
3967 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
3968 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3969 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3970 TYPE_ALIGN (TREE_TYPE (rinner))),
3971 BITS_PER_WORD, false, &nmode))
3972 return 0;
3973
3974 /* Set signed and unsigned types of the precision of this mode for the
3975 shifts below. */
3976 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3977
3978 /* Compute the bit position and size for the new reference and our offset
3979 within it. If the new reference is the same size as the original, we
3980 won't optimize anything, so return zero. */
3981 nbitsize = GET_MODE_BITSIZE (nmode);
3982 nbitpos = lbitpos & ~ (nbitsize - 1);
3983 lbitpos -= nbitpos;
3984 if (nbitsize == lbitsize)
3985 return 0;
3986
3987 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3988 lbitpos = nbitsize - lbitsize - lbitpos;
3989
3990 /* Make the mask to be used against the extracted field. */
3991 mask = build_int_cst_type (unsigned_type, -1);
3992 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3993 mask = const_binop (RSHIFT_EXPR, mask,
3994 size_int (nbitsize - lbitsize - lbitpos));
3995
3996 if (! const_p)
3997 {
3998 if (nbitpos < 0)
3999 return 0;
4000
4001 /* If not comparing with constant, just rework the comparison
4002 and return. */
4003 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4004 nbitsize, nbitpos, 1, lreversep);
4005 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4006 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4007 nbitsize, nbitpos, 1, rreversep);
4008 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4009 return fold_build2_loc (loc, code, compare_type, t1, t2);
4010 }
4011
4012 /* Otherwise, we are handling the constant case. See if the constant is too
4013 big for the field. Warn and return a tree for 0 (false) if so. We do
4014 this not only for its own sake, but to avoid having to test for this
4015 error case below. If we didn't, we might generate wrong code.
4016
4017 For unsigned fields, the constant shifted right by the field length should
4018 be all zero. For signed fields, the high-order bits should agree with
4019 the sign bit. */
4020
4021 if (lunsignedp)
4022 {
4023 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4024 {
4025 warning (0, "comparison is always %d due to width of bit-field",
4026 code == NE_EXPR);
4027 return constant_boolean_node (code == NE_EXPR, compare_type);
4028 }
4029 }
4030 else
4031 {
4032 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4033 if (tem != 0 && tem != -1)
4034 {
4035 warning (0, "comparison is always %d due to width of bit-field",
4036 code == NE_EXPR);
4037 return constant_boolean_node (code == NE_EXPR, compare_type);
4038 }
4039 }
4040
4041 if (nbitpos < 0)
4042 return 0;
4043
4044 /* Single-bit compares should always be against zero. */
4045 if (lbitsize == 1 && ! integer_zerop (rhs))
4046 {
4047 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4048 rhs = build_int_cst (type, 0);
4049 }
4050
4051 /* Make a new bitfield reference, shift the constant over the
4052 appropriate number of bits and mask it with the computed mask
4053 (in case this was a signed field). If we changed it, make a new one. */
4054 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4055 nbitsize, nbitpos, 1, lreversep);
4056
4057 rhs = const_binop (BIT_AND_EXPR,
4058 const_binop (LSHIFT_EXPR,
4059 fold_convert_loc (loc, unsigned_type, rhs),
4060 size_int (lbitpos)),
4061 mask);
4062
4063 lhs = build2_loc (loc, code, compare_type,
4064 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4065 return lhs;
4066 }
4067 \f
4068 /* Subroutine for fold_truth_andor_1: decode a field reference.
4069
4070 If EXP is a comparison reference, we return the innermost reference.
4071
4072 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4073 set to the starting bit number.
4074
4075 If the innermost field can be completely contained in a mode-sized
4076 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4077
4078 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4079 otherwise it is not changed.
4080
4081 *PUNSIGNEDP is set to the signedness of the field.
4082
4083 *PREVERSEP is set to the storage order of the field.
4084
4085 *PMASK is set to the mask used. This is either contained in a
4086 BIT_AND_EXPR or derived from the width of the field.
4087
4088 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4089
4090 Return 0 if this is not a component reference or is one that we can't
4091 do anything with. */
4092
4093 static tree
4094 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4095 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4096 int *punsignedp, int *preversep, int *pvolatilep,
4097 tree *pmask, tree *pand_mask)
4098 {
4099 tree exp = *exp_;
4100 tree outer_type = 0;
4101 tree and_mask = 0;
4102 tree mask, inner, offset;
4103 tree unsigned_type;
4104 unsigned int precision;
4105
4106 /* All the optimizations using this function assume integer fields.
4107 There are problems with FP fields since the type_for_size call
4108 below can fail for, e.g., XFmode. */
4109 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4110 return 0;
4111
4112 /* We are interested in the bare arrangement of bits, so strip everything
4113 that doesn't affect the machine mode. However, record the type of the
4114 outermost expression if it may matter below. */
4115 if (CONVERT_EXPR_P (exp)
4116 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4117 outer_type = TREE_TYPE (exp);
4118 STRIP_NOPS (exp);
4119
4120 if (TREE_CODE (exp) == BIT_AND_EXPR)
4121 {
4122 and_mask = TREE_OPERAND (exp, 1);
4123 exp = TREE_OPERAND (exp, 0);
4124 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4125 if (TREE_CODE (and_mask) != INTEGER_CST)
4126 return 0;
4127 }
4128
4129 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4130 punsignedp, preversep, pvolatilep);
4131 if ((inner == exp && and_mask == 0)
4132 || *pbitsize < 0 || offset != 0
4133 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4134 /* Reject out-of-bound accesses (PR79731). */
4135 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4136 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4137 *pbitpos + *pbitsize) < 0))
4138 return 0;
4139
4140 *exp_ = exp;
4141
4142 /* If the number of bits in the reference is the same as the bitsize of
4143 the outer type, then the outer type gives the signedness. Otherwise
4144 (in case of a small bitfield) the signedness is unchanged. */
4145 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4146 *punsignedp = TYPE_UNSIGNED (outer_type);
4147
4148 /* Compute the mask to access the bitfield. */
4149 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4150 precision = TYPE_PRECISION (unsigned_type);
4151
4152 mask = build_int_cst_type (unsigned_type, -1);
4153
4154 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4155 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4156
4157 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4158 if (and_mask != 0)
4159 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4160 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4161
4162 *pmask = mask;
4163 *pand_mask = and_mask;
4164 return inner;
4165 }
4166
4167 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4168 bit positions and MASK is SIGNED. */
4169
4170 static int
4171 all_ones_mask_p (const_tree mask, unsigned int size)
4172 {
4173 tree type = TREE_TYPE (mask);
4174 unsigned int precision = TYPE_PRECISION (type);
4175
4176 /* If this function returns true when the type of the mask is
4177 UNSIGNED, then there will be errors. In particular see
4178 gcc.c-torture/execute/990326-1.c. There does not appear to be
4179 any documentation paper trail as to why this is so. But the pre
4180 wide-int worked with that restriction and it has been preserved
4181 here. */
4182 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4183 return false;
4184
4185 return wi::mask (size, false, precision) == wi::to_wide (mask);
4186 }
4187
4188 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4189 represents the sign bit of EXP's type. If EXP represents a sign
4190 or zero extension, also test VAL against the unextended type.
4191 The return value is the (sub)expression whose sign bit is VAL,
4192 or NULL_TREE otherwise. */
4193
4194 tree
4195 sign_bit_p (tree exp, const_tree val)
4196 {
4197 int width;
4198 tree t;
4199
4200 /* Tree EXP must have an integral type. */
4201 t = TREE_TYPE (exp);
4202 if (! INTEGRAL_TYPE_P (t))
4203 return NULL_TREE;
4204
4205 /* Tree VAL must be an integer constant. */
4206 if (TREE_CODE (val) != INTEGER_CST
4207 || TREE_OVERFLOW (val))
4208 return NULL_TREE;
4209
4210 width = TYPE_PRECISION (t);
4211 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4212 return exp;
4213
4214 /* Handle extension from a narrower type. */
4215 if (TREE_CODE (exp) == NOP_EXPR
4216 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4217 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4218
4219 return NULL_TREE;
4220 }
4221
4222 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4223 to be evaluated unconditionally. */
4224
4225 static int
4226 simple_operand_p (const_tree exp)
4227 {
4228 /* Strip any conversions that don't change the machine mode. */
4229 STRIP_NOPS (exp);
4230
4231 return (CONSTANT_CLASS_P (exp)
4232 || TREE_CODE (exp) == SSA_NAME
4233 || (DECL_P (exp)
4234 && ! TREE_ADDRESSABLE (exp)
4235 && ! TREE_THIS_VOLATILE (exp)
4236 && ! DECL_NONLOCAL (exp)
4237 /* Don't regard global variables as simple. They may be
4238 allocated in ways unknown to the compiler (shared memory,
4239 #pragma weak, etc). */
4240 && ! TREE_PUBLIC (exp)
4241 && ! DECL_EXTERNAL (exp)
4242 /* Weakrefs are not safe to be read, since they can be NULL.
4243 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4244 have DECL_WEAK flag set. */
4245 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4246 /* Loading a static variable is unduly expensive, but global
4247 registers aren't expensive. */
4248 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4249 }
4250
4251 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4252 to be evaluated unconditionally.
4253 I addition to simple_operand_p, we assume that comparisons, conversions,
4254 and logic-not operations are simple, if their operands are simple, too. */
4255
4256 static bool
4257 simple_operand_p_2 (tree exp)
4258 {
4259 enum tree_code code;
4260
4261 if (TREE_SIDE_EFFECTS (exp)
4262 || tree_could_trap_p (exp))
4263 return false;
4264
4265 while (CONVERT_EXPR_P (exp))
4266 exp = TREE_OPERAND (exp, 0);
4267
4268 code = TREE_CODE (exp);
4269
4270 if (TREE_CODE_CLASS (code) == tcc_comparison)
4271 return (simple_operand_p (TREE_OPERAND (exp, 0))
4272 && simple_operand_p (TREE_OPERAND (exp, 1)));
4273
4274 if (code == TRUTH_NOT_EXPR)
4275 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4276
4277 return simple_operand_p (exp);
4278 }
4279
4280 \f
4281 /* The following functions are subroutines to fold_range_test and allow it to
4282 try to change a logical combination of comparisons into a range test.
4283
4284 For example, both
4285 X == 2 || X == 3 || X == 4 || X == 5
4286 and
4287 X >= 2 && X <= 5
4288 are converted to
4289 (unsigned) (X - 2) <= 3
4290
4291 We describe each set of comparisons as being either inside or outside
4292 a range, using a variable named like IN_P, and then describe the
4293 range with a lower and upper bound. If one of the bounds is omitted,
4294 it represents either the highest or lowest value of the type.
4295
4296 In the comments below, we represent a range by two numbers in brackets
4297 preceded by a "+" to designate being inside that range, or a "-" to
4298 designate being outside that range, so the condition can be inverted by
4299 flipping the prefix. An omitted bound is represented by a "-". For
4300 example, "- [-, 10]" means being outside the range starting at the lowest
4301 possible value and ending at 10, in other words, being greater than 10.
4302 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4303 always false.
4304
4305 We set up things so that the missing bounds are handled in a consistent
4306 manner so neither a missing bound nor "true" and "false" need to be
4307 handled using a special case. */
4308
4309 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4310 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4311 and UPPER1_P are nonzero if the respective argument is an upper bound
4312 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4313 must be specified for a comparison. ARG1 will be converted to ARG0's
4314 type if both are specified. */
4315
4316 static tree
4317 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4318 tree arg1, int upper1_p)
4319 {
4320 tree tem;
4321 int result;
4322 int sgn0, sgn1;
4323
4324 /* If neither arg represents infinity, do the normal operation.
4325 Else, if not a comparison, return infinity. Else handle the special
4326 comparison rules. Note that most of the cases below won't occur, but
4327 are handled for consistency. */
4328
4329 if (arg0 != 0 && arg1 != 0)
4330 {
4331 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4332 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4333 STRIP_NOPS (tem);
4334 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4335 }
4336
4337 if (TREE_CODE_CLASS (code) != tcc_comparison)
4338 return 0;
4339
4340 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4341 for neither. In real maths, we cannot assume open ended ranges are
4342 the same. But, this is computer arithmetic, where numbers are finite.
4343 We can therefore make the transformation of any unbounded range with
4344 the value Z, Z being greater than any representable number. This permits
4345 us to treat unbounded ranges as equal. */
4346 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4347 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4348 switch (code)
4349 {
4350 case EQ_EXPR:
4351 result = sgn0 == sgn1;
4352 break;
4353 case NE_EXPR:
4354 result = sgn0 != sgn1;
4355 break;
4356 case LT_EXPR:
4357 result = sgn0 < sgn1;
4358 break;
4359 case LE_EXPR:
4360 result = sgn0 <= sgn1;
4361 break;
4362 case GT_EXPR:
4363 result = sgn0 > sgn1;
4364 break;
4365 case GE_EXPR:
4366 result = sgn0 >= sgn1;
4367 break;
4368 default:
4369 gcc_unreachable ();
4370 }
4371
4372 return constant_boolean_node (result, type);
4373 }
4374 \f
4375 /* Helper routine for make_range. Perform one step for it, return
4376 new expression if the loop should continue or NULL_TREE if it should
4377 stop. */
4378
4379 tree
4380 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4381 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4382 bool *strict_overflow_p)
4383 {
4384 tree arg0_type = TREE_TYPE (arg0);
4385 tree n_low, n_high, low = *p_low, high = *p_high;
4386 int in_p = *p_in_p, n_in_p;
4387
4388 switch (code)
4389 {
4390 case TRUTH_NOT_EXPR:
4391 /* We can only do something if the range is testing for zero. */
4392 if (low == NULL_TREE || high == NULL_TREE
4393 || ! integer_zerop (low) || ! integer_zerop (high))
4394 return NULL_TREE;
4395 *p_in_p = ! in_p;
4396 return arg0;
4397
4398 case EQ_EXPR: case NE_EXPR:
4399 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4400 /* We can only do something if the range is testing for zero
4401 and if the second operand is an integer constant. Note that
4402 saying something is "in" the range we make is done by
4403 complementing IN_P since it will set in the initial case of
4404 being not equal to zero; "out" is leaving it alone. */
4405 if (low == NULL_TREE || high == NULL_TREE
4406 || ! integer_zerop (low) || ! integer_zerop (high)
4407 || TREE_CODE (arg1) != INTEGER_CST)
4408 return NULL_TREE;
4409
4410 switch (code)
4411 {
4412 case NE_EXPR: /* - [c, c] */
4413 low = high = arg1;
4414 break;
4415 case EQ_EXPR: /* + [c, c] */
4416 in_p = ! in_p, low = high = arg1;
4417 break;
4418 case GT_EXPR: /* - [-, c] */
4419 low = 0, high = arg1;
4420 break;
4421 case GE_EXPR: /* + [c, -] */
4422 in_p = ! in_p, low = arg1, high = 0;
4423 break;
4424 case LT_EXPR: /* - [c, -] */
4425 low = arg1, high = 0;
4426 break;
4427 case LE_EXPR: /* + [-, c] */
4428 in_p = ! in_p, low = 0, high = arg1;
4429 break;
4430 default:
4431 gcc_unreachable ();
4432 }
4433
4434 /* If this is an unsigned comparison, we also know that EXP is
4435 greater than or equal to zero. We base the range tests we make
4436 on that fact, so we record it here so we can parse existing
4437 range tests. We test arg0_type since often the return type
4438 of, e.g. EQ_EXPR, is boolean. */
4439 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4440 {
4441 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4442 in_p, low, high, 1,
4443 build_int_cst (arg0_type, 0),
4444 NULL_TREE))
4445 return NULL_TREE;
4446
4447 in_p = n_in_p, low = n_low, high = n_high;
4448
4449 /* If the high bound is missing, but we have a nonzero low
4450 bound, reverse the range so it goes from zero to the low bound
4451 minus 1. */
4452 if (high == 0 && low && ! integer_zerop (low))
4453 {
4454 in_p = ! in_p;
4455 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4456 build_int_cst (TREE_TYPE (low), 1), 0);
4457 low = build_int_cst (arg0_type, 0);
4458 }
4459 }
4460
4461 *p_low = low;
4462 *p_high = high;
4463 *p_in_p = in_p;
4464 return arg0;
4465
4466 case NEGATE_EXPR:
4467 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4468 low and high are non-NULL, then normalize will DTRT. */
4469 if (!TYPE_UNSIGNED (arg0_type)
4470 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4471 {
4472 if (low == NULL_TREE)
4473 low = TYPE_MIN_VALUE (arg0_type);
4474 if (high == NULL_TREE)
4475 high = TYPE_MAX_VALUE (arg0_type);
4476 }
4477
4478 /* (-x) IN [a,b] -> x in [-b, -a] */
4479 n_low = range_binop (MINUS_EXPR, exp_type,
4480 build_int_cst (exp_type, 0),
4481 0, high, 1);
4482 n_high = range_binop (MINUS_EXPR, exp_type,
4483 build_int_cst (exp_type, 0),
4484 0, low, 0);
4485 if (n_high != 0 && TREE_OVERFLOW (n_high))
4486 return NULL_TREE;
4487 goto normalize;
4488
4489 case BIT_NOT_EXPR:
4490 /* ~ X -> -X - 1 */
4491 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4492 build_int_cst (exp_type, 1));
4493
4494 case PLUS_EXPR:
4495 case MINUS_EXPR:
4496 if (TREE_CODE (arg1) != INTEGER_CST)
4497 return NULL_TREE;
4498
4499 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4500 move a constant to the other side. */
4501 if (!TYPE_UNSIGNED (arg0_type)
4502 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4503 return NULL_TREE;
4504
4505 /* If EXP is signed, any overflow in the computation is undefined,
4506 so we don't worry about it so long as our computations on
4507 the bounds don't overflow. For unsigned, overflow is defined
4508 and this is exactly the right thing. */
4509 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4510 arg0_type, low, 0, arg1, 0);
4511 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4512 arg0_type, high, 1, arg1, 0);
4513 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4514 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4515 return NULL_TREE;
4516
4517 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4518 *strict_overflow_p = true;
4519
4520 normalize:
4521 /* Check for an unsigned range which has wrapped around the maximum
4522 value thus making n_high < n_low, and normalize it. */
4523 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4524 {
4525 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4526 build_int_cst (TREE_TYPE (n_high), 1), 0);
4527 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4528 build_int_cst (TREE_TYPE (n_low), 1), 0);
4529
4530 /* If the range is of the form +/- [ x+1, x ], we won't
4531 be able to normalize it. But then, it represents the
4532 whole range or the empty set, so make it
4533 +/- [ -, - ]. */
4534 if (tree_int_cst_equal (n_low, low)
4535 && tree_int_cst_equal (n_high, high))
4536 low = high = 0;
4537 else
4538 in_p = ! in_p;
4539 }
4540 else
4541 low = n_low, high = n_high;
4542
4543 *p_low = low;
4544 *p_high = high;
4545 *p_in_p = in_p;
4546 return arg0;
4547
4548 CASE_CONVERT:
4549 case NON_LVALUE_EXPR:
4550 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4551 return NULL_TREE;
4552
4553 if (! INTEGRAL_TYPE_P (arg0_type)
4554 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4555 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4556 return NULL_TREE;
4557
4558 n_low = low, n_high = high;
4559
4560 if (n_low != 0)
4561 n_low = fold_convert_loc (loc, arg0_type, n_low);
4562
4563 if (n_high != 0)
4564 n_high = fold_convert_loc (loc, arg0_type, n_high);
4565
4566 /* If we're converting arg0 from an unsigned type, to exp,
4567 a signed type, we will be doing the comparison as unsigned.
4568 The tests above have already verified that LOW and HIGH
4569 are both positive.
4570
4571 So we have to ensure that we will handle large unsigned
4572 values the same way that the current signed bounds treat
4573 negative values. */
4574
4575 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4576 {
4577 tree high_positive;
4578 tree equiv_type;
4579 /* For fixed-point modes, we need to pass the saturating flag
4580 as the 2nd parameter. */
4581 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4582 equiv_type
4583 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4584 TYPE_SATURATING (arg0_type));
4585 else
4586 equiv_type
4587 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4588
4589 /* A range without an upper bound is, naturally, unbounded.
4590 Since convert would have cropped a very large value, use
4591 the max value for the destination type. */
4592 high_positive
4593 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4594 : TYPE_MAX_VALUE (arg0_type);
4595
4596 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4597 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4598 fold_convert_loc (loc, arg0_type,
4599 high_positive),
4600 build_int_cst (arg0_type, 1));
4601
4602 /* If the low bound is specified, "and" the range with the
4603 range for which the original unsigned value will be
4604 positive. */
4605 if (low != 0)
4606 {
4607 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4608 1, fold_convert_loc (loc, arg0_type,
4609 integer_zero_node),
4610 high_positive))
4611 return NULL_TREE;
4612
4613 in_p = (n_in_p == in_p);
4614 }
4615 else
4616 {
4617 /* Otherwise, "or" the range with the range of the input
4618 that will be interpreted as negative. */
4619 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4620 1, fold_convert_loc (loc, arg0_type,
4621 integer_zero_node),
4622 high_positive))
4623 return NULL_TREE;
4624
4625 in_p = (in_p != n_in_p);
4626 }
4627 }
4628
4629 *p_low = n_low;
4630 *p_high = n_high;
4631 *p_in_p = in_p;
4632 return arg0;
4633
4634 default:
4635 return NULL_TREE;
4636 }
4637 }
4638
4639 /* Given EXP, a logical expression, set the range it is testing into
4640 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4641 actually being tested. *PLOW and *PHIGH will be made of the same
4642 type as the returned expression. If EXP is not a comparison, we
4643 will most likely not be returning a useful value and range. Set
4644 *STRICT_OVERFLOW_P to true if the return value is only valid
4645 because signed overflow is undefined; otherwise, do not change
4646 *STRICT_OVERFLOW_P. */
4647
4648 tree
4649 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4650 bool *strict_overflow_p)
4651 {
4652 enum tree_code code;
4653 tree arg0, arg1 = NULL_TREE;
4654 tree exp_type, nexp;
4655 int in_p;
4656 tree low, high;
4657 location_t loc = EXPR_LOCATION (exp);
4658
4659 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4660 and see if we can refine the range. Some of the cases below may not
4661 happen, but it doesn't seem worth worrying about this. We "continue"
4662 the outer loop when we've changed something; otherwise we "break"
4663 the switch, which will "break" the while. */
4664
4665 in_p = 0;
4666 low = high = build_int_cst (TREE_TYPE (exp), 0);
4667
4668 while (1)
4669 {
4670 code = TREE_CODE (exp);
4671 exp_type = TREE_TYPE (exp);
4672 arg0 = NULL_TREE;
4673
4674 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4675 {
4676 if (TREE_OPERAND_LENGTH (exp) > 0)
4677 arg0 = TREE_OPERAND (exp, 0);
4678 if (TREE_CODE_CLASS (code) == tcc_binary
4679 || TREE_CODE_CLASS (code) == tcc_comparison
4680 || (TREE_CODE_CLASS (code) == tcc_expression
4681 && TREE_OPERAND_LENGTH (exp) > 1))
4682 arg1 = TREE_OPERAND (exp, 1);
4683 }
4684 if (arg0 == NULL_TREE)
4685 break;
4686
4687 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4688 &high, &in_p, strict_overflow_p);
4689 if (nexp == NULL_TREE)
4690 break;
4691 exp = nexp;
4692 }
4693
4694 /* If EXP is a constant, we can evaluate whether this is true or false. */
4695 if (TREE_CODE (exp) == INTEGER_CST)
4696 {
4697 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4698 exp, 0, low, 0))
4699 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4700 exp, 1, high, 1)));
4701 low = high = 0;
4702 exp = 0;
4703 }
4704
4705 *pin_p = in_p, *plow = low, *phigh = high;
4706 return exp;
4707 }
4708
4709 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
4710 a bitwise check i.e. when
4711 LOW == 0xXX...X00...0
4712 HIGH == 0xXX...X11...1
4713 Return corresponding mask in MASK and stem in VALUE. */
4714
4715 static bool
4716 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4717 tree *value)
4718 {
4719 if (TREE_CODE (low) != INTEGER_CST
4720 || TREE_CODE (high) != INTEGER_CST)
4721 return false;
4722
4723 unsigned prec = TYPE_PRECISION (type);
4724 wide_int lo = wi::to_wide (low, prec);
4725 wide_int hi = wi::to_wide (high, prec);
4726
4727 wide_int end_mask = lo ^ hi;
4728 if ((end_mask & (end_mask + 1)) != 0
4729 || (lo & end_mask) != 0)
4730 return false;
4731
4732 wide_int stem_mask = ~end_mask;
4733 wide_int stem = lo & stem_mask;
4734 if (stem != (hi & stem_mask))
4735 return false;
4736
4737 *mask = wide_int_to_tree (type, stem_mask);
4738 *value = wide_int_to_tree (type, stem);
4739
4740 return true;
4741 }
4742 \f
4743 /* Helper routine for build_range_check and match.pd. Return the type to
4744 perform the check or NULL if it shouldn't be optimized. */
4745
4746 tree
4747 range_check_type (tree etype)
4748 {
4749 /* First make sure that arithmetics in this type is valid, then make sure
4750 that it wraps around. */
4751 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4752 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4753 TYPE_UNSIGNED (etype));
4754
4755 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4756 {
4757 tree utype, minv, maxv;
4758
4759 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4760 for the type in question, as we rely on this here. */
4761 utype = unsigned_type_for (etype);
4762 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4763 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4764 build_int_cst (TREE_TYPE (maxv), 1), 1);
4765 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4766
4767 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4768 minv, 1, maxv, 1)))
4769 etype = utype;
4770 else
4771 return NULL_TREE;
4772 }
4773 return etype;
4774 }
4775
4776 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4777 type, TYPE, return an expression to test if EXP is in (or out of, depending
4778 on IN_P) the range. Return 0 if the test couldn't be created. */
4779
4780 tree
4781 build_range_check (location_t loc, tree type, tree exp, int in_p,
4782 tree low, tree high)
4783 {
4784 tree etype = TREE_TYPE (exp), mask, value;
4785
4786 /* Disable this optimization for function pointer expressions
4787 on targets that require function pointer canonicalization. */
4788 if (targetm.have_canonicalize_funcptr_for_compare ()
4789 && TREE_CODE (etype) == POINTER_TYPE
4790 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4791 return NULL_TREE;
4792
4793 if (! in_p)
4794 {
4795 value = build_range_check (loc, type, exp, 1, low, high);
4796 if (value != 0)
4797 return invert_truthvalue_loc (loc, value);
4798
4799 return 0;
4800 }
4801
4802 if (low == 0 && high == 0)
4803 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4804
4805 if (low == 0)
4806 return fold_build2_loc (loc, LE_EXPR, type, exp,
4807 fold_convert_loc (loc, etype, high));
4808
4809 if (high == 0)
4810 return fold_build2_loc (loc, GE_EXPR, type, exp,
4811 fold_convert_loc (loc, etype, low));
4812
4813 if (operand_equal_p (low, high, 0))
4814 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4815 fold_convert_loc (loc, etype, low));
4816
4817 if (TREE_CODE (exp) == BIT_AND_EXPR
4818 && maskable_range_p (low, high, etype, &mask, &value))
4819 return fold_build2_loc (loc, EQ_EXPR, type,
4820 fold_build2_loc (loc, BIT_AND_EXPR, etype,
4821 exp, mask),
4822 value);
4823
4824 if (integer_zerop (low))
4825 {
4826 if (! TYPE_UNSIGNED (etype))
4827 {
4828 etype = unsigned_type_for (etype);
4829 high = fold_convert_loc (loc, etype, high);
4830 exp = fold_convert_loc (loc, etype, exp);
4831 }
4832 return build_range_check (loc, type, exp, 1, 0, high);
4833 }
4834
4835 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4836 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4837 {
4838 int prec = TYPE_PRECISION (etype);
4839
4840 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
4841 {
4842 if (TYPE_UNSIGNED (etype))
4843 {
4844 tree signed_etype = signed_type_for (etype);
4845 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4846 etype
4847 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4848 else
4849 etype = signed_etype;
4850 exp = fold_convert_loc (loc, etype, exp);
4851 }
4852 return fold_build2_loc (loc, GT_EXPR, type, exp,
4853 build_int_cst (etype, 0));
4854 }
4855 }
4856
4857 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4858 This requires wrap-around arithmetics for the type of the expression. */
4859 etype = range_check_type (etype);
4860 if (etype == NULL_TREE)
4861 return NULL_TREE;
4862
4863 if (POINTER_TYPE_P (etype))
4864 etype = unsigned_type_for (etype);
4865
4866 high = fold_convert_loc (loc, etype, high);
4867 low = fold_convert_loc (loc, etype, low);
4868 exp = fold_convert_loc (loc, etype, exp);
4869
4870 value = const_binop (MINUS_EXPR, high, low);
4871
4872 if (value != 0 && !TREE_OVERFLOW (value))
4873 return build_range_check (loc, type,
4874 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4875 1, build_int_cst (etype, 0), value);
4876
4877 return 0;
4878 }
4879 \f
4880 /* Return the predecessor of VAL in its type, handling the infinite case. */
4881
4882 static tree
4883 range_predecessor (tree val)
4884 {
4885 tree type = TREE_TYPE (val);
4886
4887 if (INTEGRAL_TYPE_P (type)
4888 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4889 return 0;
4890 else
4891 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4892 build_int_cst (TREE_TYPE (val), 1), 0);
4893 }
4894
4895 /* Return the successor of VAL in its type, handling the infinite case. */
4896
4897 static tree
4898 range_successor (tree val)
4899 {
4900 tree type = TREE_TYPE (val);
4901
4902 if (INTEGRAL_TYPE_P (type)
4903 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4904 return 0;
4905 else
4906 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4907 build_int_cst (TREE_TYPE (val), 1), 0);
4908 }
4909
4910 /* Given two ranges, see if we can merge them into one. Return 1 if we
4911 can, 0 if we can't. Set the output range into the specified parameters. */
4912
4913 bool
4914 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4915 tree high0, int in1_p, tree low1, tree high1)
4916 {
4917 int no_overlap;
4918 int subset;
4919 int temp;
4920 tree tem;
4921 int in_p;
4922 tree low, high;
4923 int lowequal = ((low0 == 0 && low1 == 0)
4924 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4925 low0, 0, low1, 0)));
4926 int highequal = ((high0 == 0 && high1 == 0)
4927 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4928 high0, 1, high1, 1)));
4929
4930 /* Make range 0 be the range that starts first, or ends last if they
4931 start at the same value. Swap them if it isn't. */
4932 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4933 low0, 0, low1, 0))
4934 || (lowequal
4935 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4936 high1, 1, high0, 1))))
4937 {
4938 temp = in0_p, in0_p = in1_p, in1_p = temp;
4939 tem = low0, low0 = low1, low1 = tem;
4940 tem = high0, high0 = high1, high1 = tem;
4941 }
4942
4943 /* Now flag two cases, whether the ranges are disjoint or whether the
4944 second range is totally subsumed in the first. Note that the tests
4945 below are simplified by the ones above. */
4946 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4947 high0, 1, low1, 0));
4948 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4949 high1, 1, high0, 1));
4950
4951 /* We now have four cases, depending on whether we are including or
4952 excluding the two ranges. */
4953 if (in0_p && in1_p)
4954 {
4955 /* If they don't overlap, the result is false. If the second range
4956 is a subset it is the result. Otherwise, the range is from the start
4957 of the second to the end of the first. */
4958 if (no_overlap)
4959 in_p = 0, low = high = 0;
4960 else if (subset)
4961 in_p = 1, low = low1, high = high1;
4962 else
4963 in_p = 1, low = low1, high = high0;
4964 }
4965
4966 else if (in0_p && ! in1_p)
4967 {
4968 /* If they don't overlap, the result is the first range. If they are
4969 equal, the result is false. If the second range is a subset of the
4970 first, and the ranges begin at the same place, we go from just after
4971 the end of the second range to the end of the first. If the second
4972 range is not a subset of the first, or if it is a subset and both
4973 ranges end at the same place, the range starts at the start of the
4974 first range and ends just before the second range.
4975 Otherwise, we can't describe this as a single range. */
4976 if (no_overlap)
4977 in_p = 1, low = low0, high = high0;
4978 else if (lowequal && highequal)
4979 in_p = 0, low = high = 0;
4980 else if (subset && lowequal)
4981 {
4982 low = range_successor (high1);
4983 high = high0;
4984 in_p = 1;
4985 if (low == 0)
4986 {
4987 /* We are in the weird situation where high0 > high1 but
4988 high1 has no successor. Punt. */
4989 return 0;
4990 }
4991 }
4992 else if (! subset || highequal)
4993 {
4994 low = low0;
4995 high = range_predecessor (low1);
4996 in_p = 1;
4997 if (high == 0)
4998 {
4999 /* low0 < low1 but low1 has no predecessor. Punt. */
5000 return 0;
5001 }
5002 }
5003 else
5004 return 0;
5005 }
5006
5007 else if (! in0_p && in1_p)
5008 {
5009 /* If they don't overlap, the result is the second range. If the second
5010 is a subset of the first, the result is false. Otherwise,
5011 the range starts just after the first range and ends at the
5012 end of the second. */
5013 if (no_overlap)
5014 in_p = 1, low = low1, high = high1;
5015 else if (subset || highequal)
5016 in_p = 0, low = high = 0;
5017 else
5018 {
5019 low = range_successor (high0);
5020 high = high1;
5021 in_p = 1;
5022 if (low == 0)
5023 {
5024 /* high1 > high0 but high0 has no successor. Punt. */
5025 return 0;
5026 }
5027 }
5028 }
5029
5030 else
5031 {
5032 /* The case where we are excluding both ranges. Here the complex case
5033 is if they don't overlap. In that case, the only time we have a
5034 range is if they are adjacent. If the second is a subset of the
5035 first, the result is the first. Otherwise, the range to exclude
5036 starts at the beginning of the first range and ends at the end of the
5037 second. */
5038 if (no_overlap)
5039 {
5040 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5041 range_successor (high0),
5042 1, low1, 0)))
5043 in_p = 0, low = low0, high = high1;
5044 else
5045 {
5046 /* Canonicalize - [min, x] into - [-, x]. */
5047 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5048 switch (TREE_CODE (TREE_TYPE (low0)))
5049 {
5050 case ENUMERAL_TYPE:
5051 if (TYPE_PRECISION (TREE_TYPE (low0))
5052 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5053 break;
5054 /* FALLTHROUGH */
5055 case INTEGER_TYPE:
5056 if (tree_int_cst_equal (low0,
5057 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5058 low0 = 0;
5059 break;
5060 case POINTER_TYPE:
5061 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5062 && integer_zerop (low0))
5063 low0 = 0;
5064 break;
5065 default:
5066 break;
5067 }
5068
5069 /* Canonicalize - [x, max] into - [x, -]. */
5070 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5071 switch (TREE_CODE (TREE_TYPE (high1)))
5072 {
5073 case ENUMERAL_TYPE:
5074 if (TYPE_PRECISION (TREE_TYPE (high1))
5075 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5076 break;
5077 /* FALLTHROUGH */
5078 case INTEGER_TYPE:
5079 if (tree_int_cst_equal (high1,
5080 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5081 high1 = 0;
5082 break;
5083 case POINTER_TYPE:
5084 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5085 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5086 high1, 1,
5087 build_int_cst (TREE_TYPE (high1), 1),
5088 1)))
5089 high1 = 0;
5090 break;
5091 default:
5092 break;
5093 }
5094
5095 /* The ranges might be also adjacent between the maximum and
5096 minimum values of the given type. For
5097 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5098 return + [x + 1, y - 1]. */
5099 if (low0 == 0 && high1 == 0)
5100 {
5101 low = range_successor (high0);
5102 high = range_predecessor (low1);
5103 if (low == 0 || high == 0)
5104 return 0;
5105
5106 in_p = 1;
5107 }
5108 else
5109 return 0;
5110 }
5111 }
5112 else if (subset)
5113 in_p = 0, low = low0, high = high0;
5114 else
5115 in_p = 0, low = low0, high = high1;
5116 }
5117
5118 *pin_p = in_p, *plow = low, *phigh = high;
5119 return 1;
5120 }
5121 \f
5122
5123 /* Subroutine of fold, looking inside expressions of the form
5124 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5125 of the COND_EXPR. This function is being used also to optimize
5126 A op B ? C : A, by reversing the comparison first.
5127
5128 Return a folded expression whose code is not a COND_EXPR
5129 anymore, or NULL_TREE if no folding opportunity is found. */
5130
5131 static tree
5132 fold_cond_expr_with_comparison (location_t loc, tree type,
5133 tree arg0, tree arg1, tree arg2)
5134 {
5135 enum tree_code comp_code = TREE_CODE (arg0);
5136 tree arg00 = TREE_OPERAND (arg0, 0);
5137 tree arg01 = TREE_OPERAND (arg0, 1);
5138 tree arg1_type = TREE_TYPE (arg1);
5139 tree tem;
5140
5141 STRIP_NOPS (arg1);
5142 STRIP_NOPS (arg2);
5143
5144 /* If we have A op 0 ? A : -A, consider applying the following
5145 transformations:
5146
5147 A == 0? A : -A same as -A
5148 A != 0? A : -A same as A
5149 A >= 0? A : -A same as abs (A)
5150 A > 0? A : -A same as abs (A)
5151 A <= 0? A : -A same as -abs (A)
5152 A < 0? A : -A same as -abs (A)
5153
5154 None of these transformations work for modes with signed
5155 zeros. If A is +/-0, the first two transformations will
5156 change the sign of the result (from +0 to -0, or vice
5157 versa). The last four will fix the sign of the result,
5158 even though the original expressions could be positive or
5159 negative, depending on the sign of A.
5160
5161 Note that all these transformations are correct if A is
5162 NaN, since the two alternatives (A and -A) are also NaNs. */
5163 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5164 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5165 ? real_zerop (arg01)
5166 : integer_zerop (arg01))
5167 && ((TREE_CODE (arg2) == NEGATE_EXPR
5168 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5169 /* In the case that A is of the form X-Y, '-A' (arg2) may
5170 have already been folded to Y-X, check for that. */
5171 || (TREE_CODE (arg1) == MINUS_EXPR
5172 && TREE_CODE (arg2) == MINUS_EXPR
5173 && operand_equal_p (TREE_OPERAND (arg1, 0),
5174 TREE_OPERAND (arg2, 1), 0)
5175 && operand_equal_p (TREE_OPERAND (arg1, 1),
5176 TREE_OPERAND (arg2, 0), 0))))
5177 switch (comp_code)
5178 {
5179 case EQ_EXPR:
5180 case UNEQ_EXPR:
5181 tem = fold_convert_loc (loc, arg1_type, arg1);
5182 return fold_convert_loc (loc, type, negate_expr (tem));
5183 case NE_EXPR:
5184 case LTGT_EXPR:
5185 return fold_convert_loc (loc, type, arg1);
5186 case UNGE_EXPR:
5187 case UNGT_EXPR:
5188 if (flag_trapping_math)
5189 break;
5190 /* Fall through. */
5191 case GE_EXPR:
5192 case GT_EXPR:
5193 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5194 break;
5195 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5196 return fold_convert_loc (loc, type, tem);
5197 case UNLE_EXPR:
5198 case UNLT_EXPR:
5199 if (flag_trapping_math)
5200 break;
5201 /* FALLTHRU */
5202 case LE_EXPR:
5203 case LT_EXPR:
5204 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5205 break;
5206 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5207 return negate_expr (fold_convert_loc (loc, type, tem));
5208 default:
5209 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5210 break;
5211 }
5212
5213 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5214 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5215 both transformations are correct when A is NaN: A != 0
5216 is then true, and A == 0 is false. */
5217
5218 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5219 && integer_zerop (arg01) && integer_zerop (arg2))
5220 {
5221 if (comp_code == NE_EXPR)
5222 return fold_convert_loc (loc, type, arg1);
5223 else if (comp_code == EQ_EXPR)
5224 return build_zero_cst (type);
5225 }
5226
5227 /* Try some transformations of A op B ? A : B.
5228
5229 A == B? A : B same as B
5230 A != B? A : B same as A
5231 A >= B? A : B same as max (A, B)
5232 A > B? A : B same as max (B, A)
5233 A <= B? A : B same as min (A, B)
5234 A < B? A : B same as min (B, A)
5235
5236 As above, these transformations don't work in the presence
5237 of signed zeros. For example, if A and B are zeros of
5238 opposite sign, the first two transformations will change
5239 the sign of the result. In the last four, the original
5240 expressions give different results for (A=+0, B=-0) and
5241 (A=-0, B=+0), but the transformed expressions do not.
5242
5243 The first two transformations are correct if either A or B
5244 is a NaN. In the first transformation, the condition will
5245 be false, and B will indeed be chosen. In the case of the
5246 second transformation, the condition A != B will be true,
5247 and A will be chosen.
5248
5249 The conversions to max() and min() are not correct if B is
5250 a number and A is not. The conditions in the original
5251 expressions will be false, so all four give B. The min()
5252 and max() versions would give a NaN instead. */
5253 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5254 && operand_equal_for_comparison_p (arg01, arg2)
5255 /* Avoid these transformations if the COND_EXPR may be used
5256 as an lvalue in the C++ front-end. PR c++/19199. */
5257 && (in_gimple_form
5258 || VECTOR_TYPE_P (type)
5259 || (! lang_GNU_CXX ()
5260 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5261 || ! maybe_lvalue_p (arg1)
5262 || ! maybe_lvalue_p (arg2)))
5263 {
5264 tree comp_op0 = arg00;
5265 tree comp_op1 = arg01;
5266 tree comp_type = TREE_TYPE (comp_op0);
5267
5268 switch (comp_code)
5269 {
5270 case EQ_EXPR:
5271 return fold_convert_loc (loc, type, arg2);
5272 case NE_EXPR:
5273 return fold_convert_loc (loc, type, arg1);
5274 case LE_EXPR:
5275 case LT_EXPR:
5276 case UNLE_EXPR:
5277 case UNLT_EXPR:
5278 /* In C++ a ?: expression can be an lvalue, so put the
5279 operand which will be used if they are equal first
5280 so that we can convert this back to the
5281 corresponding COND_EXPR. */
5282 if (!HONOR_NANS (arg1))
5283 {
5284 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5285 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5286 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5287 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5288 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5289 comp_op1, comp_op0);
5290 return fold_convert_loc (loc, type, tem);
5291 }
5292 break;
5293 case GE_EXPR:
5294 case GT_EXPR:
5295 case UNGE_EXPR:
5296 case UNGT_EXPR:
5297 if (!HONOR_NANS (arg1))
5298 {
5299 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5300 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5301 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5302 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5303 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5304 comp_op1, comp_op0);
5305 return fold_convert_loc (loc, type, tem);
5306 }
5307 break;
5308 case UNEQ_EXPR:
5309 if (!HONOR_NANS (arg1))
5310 return fold_convert_loc (loc, type, arg2);
5311 break;
5312 case LTGT_EXPR:
5313 if (!HONOR_NANS (arg1))
5314 return fold_convert_loc (loc, type, arg1);
5315 break;
5316 default:
5317 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5318 break;
5319 }
5320 }
5321
5322 return NULL_TREE;
5323 }
5324
5325
5326 \f
5327 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5328 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5329 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5330 false) >= 2)
5331 #endif
5332
5333 /* EXP is some logical combination of boolean tests. See if we can
5334 merge it into some range test. Return the new tree if so. */
5335
5336 static tree
5337 fold_range_test (location_t loc, enum tree_code code, tree type,
5338 tree op0, tree op1)
5339 {
5340 int or_op = (code == TRUTH_ORIF_EXPR
5341 || code == TRUTH_OR_EXPR);
5342 int in0_p, in1_p, in_p;
5343 tree low0, low1, low, high0, high1, high;
5344 bool strict_overflow_p = false;
5345 tree tem, lhs, rhs;
5346 const char * const warnmsg = G_("assuming signed overflow does not occur "
5347 "when simplifying range test");
5348
5349 if (!INTEGRAL_TYPE_P (type))
5350 return 0;
5351
5352 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5353 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5354
5355 /* If this is an OR operation, invert both sides; we will invert
5356 again at the end. */
5357 if (or_op)
5358 in0_p = ! in0_p, in1_p = ! in1_p;
5359
5360 /* If both expressions are the same, if we can merge the ranges, and we
5361 can build the range test, return it or it inverted. If one of the
5362 ranges is always true or always false, consider it to be the same
5363 expression as the other. */
5364 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5365 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5366 in1_p, low1, high1)
5367 && 0 != (tem = (build_range_check (loc, type,
5368 lhs != 0 ? lhs
5369 : rhs != 0 ? rhs : integer_zero_node,
5370 in_p, low, high))))
5371 {
5372 if (strict_overflow_p)
5373 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5374 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5375 }
5376
5377 /* On machines where the branch cost is expensive, if this is a
5378 short-circuited branch and the underlying object on both sides
5379 is the same, make a non-short-circuit operation. */
5380 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5381 && !flag_sanitize_coverage
5382 && lhs != 0 && rhs != 0
5383 && (code == TRUTH_ANDIF_EXPR
5384 || code == TRUTH_ORIF_EXPR)
5385 && operand_equal_p (lhs, rhs, 0))
5386 {
5387 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5388 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5389 which cases we can't do this. */
5390 if (simple_operand_p (lhs))
5391 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5392 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5393 type, op0, op1);
5394
5395 else if (!lang_hooks.decls.global_bindings_p ()
5396 && !CONTAINS_PLACEHOLDER_P (lhs))
5397 {
5398 tree common = save_expr (lhs);
5399
5400 if (0 != (lhs = build_range_check (loc, type, common,
5401 or_op ? ! in0_p : in0_p,
5402 low0, high0))
5403 && (0 != (rhs = build_range_check (loc, type, common,
5404 or_op ? ! in1_p : in1_p,
5405 low1, high1))))
5406 {
5407 if (strict_overflow_p)
5408 fold_overflow_warning (warnmsg,
5409 WARN_STRICT_OVERFLOW_COMPARISON);
5410 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5411 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5412 type, lhs, rhs);
5413 }
5414 }
5415 }
5416
5417 return 0;
5418 }
5419 \f
5420 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5421 bit value. Arrange things so the extra bits will be set to zero if and
5422 only if C is signed-extended to its full width. If MASK is nonzero,
5423 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5424
5425 static tree
5426 unextend (tree c, int p, int unsignedp, tree mask)
5427 {
5428 tree type = TREE_TYPE (c);
5429 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5430 tree temp;
5431
5432 if (p == modesize || unsignedp)
5433 return c;
5434
5435 /* We work by getting just the sign bit into the low-order bit, then
5436 into the high-order bit, then sign-extend. We then XOR that value
5437 with C. */
5438 temp = build_int_cst (TREE_TYPE (c),
5439 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5440
5441 /* We must use a signed type in order to get an arithmetic right shift.
5442 However, we must also avoid introducing accidental overflows, so that
5443 a subsequent call to integer_zerop will work. Hence we must
5444 do the type conversion here. At this point, the constant is either
5445 zero or one, and the conversion to a signed type can never overflow.
5446 We could get an overflow if this conversion is done anywhere else. */
5447 if (TYPE_UNSIGNED (type))
5448 temp = fold_convert (signed_type_for (type), temp);
5449
5450 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5451 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5452 if (mask != 0)
5453 temp = const_binop (BIT_AND_EXPR, temp,
5454 fold_convert (TREE_TYPE (c), mask));
5455 /* If necessary, convert the type back to match the type of C. */
5456 if (TYPE_UNSIGNED (type))
5457 temp = fold_convert (type, temp);
5458
5459 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5460 }
5461 \f
5462 /* For an expression that has the form
5463 (A && B) || ~B
5464 or
5465 (A || B) && ~B,
5466 we can drop one of the inner expressions and simplify to
5467 A || ~B
5468 or
5469 A && ~B
5470 LOC is the location of the resulting expression. OP is the inner
5471 logical operation; the left-hand side in the examples above, while CMPOP
5472 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5473 removing a condition that guards another, as in
5474 (A != NULL && A->...) || A == NULL
5475 which we must not transform. If RHS_ONLY is true, only eliminate the
5476 right-most operand of the inner logical operation. */
5477
5478 static tree
5479 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5480 bool rhs_only)
5481 {
5482 tree type = TREE_TYPE (cmpop);
5483 enum tree_code code = TREE_CODE (cmpop);
5484 enum tree_code truthop_code = TREE_CODE (op);
5485 tree lhs = TREE_OPERAND (op, 0);
5486 tree rhs = TREE_OPERAND (op, 1);
5487 tree orig_lhs = lhs, orig_rhs = rhs;
5488 enum tree_code rhs_code = TREE_CODE (rhs);
5489 enum tree_code lhs_code = TREE_CODE (lhs);
5490 enum tree_code inv_code;
5491
5492 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5493 return NULL_TREE;
5494
5495 if (TREE_CODE_CLASS (code) != tcc_comparison)
5496 return NULL_TREE;
5497
5498 if (rhs_code == truthop_code)
5499 {
5500 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5501 if (newrhs != NULL_TREE)
5502 {
5503 rhs = newrhs;
5504 rhs_code = TREE_CODE (rhs);
5505 }
5506 }
5507 if (lhs_code == truthop_code && !rhs_only)
5508 {
5509 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5510 if (newlhs != NULL_TREE)
5511 {
5512 lhs = newlhs;
5513 lhs_code = TREE_CODE (lhs);
5514 }
5515 }
5516
5517 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5518 if (inv_code == rhs_code
5519 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5520 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5521 return lhs;
5522 if (!rhs_only && inv_code == lhs_code
5523 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5524 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5525 return rhs;
5526 if (rhs != orig_rhs || lhs != orig_lhs)
5527 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5528 lhs, rhs);
5529 return NULL_TREE;
5530 }
5531
5532 /* Find ways of folding logical expressions of LHS and RHS:
5533 Try to merge two comparisons to the same innermost item.
5534 Look for range tests like "ch >= '0' && ch <= '9'".
5535 Look for combinations of simple terms on machines with expensive branches
5536 and evaluate the RHS unconditionally.
5537
5538 For example, if we have p->a == 2 && p->b == 4 and we can make an
5539 object large enough to span both A and B, we can do this with a comparison
5540 against the object ANDed with the a mask.
5541
5542 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5543 operations to do this with one comparison.
5544
5545 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5546 function and the one above.
5547
5548 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5549 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5550
5551 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5552 two operands.
5553
5554 We return the simplified tree or 0 if no optimization is possible. */
5555
5556 static tree
5557 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5558 tree lhs, tree rhs)
5559 {
5560 /* If this is the "or" of two comparisons, we can do something if
5561 the comparisons are NE_EXPR. If this is the "and", we can do something
5562 if the comparisons are EQ_EXPR. I.e.,
5563 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5564
5565 WANTED_CODE is this operation code. For single bit fields, we can
5566 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5567 comparison for one-bit fields. */
5568
5569 enum tree_code wanted_code;
5570 enum tree_code lcode, rcode;
5571 tree ll_arg, lr_arg, rl_arg, rr_arg;
5572 tree ll_inner, lr_inner, rl_inner, rr_inner;
5573 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5574 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5575 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5576 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5577 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5578 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5579 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5580 scalar_int_mode lnmode, rnmode;
5581 tree ll_mask, lr_mask, rl_mask, rr_mask;
5582 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5583 tree l_const, r_const;
5584 tree lntype, rntype, result;
5585 HOST_WIDE_INT first_bit, end_bit;
5586 int volatilep;
5587
5588 /* Start by getting the comparison codes. Fail if anything is volatile.
5589 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5590 it were surrounded with a NE_EXPR. */
5591
5592 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5593 return 0;
5594
5595 lcode = TREE_CODE (lhs);
5596 rcode = TREE_CODE (rhs);
5597
5598 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5599 {
5600 lhs = build2 (NE_EXPR, truth_type, lhs,
5601 build_int_cst (TREE_TYPE (lhs), 0));
5602 lcode = NE_EXPR;
5603 }
5604
5605 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5606 {
5607 rhs = build2 (NE_EXPR, truth_type, rhs,
5608 build_int_cst (TREE_TYPE (rhs), 0));
5609 rcode = NE_EXPR;
5610 }
5611
5612 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5613 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5614 return 0;
5615
5616 ll_arg = TREE_OPERAND (lhs, 0);
5617 lr_arg = TREE_OPERAND (lhs, 1);
5618 rl_arg = TREE_OPERAND (rhs, 0);
5619 rr_arg = TREE_OPERAND (rhs, 1);
5620
5621 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5622 if (simple_operand_p (ll_arg)
5623 && simple_operand_p (lr_arg))
5624 {
5625 if (operand_equal_p (ll_arg, rl_arg, 0)
5626 && operand_equal_p (lr_arg, rr_arg, 0))
5627 {
5628 result = combine_comparisons (loc, code, lcode, rcode,
5629 truth_type, ll_arg, lr_arg);
5630 if (result)
5631 return result;
5632 }
5633 else if (operand_equal_p (ll_arg, rr_arg, 0)
5634 && operand_equal_p (lr_arg, rl_arg, 0))
5635 {
5636 result = combine_comparisons (loc, code, lcode,
5637 swap_tree_comparison (rcode),
5638 truth_type, ll_arg, lr_arg);
5639 if (result)
5640 return result;
5641 }
5642 }
5643
5644 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5645 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5646
5647 /* If the RHS can be evaluated unconditionally and its operands are
5648 simple, it wins to evaluate the RHS unconditionally on machines
5649 with expensive branches. In this case, this isn't a comparison
5650 that can be merged. */
5651
5652 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5653 false) >= 2
5654 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5655 && simple_operand_p (rl_arg)
5656 && simple_operand_p (rr_arg))
5657 {
5658 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5659 if (code == TRUTH_OR_EXPR
5660 && lcode == NE_EXPR && integer_zerop (lr_arg)
5661 && rcode == NE_EXPR && integer_zerop (rr_arg)
5662 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5663 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5664 return build2_loc (loc, NE_EXPR, truth_type,
5665 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5666 ll_arg, rl_arg),
5667 build_int_cst (TREE_TYPE (ll_arg), 0));
5668
5669 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5670 if (code == TRUTH_AND_EXPR
5671 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5672 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5673 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5674 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5675 return build2_loc (loc, EQ_EXPR, truth_type,
5676 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5677 ll_arg, rl_arg),
5678 build_int_cst (TREE_TYPE (ll_arg), 0));
5679 }
5680
5681 /* See if the comparisons can be merged. Then get all the parameters for
5682 each side. */
5683
5684 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5685 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5686 return 0;
5687
5688 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5689 volatilep = 0;
5690 ll_inner = decode_field_reference (loc, &ll_arg,
5691 &ll_bitsize, &ll_bitpos, &ll_mode,
5692 &ll_unsignedp, &ll_reversep, &volatilep,
5693 &ll_mask, &ll_and_mask);
5694 lr_inner = decode_field_reference (loc, &lr_arg,
5695 &lr_bitsize, &lr_bitpos, &lr_mode,
5696 &lr_unsignedp, &lr_reversep, &volatilep,
5697 &lr_mask, &lr_and_mask);
5698 rl_inner = decode_field_reference (loc, &rl_arg,
5699 &rl_bitsize, &rl_bitpos, &rl_mode,
5700 &rl_unsignedp, &rl_reversep, &volatilep,
5701 &rl_mask, &rl_and_mask);
5702 rr_inner = decode_field_reference (loc, &rr_arg,
5703 &rr_bitsize, &rr_bitpos, &rr_mode,
5704 &rr_unsignedp, &rr_reversep, &volatilep,
5705 &rr_mask, &rr_and_mask);
5706
5707 /* It must be true that the inner operation on the lhs of each
5708 comparison must be the same if we are to be able to do anything.
5709 Then see if we have constants. If not, the same must be true for
5710 the rhs's. */
5711 if (volatilep
5712 || ll_reversep != rl_reversep
5713 || ll_inner == 0 || rl_inner == 0
5714 || ! operand_equal_p (ll_inner, rl_inner, 0))
5715 return 0;
5716
5717 if (TREE_CODE (lr_arg) == INTEGER_CST
5718 && TREE_CODE (rr_arg) == INTEGER_CST)
5719 {
5720 l_const = lr_arg, r_const = rr_arg;
5721 lr_reversep = ll_reversep;
5722 }
5723 else if (lr_reversep != rr_reversep
5724 || lr_inner == 0 || rr_inner == 0
5725 || ! operand_equal_p (lr_inner, rr_inner, 0))
5726 return 0;
5727 else
5728 l_const = r_const = 0;
5729
5730 /* If either comparison code is not correct for our logical operation,
5731 fail. However, we can convert a one-bit comparison against zero into
5732 the opposite comparison against that bit being set in the field. */
5733
5734 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5735 if (lcode != wanted_code)
5736 {
5737 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5738 {
5739 /* Make the left operand unsigned, since we are only interested
5740 in the value of one bit. Otherwise we are doing the wrong
5741 thing below. */
5742 ll_unsignedp = 1;
5743 l_const = ll_mask;
5744 }
5745 else
5746 return 0;
5747 }
5748
5749 /* This is analogous to the code for l_const above. */
5750 if (rcode != wanted_code)
5751 {
5752 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5753 {
5754 rl_unsignedp = 1;
5755 r_const = rl_mask;
5756 }
5757 else
5758 return 0;
5759 }
5760
5761 /* See if we can find a mode that contains both fields being compared on
5762 the left. If we can't, fail. Otherwise, update all constants and masks
5763 to be relative to a field of that size. */
5764 first_bit = MIN (ll_bitpos, rl_bitpos);
5765 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5766 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5767 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
5768 volatilep, &lnmode))
5769 return 0;
5770
5771 lnbitsize = GET_MODE_BITSIZE (lnmode);
5772 lnbitpos = first_bit & ~ (lnbitsize - 1);
5773 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5774 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5775
5776 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5777 {
5778 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5779 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5780 }
5781
5782 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5783 size_int (xll_bitpos));
5784 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5785 size_int (xrl_bitpos));
5786
5787 if (l_const)
5788 {
5789 l_const = fold_convert_loc (loc, lntype, l_const);
5790 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5791 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5792 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5793 fold_build1_loc (loc, BIT_NOT_EXPR,
5794 lntype, ll_mask))))
5795 {
5796 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5797
5798 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5799 }
5800 }
5801 if (r_const)
5802 {
5803 r_const = fold_convert_loc (loc, lntype, r_const);
5804 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5805 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5806 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5807 fold_build1_loc (loc, BIT_NOT_EXPR,
5808 lntype, rl_mask))))
5809 {
5810 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5811
5812 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5813 }
5814 }
5815
5816 /* If the right sides are not constant, do the same for it. Also,
5817 disallow this optimization if a size or signedness mismatch occurs
5818 between the left and right sides. */
5819 if (l_const == 0)
5820 {
5821 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5822 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5823 /* Make sure the two fields on the right
5824 correspond to the left without being swapped. */
5825 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5826 return 0;
5827
5828 first_bit = MIN (lr_bitpos, rr_bitpos);
5829 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5830 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5831 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
5832 volatilep, &rnmode))
5833 return 0;
5834
5835 rnbitsize = GET_MODE_BITSIZE (rnmode);
5836 rnbitpos = first_bit & ~ (rnbitsize - 1);
5837 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5838 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5839
5840 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5841 {
5842 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5843 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5844 }
5845
5846 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5847 rntype, lr_mask),
5848 size_int (xlr_bitpos));
5849 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5850 rntype, rr_mask),
5851 size_int (xrr_bitpos));
5852
5853 /* Make a mask that corresponds to both fields being compared.
5854 Do this for both items being compared. If the operands are the
5855 same size and the bits being compared are in the same position
5856 then we can do this by masking both and comparing the masked
5857 results. */
5858 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5859 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5860 if (lnbitsize == rnbitsize
5861 && xll_bitpos == xlr_bitpos
5862 && lnbitpos >= 0
5863 && rnbitpos >= 0)
5864 {
5865 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5866 lntype, lnbitsize, lnbitpos,
5867 ll_unsignedp || rl_unsignedp, ll_reversep);
5868 if (! all_ones_mask_p (ll_mask, lnbitsize))
5869 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5870
5871 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5872 rntype, rnbitsize, rnbitpos,
5873 lr_unsignedp || rr_unsignedp, lr_reversep);
5874 if (! all_ones_mask_p (lr_mask, rnbitsize))
5875 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5876
5877 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5878 }
5879
5880 /* There is still another way we can do something: If both pairs of
5881 fields being compared are adjacent, we may be able to make a wider
5882 field containing them both.
5883
5884 Note that we still must mask the lhs/rhs expressions. Furthermore,
5885 the mask must be shifted to account for the shift done by
5886 make_bit_field_ref. */
5887 if (((ll_bitsize + ll_bitpos == rl_bitpos
5888 && lr_bitsize + lr_bitpos == rr_bitpos)
5889 || (ll_bitpos == rl_bitpos + rl_bitsize
5890 && lr_bitpos == rr_bitpos + rr_bitsize))
5891 && ll_bitpos >= 0
5892 && rl_bitpos >= 0
5893 && lr_bitpos >= 0
5894 && rr_bitpos >= 0)
5895 {
5896 tree type;
5897
5898 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5899 ll_bitsize + rl_bitsize,
5900 MIN (ll_bitpos, rl_bitpos),
5901 ll_unsignedp, ll_reversep);
5902 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5903 lr_bitsize + rr_bitsize,
5904 MIN (lr_bitpos, rr_bitpos),
5905 lr_unsignedp, lr_reversep);
5906
5907 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5908 size_int (MIN (xll_bitpos, xrl_bitpos)));
5909 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5910 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5911
5912 /* Convert to the smaller type before masking out unwanted bits. */
5913 type = lntype;
5914 if (lntype != rntype)
5915 {
5916 if (lnbitsize > rnbitsize)
5917 {
5918 lhs = fold_convert_loc (loc, rntype, lhs);
5919 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5920 type = rntype;
5921 }
5922 else if (lnbitsize < rnbitsize)
5923 {
5924 rhs = fold_convert_loc (loc, lntype, rhs);
5925 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5926 type = lntype;
5927 }
5928 }
5929
5930 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5931 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5932
5933 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5934 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5935
5936 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5937 }
5938
5939 return 0;
5940 }
5941
5942 /* Handle the case of comparisons with constants. If there is something in
5943 common between the masks, those bits of the constants must be the same.
5944 If not, the condition is always false. Test for this to avoid generating
5945 incorrect code below. */
5946 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5947 if (! integer_zerop (result)
5948 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5949 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5950 {
5951 if (wanted_code == NE_EXPR)
5952 {
5953 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5954 return constant_boolean_node (true, truth_type);
5955 }
5956 else
5957 {
5958 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5959 return constant_boolean_node (false, truth_type);
5960 }
5961 }
5962
5963 if (lnbitpos < 0)
5964 return 0;
5965
5966 /* Construct the expression we will return. First get the component
5967 reference we will make. Unless the mask is all ones the width of
5968 that field, perform the mask operation. Then compare with the
5969 merged constant. */
5970 result = make_bit_field_ref (loc, ll_inner, ll_arg,
5971 lntype, lnbitsize, lnbitpos,
5972 ll_unsignedp || rl_unsignedp, ll_reversep);
5973
5974 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5975 if (! all_ones_mask_p (ll_mask, lnbitsize))
5976 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5977
5978 return build2_loc (loc, wanted_code, truth_type, result,
5979 const_binop (BIT_IOR_EXPR, l_const, r_const));
5980 }
5981 \f
5982 /* T is an integer expression that is being multiplied, divided, or taken a
5983 modulus (CODE says which and what kind of divide or modulus) by a
5984 constant C. See if we can eliminate that operation by folding it with
5985 other operations already in T. WIDE_TYPE, if non-null, is a type that
5986 should be used for the computation if wider than our type.
5987
5988 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5989 (X * 2) + (Y * 4). We must, however, be assured that either the original
5990 expression would not overflow or that overflow is undefined for the type
5991 in the language in question.
5992
5993 If we return a non-null expression, it is an equivalent form of the
5994 original computation, but need not be in the original type.
5995
5996 We set *STRICT_OVERFLOW_P to true if the return values depends on
5997 signed overflow being undefined. Otherwise we do not change
5998 *STRICT_OVERFLOW_P. */
5999
6000 static tree
6001 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6002 bool *strict_overflow_p)
6003 {
6004 /* To avoid exponential search depth, refuse to allow recursion past
6005 three levels. Beyond that (1) it's highly unlikely that we'll find
6006 something interesting and (2) we've probably processed it before
6007 when we built the inner expression. */
6008
6009 static int depth;
6010 tree ret;
6011
6012 if (depth > 3)
6013 return NULL;
6014
6015 depth++;
6016 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6017 depth--;
6018
6019 return ret;
6020 }
6021
6022 static tree
6023 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6024 bool *strict_overflow_p)
6025 {
6026 tree type = TREE_TYPE (t);
6027 enum tree_code tcode = TREE_CODE (t);
6028 tree ctype = (wide_type != 0
6029 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6030 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6031 ? wide_type : type);
6032 tree t1, t2;
6033 int same_p = tcode == code;
6034 tree op0 = NULL_TREE, op1 = NULL_TREE;
6035 bool sub_strict_overflow_p;
6036
6037 /* Don't deal with constants of zero here; they confuse the code below. */
6038 if (integer_zerop (c))
6039 return NULL_TREE;
6040
6041 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6042 op0 = TREE_OPERAND (t, 0);
6043
6044 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6045 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6046
6047 /* Note that we need not handle conditional operations here since fold
6048 already handles those cases. So just do arithmetic here. */
6049 switch (tcode)
6050 {
6051 case INTEGER_CST:
6052 /* For a constant, we can always simplify if we are a multiply
6053 or (for divide and modulus) if it is a multiple of our constant. */
6054 if (code == MULT_EXPR
6055 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6056 TYPE_SIGN (type)))
6057 {
6058 tree tem = const_binop (code, fold_convert (ctype, t),
6059 fold_convert (ctype, c));
6060 /* If the multiplication overflowed, we lost information on it.
6061 See PR68142 and PR69845. */
6062 if (TREE_OVERFLOW (tem))
6063 return NULL_TREE;
6064 return tem;
6065 }
6066 break;
6067
6068 CASE_CONVERT: case NON_LVALUE_EXPR:
6069 /* If op0 is an expression ... */
6070 if ((COMPARISON_CLASS_P (op0)
6071 || UNARY_CLASS_P (op0)
6072 || BINARY_CLASS_P (op0)
6073 || VL_EXP_CLASS_P (op0)
6074 || EXPRESSION_CLASS_P (op0))
6075 /* ... and has wrapping overflow, and its type is smaller
6076 than ctype, then we cannot pass through as widening. */
6077 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6078 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6079 && (TYPE_PRECISION (ctype)
6080 > TYPE_PRECISION (TREE_TYPE (op0))))
6081 /* ... or this is a truncation (t is narrower than op0),
6082 then we cannot pass through this narrowing. */
6083 || (TYPE_PRECISION (type)
6084 < TYPE_PRECISION (TREE_TYPE (op0)))
6085 /* ... or signedness changes for division or modulus,
6086 then we cannot pass through this conversion. */
6087 || (code != MULT_EXPR
6088 && (TYPE_UNSIGNED (ctype)
6089 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6090 /* ... or has undefined overflow while the converted to
6091 type has not, we cannot do the operation in the inner type
6092 as that would introduce undefined overflow. */
6093 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6094 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6095 && !TYPE_OVERFLOW_UNDEFINED (type))))
6096 break;
6097
6098 /* Pass the constant down and see if we can make a simplification. If
6099 we can, replace this expression with the inner simplification for
6100 possible later conversion to our or some other type. */
6101 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6102 && TREE_CODE (t2) == INTEGER_CST
6103 && !TREE_OVERFLOW (t2)
6104 && (0 != (t1 = extract_muldiv (op0, t2, code,
6105 code == MULT_EXPR
6106 ? ctype : NULL_TREE,
6107 strict_overflow_p))))
6108 return t1;
6109 break;
6110
6111 case ABS_EXPR:
6112 /* If widening the type changes it from signed to unsigned, then we
6113 must avoid building ABS_EXPR itself as unsigned. */
6114 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6115 {
6116 tree cstype = (*signed_type_for) (ctype);
6117 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6118 != 0)
6119 {
6120 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6121 return fold_convert (ctype, t1);
6122 }
6123 break;
6124 }
6125 /* If the constant is negative, we cannot simplify this. */
6126 if (tree_int_cst_sgn (c) == -1)
6127 break;
6128 /* FALLTHROUGH */
6129 case NEGATE_EXPR:
6130 /* For division and modulus, type can't be unsigned, as e.g.
6131 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6132 For signed types, even with wrapping overflow, this is fine. */
6133 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6134 break;
6135 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6136 != 0)
6137 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6138 break;
6139
6140 case MIN_EXPR: case MAX_EXPR:
6141 /* If widening the type changes the signedness, then we can't perform
6142 this optimization as that changes the result. */
6143 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6144 break;
6145
6146 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6147 sub_strict_overflow_p = false;
6148 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6149 &sub_strict_overflow_p)) != 0
6150 && (t2 = extract_muldiv (op1, c, code, wide_type,
6151 &sub_strict_overflow_p)) != 0)
6152 {
6153 if (tree_int_cst_sgn (c) < 0)
6154 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6155 if (sub_strict_overflow_p)
6156 *strict_overflow_p = true;
6157 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6158 fold_convert (ctype, t2));
6159 }
6160 break;
6161
6162 case LSHIFT_EXPR: case RSHIFT_EXPR:
6163 /* If the second operand is constant, this is a multiplication
6164 or floor division, by a power of two, so we can treat it that
6165 way unless the multiplier or divisor overflows. Signed
6166 left-shift overflow is implementation-defined rather than
6167 undefined in C90, so do not convert signed left shift into
6168 multiplication. */
6169 if (TREE_CODE (op1) == INTEGER_CST
6170 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6171 /* const_binop may not detect overflow correctly,
6172 so check for it explicitly here. */
6173 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6174 wi::to_wide (op1))
6175 && 0 != (t1 = fold_convert (ctype,
6176 const_binop (LSHIFT_EXPR,
6177 size_one_node,
6178 op1)))
6179 && !TREE_OVERFLOW (t1))
6180 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6181 ? MULT_EXPR : FLOOR_DIV_EXPR,
6182 ctype,
6183 fold_convert (ctype, op0),
6184 t1),
6185 c, code, wide_type, strict_overflow_p);
6186 break;
6187
6188 case PLUS_EXPR: case MINUS_EXPR:
6189 /* See if we can eliminate the operation on both sides. If we can, we
6190 can return a new PLUS or MINUS. If we can't, the only remaining
6191 cases where we can do anything are if the second operand is a
6192 constant. */
6193 sub_strict_overflow_p = false;
6194 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6195 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6196 if (t1 != 0 && t2 != 0
6197 && TYPE_OVERFLOW_WRAPS (ctype)
6198 && (code == MULT_EXPR
6199 /* If not multiplication, we can only do this if both operands
6200 are divisible by c. */
6201 || (multiple_of_p (ctype, op0, c)
6202 && multiple_of_p (ctype, op1, c))))
6203 {
6204 if (sub_strict_overflow_p)
6205 *strict_overflow_p = true;
6206 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6207 fold_convert (ctype, t2));
6208 }
6209
6210 /* If this was a subtraction, negate OP1 and set it to be an addition.
6211 This simplifies the logic below. */
6212 if (tcode == MINUS_EXPR)
6213 {
6214 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6215 /* If OP1 was not easily negatable, the constant may be OP0. */
6216 if (TREE_CODE (op0) == INTEGER_CST)
6217 {
6218 std::swap (op0, op1);
6219 std::swap (t1, t2);
6220 }
6221 }
6222
6223 if (TREE_CODE (op1) != INTEGER_CST)
6224 break;
6225
6226 /* If either OP1 or C are negative, this optimization is not safe for
6227 some of the division and remainder types while for others we need
6228 to change the code. */
6229 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6230 {
6231 if (code == CEIL_DIV_EXPR)
6232 code = FLOOR_DIV_EXPR;
6233 else if (code == FLOOR_DIV_EXPR)
6234 code = CEIL_DIV_EXPR;
6235 else if (code != MULT_EXPR
6236 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6237 break;
6238 }
6239
6240 /* If it's a multiply or a division/modulus operation of a multiple
6241 of our constant, do the operation and verify it doesn't overflow. */
6242 if (code == MULT_EXPR
6243 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6244 TYPE_SIGN (type)))
6245 {
6246 op1 = const_binop (code, fold_convert (ctype, op1),
6247 fold_convert (ctype, c));
6248 /* We allow the constant to overflow with wrapping semantics. */
6249 if (op1 == 0
6250 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6251 break;
6252 }
6253 else
6254 break;
6255
6256 /* If we have an unsigned type, we cannot widen the operation since it
6257 will change the result if the original computation overflowed. */
6258 if (TYPE_UNSIGNED (ctype) && ctype != type)
6259 break;
6260
6261 /* The last case is if we are a multiply. In that case, we can
6262 apply the distributive law to commute the multiply and addition
6263 if the multiplication of the constants doesn't overflow
6264 and overflow is defined. With undefined overflow
6265 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6266 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6267 return fold_build2 (tcode, ctype,
6268 fold_build2 (code, ctype,
6269 fold_convert (ctype, op0),
6270 fold_convert (ctype, c)),
6271 op1);
6272
6273 break;
6274
6275 case MULT_EXPR:
6276 /* We have a special case here if we are doing something like
6277 (C * 8) % 4 since we know that's zero. */
6278 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6279 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6280 /* If the multiplication can overflow we cannot optimize this. */
6281 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6282 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6283 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6284 TYPE_SIGN (type)))
6285 {
6286 *strict_overflow_p = true;
6287 return omit_one_operand (type, integer_zero_node, op0);
6288 }
6289
6290 /* ... fall through ... */
6291
6292 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6293 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6294 /* If we can extract our operation from the LHS, do so and return a
6295 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6296 do something only if the second operand is a constant. */
6297 if (same_p
6298 && TYPE_OVERFLOW_WRAPS (ctype)
6299 && (t1 = extract_muldiv (op0, c, code, wide_type,
6300 strict_overflow_p)) != 0)
6301 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6302 fold_convert (ctype, op1));
6303 else if (tcode == MULT_EXPR && code == MULT_EXPR
6304 && TYPE_OVERFLOW_WRAPS (ctype)
6305 && (t1 = extract_muldiv (op1, c, code, wide_type,
6306 strict_overflow_p)) != 0)
6307 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6308 fold_convert (ctype, t1));
6309 else if (TREE_CODE (op1) != INTEGER_CST)
6310 return 0;
6311
6312 /* If these are the same operation types, we can associate them
6313 assuming no overflow. */
6314 if (tcode == code)
6315 {
6316 bool overflow_p = false;
6317 bool overflow_mul_p;
6318 signop sign = TYPE_SIGN (ctype);
6319 unsigned prec = TYPE_PRECISION (ctype);
6320 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6321 wi::to_wide (c, prec),
6322 sign, &overflow_mul_p);
6323 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6324 if (overflow_mul_p
6325 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6326 overflow_p = true;
6327 if (!overflow_p)
6328 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6329 wide_int_to_tree (ctype, mul));
6330 }
6331
6332 /* If these operations "cancel" each other, we have the main
6333 optimizations of this pass, which occur when either constant is a
6334 multiple of the other, in which case we replace this with either an
6335 operation or CODE or TCODE.
6336
6337 If we have an unsigned type, we cannot do this since it will change
6338 the result if the original computation overflowed. */
6339 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6340 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6341 || (tcode == MULT_EXPR
6342 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6343 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6344 && code != MULT_EXPR)))
6345 {
6346 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6347 TYPE_SIGN (type)))
6348 {
6349 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6350 *strict_overflow_p = true;
6351 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6352 fold_convert (ctype,
6353 const_binop (TRUNC_DIV_EXPR,
6354 op1, c)));
6355 }
6356 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6357 TYPE_SIGN (type)))
6358 {
6359 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6360 *strict_overflow_p = true;
6361 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6362 fold_convert (ctype,
6363 const_binop (TRUNC_DIV_EXPR,
6364 c, op1)));
6365 }
6366 }
6367 break;
6368
6369 default:
6370 break;
6371 }
6372
6373 return 0;
6374 }
6375 \f
6376 /* Return a node which has the indicated constant VALUE (either 0 or
6377 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6378 and is of the indicated TYPE. */
6379
6380 tree
6381 constant_boolean_node (bool value, tree type)
6382 {
6383 if (type == integer_type_node)
6384 return value ? integer_one_node : integer_zero_node;
6385 else if (type == boolean_type_node)
6386 return value ? boolean_true_node : boolean_false_node;
6387 else if (TREE_CODE (type) == VECTOR_TYPE)
6388 return build_vector_from_val (type,
6389 build_int_cst (TREE_TYPE (type),
6390 value ? -1 : 0));
6391 else
6392 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6393 }
6394
6395
6396 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6397 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6398 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6399 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6400 COND is the first argument to CODE; otherwise (as in the example
6401 given here), it is the second argument. TYPE is the type of the
6402 original expression. Return NULL_TREE if no simplification is
6403 possible. */
6404
6405 static tree
6406 fold_binary_op_with_conditional_arg (location_t loc,
6407 enum tree_code code,
6408 tree type, tree op0, tree op1,
6409 tree cond, tree arg, int cond_first_p)
6410 {
6411 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6412 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6413 tree test, true_value, false_value;
6414 tree lhs = NULL_TREE;
6415 tree rhs = NULL_TREE;
6416 enum tree_code cond_code = COND_EXPR;
6417
6418 if (TREE_CODE (cond) == COND_EXPR
6419 || TREE_CODE (cond) == VEC_COND_EXPR)
6420 {
6421 test = TREE_OPERAND (cond, 0);
6422 true_value = TREE_OPERAND (cond, 1);
6423 false_value = TREE_OPERAND (cond, 2);
6424 /* If this operand throws an expression, then it does not make
6425 sense to try to perform a logical or arithmetic operation
6426 involving it. */
6427 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6428 lhs = true_value;
6429 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6430 rhs = false_value;
6431 }
6432 else if (!(TREE_CODE (type) != VECTOR_TYPE
6433 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6434 {
6435 tree testtype = TREE_TYPE (cond);
6436 test = cond;
6437 true_value = constant_boolean_node (true, testtype);
6438 false_value = constant_boolean_node (false, testtype);
6439 }
6440 else
6441 /* Detect the case of mixing vector and scalar types - bail out. */
6442 return NULL_TREE;
6443
6444 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6445 cond_code = VEC_COND_EXPR;
6446
6447 /* This transformation is only worthwhile if we don't have to wrap ARG
6448 in a SAVE_EXPR and the operation can be simplified without recursing
6449 on at least one of the branches once its pushed inside the COND_EXPR. */
6450 if (!TREE_CONSTANT (arg)
6451 && (TREE_SIDE_EFFECTS (arg)
6452 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6453 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6454 return NULL_TREE;
6455
6456 arg = fold_convert_loc (loc, arg_type, arg);
6457 if (lhs == 0)
6458 {
6459 true_value = fold_convert_loc (loc, cond_type, true_value);
6460 if (cond_first_p)
6461 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6462 else
6463 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6464 }
6465 if (rhs == 0)
6466 {
6467 false_value = fold_convert_loc (loc, cond_type, false_value);
6468 if (cond_first_p)
6469 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6470 else
6471 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6472 }
6473
6474 /* Check that we have simplified at least one of the branches. */
6475 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6476 return NULL_TREE;
6477
6478 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6479 }
6480
6481 \f
6482 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6483
6484 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6485 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6486 ADDEND is the same as X.
6487
6488 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6489 and finite. The problematic cases are when X is zero, and its mode
6490 has signed zeros. In the case of rounding towards -infinity,
6491 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6492 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6493
6494 bool
6495 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6496 {
6497 if (!real_zerop (addend))
6498 return false;
6499
6500 /* Don't allow the fold with -fsignaling-nans. */
6501 if (HONOR_SNANS (element_mode (type)))
6502 return false;
6503
6504 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6505 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6506 return true;
6507
6508 /* In a vector or complex, we would need to check the sign of all zeros. */
6509 if (TREE_CODE (addend) != REAL_CST)
6510 return false;
6511
6512 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6513 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6514 negate = !negate;
6515
6516 /* The mode has signed zeros, and we have to honor their sign.
6517 In this situation, there is only one case we can return true for.
6518 X - 0 is the same as X unless rounding towards -infinity is
6519 supported. */
6520 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6521 }
6522
6523 /* Subroutine of match.pd that optimizes comparisons of a division by
6524 a nonzero integer constant against an integer constant, i.e.
6525 X/C1 op C2.
6526
6527 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6528 GE_EXPR or LE_EXPR. ARG01 and ARG1 must be a INTEGER_CST. */
6529
6530 enum tree_code
6531 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6532 tree *hi, bool *neg_overflow)
6533 {
6534 tree prod, tmp, type = TREE_TYPE (c1);
6535 signop sign = TYPE_SIGN (type);
6536 bool overflow;
6537
6538 /* We have to do this the hard way to detect unsigned overflow.
6539 prod = int_const_binop (MULT_EXPR, c1, c2); */
6540 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
6541 prod = force_fit_type (type, val, -1, overflow);
6542 *neg_overflow = false;
6543
6544 if (sign == UNSIGNED)
6545 {
6546 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6547 *lo = prod;
6548
6549 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6550 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
6551 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6552 }
6553 else if (tree_int_cst_sgn (c1) >= 0)
6554 {
6555 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6556 switch (tree_int_cst_sgn (c2))
6557 {
6558 case -1:
6559 *neg_overflow = true;
6560 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6561 *hi = prod;
6562 break;
6563
6564 case 0:
6565 *lo = fold_negate_const (tmp, type);
6566 *hi = tmp;
6567 break;
6568
6569 case 1:
6570 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6571 *lo = prod;
6572 break;
6573
6574 default:
6575 gcc_unreachable ();
6576 }
6577 }
6578 else
6579 {
6580 /* A negative divisor reverses the relational operators. */
6581 code = swap_tree_comparison (code);
6582
6583 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6584 switch (tree_int_cst_sgn (c2))
6585 {
6586 case -1:
6587 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6588 *lo = prod;
6589 break;
6590
6591 case 0:
6592 *hi = fold_negate_const (tmp, type);
6593 *lo = tmp;
6594 break;
6595
6596 case 1:
6597 *neg_overflow = true;
6598 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6599 *hi = prod;
6600 break;
6601
6602 default:
6603 gcc_unreachable ();
6604 }
6605 }
6606
6607 if (code != EQ_EXPR && code != NE_EXPR)
6608 return code;
6609
6610 if (TREE_OVERFLOW (*lo)
6611 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6612 *lo = NULL_TREE;
6613 if (TREE_OVERFLOW (*hi)
6614 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6615 *hi = NULL_TREE;
6616
6617 return code;
6618 }
6619
6620
6621 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6622 equality/inequality test, then return a simplified form of the test
6623 using a sign testing. Otherwise return NULL. TYPE is the desired
6624 result type. */
6625
6626 static tree
6627 fold_single_bit_test_into_sign_test (location_t loc,
6628 enum tree_code code, tree arg0, tree arg1,
6629 tree result_type)
6630 {
6631 /* If this is testing a single bit, we can optimize the test. */
6632 if ((code == NE_EXPR || code == EQ_EXPR)
6633 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6634 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6635 {
6636 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6637 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6638 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6639
6640 if (arg00 != NULL_TREE
6641 /* This is only a win if casting to a signed type is cheap,
6642 i.e. when arg00's type is not a partial mode. */
6643 && type_has_mode_precision_p (TREE_TYPE (arg00)))
6644 {
6645 tree stype = signed_type_for (TREE_TYPE (arg00));
6646 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6647 result_type,
6648 fold_convert_loc (loc, stype, arg00),
6649 build_int_cst (stype, 0));
6650 }
6651 }
6652
6653 return NULL_TREE;
6654 }
6655
6656 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6657 equality/inequality test, then return a simplified form of
6658 the test using shifts and logical operations. Otherwise return
6659 NULL. TYPE is the desired result type. */
6660
6661 tree
6662 fold_single_bit_test (location_t loc, enum tree_code code,
6663 tree arg0, tree arg1, tree result_type)
6664 {
6665 /* If this is testing a single bit, we can optimize the test. */
6666 if ((code == NE_EXPR || code == EQ_EXPR)
6667 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6668 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6669 {
6670 tree inner = TREE_OPERAND (arg0, 0);
6671 tree type = TREE_TYPE (arg0);
6672 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6673 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
6674 int ops_unsigned;
6675 tree signed_type, unsigned_type, intermediate_type;
6676 tree tem, one;
6677
6678 /* First, see if we can fold the single bit test into a sign-bit
6679 test. */
6680 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6681 result_type);
6682 if (tem)
6683 return tem;
6684
6685 /* Otherwise we have (A & C) != 0 where C is a single bit,
6686 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6687 Similarly for (A & C) == 0. */
6688
6689 /* If INNER is a right shift of a constant and it plus BITNUM does
6690 not overflow, adjust BITNUM and INNER. */
6691 if (TREE_CODE (inner) == RSHIFT_EXPR
6692 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6693 && bitnum < TYPE_PRECISION (type)
6694 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
6695 TYPE_PRECISION (type) - bitnum))
6696 {
6697 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6698 inner = TREE_OPERAND (inner, 0);
6699 }
6700
6701 /* If we are going to be able to omit the AND below, we must do our
6702 operations as unsigned. If we must use the AND, we have a choice.
6703 Normally unsigned is faster, but for some machines signed is. */
6704 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6705 && !flag_syntax_only) ? 0 : 1;
6706
6707 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6708 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6709 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6710 inner = fold_convert_loc (loc, intermediate_type, inner);
6711
6712 if (bitnum != 0)
6713 inner = build2 (RSHIFT_EXPR, intermediate_type,
6714 inner, size_int (bitnum));
6715
6716 one = build_int_cst (intermediate_type, 1);
6717
6718 if (code == EQ_EXPR)
6719 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6720
6721 /* Put the AND last so it can combine with more things. */
6722 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6723
6724 /* Make sure to return the proper type. */
6725 inner = fold_convert_loc (loc, result_type, inner);
6726
6727 return inner;
6728 }
6729 return NULL_TREE;
6730 }
6731
6732 /* Test whether it is preferable two swap two operands, ARG0 and
6733 ARG1, for example because ARG0 is an integer constant and ARG1
6734 isn't. */
6735
6736 bool
6737 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6738 {
6739 if (CONSTANT_CLASS_P (arg1))
6740 return 0;
6741 if (CONSTANT_CLASS_P (arg0))
6742 return 1;
6743
6744 STRIP_NOPS (arg0);
6745 STRIP_NOPS (arg1);
6746
6747 if (TREE_CONSTANT (arg1))
6748 return 0;
6749 if (TREE_CONSTANT (arg0))
6750 return 1;
6751
6752 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6753 for commutative and comparison operators. Ensuring a canonical
6754 form allows the optimizers to find additional redundancies without
6755 having to explicitly check for both orderings. */
6756 if (TREE_CODE (arg0) == SSA_NAME
6757 && TREE_CODE (arg1) == SSA_NAME
6758 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6759 return 1;
6760
6761 /* Put SSA_NAMEs last. */
6762 if (TREE_CODE (arg1) == SSA_NAME)
6763 return 0;
6764 if (TREE_CODE (arg0) == SSA_NAME)
6765 return 1;
6766
6767 /* Put variables last. */
6768 if (DECL_P (arg1))
6769 return 0;
6770 if (DECL_P (arg0))
6771 return 1;
6772
6773 return 0;
6774 }
6775
6776
6777 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6778 means A >= Y && A != MAX, but in this case we know that
6779 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6780
6781 static tree
6782 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6783 {
6784 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6785
6786 if (TREE_CODE (bound) == LT_EXPR)
6787 a = TREE_OPERAND (bound, 0);
6788 else if (TREE_CODE (bound) == GT_EXPR)
6789 a = TREE_OPERAND (bound, 1);
6790 else
6791 return NULL_TREE;
6792
6793 typea = TREE_TYPE (a);
6794 if (!INTEGRAL_TYPE_P (typea)
6795 && !POINTER_TYPE_P (typea))
6796 return NULL_TREE;
6797
6798 if (TREE_CODE (ineq) == LT_EXPR)
6799 {
6800 a1 = TREE_OPERAND (ineq, 1);
6801 y = TREE_OPERAND (ineq, 0);
6802 }
6803 else if (TREE_CODE (ineq) == GT_EXPR)
6804 {
6805 a1 = TREE_OPERAND (ineq, 0);
6806 y = TREE_OPERAND (ineq, 1);
6807 }
6808 else
6809 return NULL_TREE;
6810
6811 if (TREE_TYPE (a1) != typea)
6812 return NULL_TREE;
6813
6814 if (POINTER_TYPE_P (typea))
6815 {
6816 /* Convert the pointer types into integer before taking the difference. */
6817 tree ta = fold_convert_loc (loc, ssizetype, a);
6818 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6819 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6820 }
6821 else
6822 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6823
6824 if (!diff || !integer_onep (diff))
6825 return NULL_TREE;
6826
6827 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6828 }
6829
6830 /* Fold a sum or difference of at least one multiplication.
6831 Returns the folded tree or NULL if no simplification could be made. */
6832
6833 static tree
6834 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6835 tree arg0, tree arg1)
6836 {
6837 tree arg00, arg01, arg10, arg11;
6838 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6839
6840 /* (A * C) +- (B * C) -> (A+-B) * C.
6841 (A * C) +- A -> A * (C+-1).
6842 We are most concerned about the case where C is a constant,
6843 but other combinations show up during loop reduction. Since
6844 it is not difficult, try all four possibilities. */
6845
6846 if (TREE_CODE (arg0) == MULT_EXPR)
6847 {
6848 arg00 = TREE_OPERAND (arg0, 0);
6849 arg01 = TREE_OPERAND (arg0, 1);
6850 }
6851 else if (TREE_CODE (arg0) == INTEGER_CST)
6852 {
6853 arg00 = build_one_cst (type);
6854 arg01 = arg0;
6855 }
6856 else
6857 {
6858 /* We cannot generate constant 1 for fract. */
6859 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6860 return NULL_TREE;
6861 arg00 = arg0;
6862 arg01 = build_one_cst (type);
6863 }
6864 if (TREE_CODE (arg1) == MULT_EXPR)
6865 {
6866 arg10 = TREE_OPERAND (arg1, 0);
6867 arg11 = TREE_OPERAND (arg1, 1);
6868 }
6869 else if (TREE_CODE (arg1) == INTEGER_CST)
6870 {
6871 arg10 = build_one_cst (type);
6872 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6873 the purpose of this canonicalization. */
6874 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
6875 && negate_expr_p (arg1)
6876 && code == PLUS_EXPR)
6877 {
6878 arg11 = negate_expr (arg1);
6879 code = MINUS_EXPR;
6880 }
6881 else
6882 arg11 = arg1;
6883 }
6884 else
6885 {
6886 /* We cannot generate constant 1 for fract. */
6887 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6888 return NULL_TREE;
6889 arg10 = arg1;
6890 arg11 = build_one_cst (type);
6891 }
6892 same = NULL_TREE;
6893
6894 /* Prefer factoring a common non-constant. */
6895 if (operand_equal_p (arg00, arg10, 0))
6896 same = arg00, alt0 = arg01, alt1 = arg11;
6897 else if (operand_equal_p (arg01, arg11, 0))
6898 same = arg01, alt0 = arg00, alt1 = arg10;
6899 else if (operand_equal_p (arg00, arg11, 0))
6900 same = arg00, alt0 = arg01, alt1 = arg10;
6901 else if (operand_equal_p (arg01, arg10, 0))
6902 same = arg01, alt0 = arg00, alt1 = arg11;
6903
6904 /* No identical multiplicands; see if we can find a common
6905 power-of-two factor in non-power-of-two multiplies. This
6906 can help in multi-dimensional array access. */
6907 else if (tree_fits_shwi_p (arg01)
6908 && tree_fits_shwi_p (arg11))
6909 {
6910 HOST_WIDE_INT int01, int11, tmp;
6911 bool swap = false;
6912 tree maybe_same;
6913 int01 = tree_to_shwi (arg01);
6914 int11 = tree_to_shwi (arg11);
6915
6916 /* Move min of absolute values to int11. */
6917 if (absu_hwi (int01) < absu_hwi (int11))
6918 {
6919 tmp = int01, int01 = int11, int11 = tmp;
6920 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6921 maybe_same = arg01;
6922 swap = true;
6923 }
6924 else
6925 maybe_same = arg11;
6926
6927 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6928 /* The remainder should not be a constant, otherwise we
6929 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6930 increased the number of multiplications necessary. */
6931 && TREE_CODE (arg10) != INTEGER_CST)
6932 {
6933 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6934 build_int_cst (TREE_TYPE (arg00),
6935 int01 / int11));
6936 alt1 = arg10;
6937 same = maybe_same;
6938 if (swap)
6939 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6940 }
6941 }
6942
6943 if (!same)
6944 return NULL_TREE;
6945
6946 if (! INTEGRAL_TYPE_P (type)
6947 || TYPE_OVERFLOW_WRAPS (type)
6948 /* We are neither factoring zero nor minus one. */
6949 || TREE_CODE (same) == INTEGER_CST)
6950 return fold_build2_loc (loc, MULT_EXPR, type,
6951 fold_build2_loc (loc, code, type,
6952 fold_convert_loc (loc, type, alt0),
6953 fold_convert_loc (loc, type, alt1)),
6954 fold_convert_loc (loc, type, same));
6955
6956 /* Same may be zero and thus the operation 'code' may overflow. Likewise
6957 same may be minus one and thus the multiplication may overflow. Perform
6958 the operations in an unsigned type. */
6959 tree utype = unsigned_type_for (type);
6960 tree tem = fold_build2_loc (loc, code, utype,
6961 fold_convert_loc (loc, utype, alt0),
6962 fold_convert_loc (loc, utype, alt1));
6963 /* If the sum evaluated to a constant that is not -INF the multiplication
6964 cannot overflow. */
6965 if (TREE_CODE (tem) == INTEGER_CST
6966 && (wi::to_wide (tem)
6967 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
6968 return fold_build2_loc (loc, MULT_EXPR, type,
6969 fold_convert (type, tem), same);
6970
6971 return fold_convert_loc (loc, type,
6972 fold_build2_loc (loc, MULT_EXPR, utype, tem,
6973 fold_convert_loc (loc, utype, same)));
6974 }
6975
6976 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6977 specified by EXPR into the buffer PTR of length LEN bytes.
6978 Return the number of bytes placed in the buffer, or zero
6979 upon failure. */
6980
6981 static int
6982 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6983 {
6984 tree type = TREE_TYPE (expr);
6985 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
6986 int byte, offset, word, words;
6987 unsigned char value;
6988
6989 if ((off == -1 && total_bytes > len) || off >= total_bytes)
6990 return 0;
6991 if (off == -1)
6992 off = 0;
6993
6994 if (ptr == NULL)
6995 /* Dry run. */
6996 return MIN (len, total_bytes - off);
6997
6998 words = total_bytes / UNITS_PER_WORD;
6999
7000 for (byte = 0; byte < total_bytes; byte++)
7001 {
7002 int bitpos = byte * BITS_PER_UNIT;
7003 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7004 number of bytes. */
7005 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7006
7007 if (total_bytes > UNITS_PER_WORD)
7008 {
7009 word = byte / UNITS_PER_WORD;
7010 if (WORDS_BIG_ENDIAN)
7011 word = (words - 1) - word;
7012 offset = word * UNITS_PER_WORD;
7013 if (BYTES_BIG_ENDIAN)
7014 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7015 else
7016 offset += byte % UNITS_PER_WORD;
7017 }
7018 else
7019 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7020 if (offset >= off && offset - off < len)
7021 ptr[offset - off] = value;
7022 }
7023 return MIN (len, total_bytes - off);
7024 }
7025
7026
7027 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7028 specified by EXPR into the buffer PTR of length LEN bytes.
7029 Return the number of bytes placed in the buffer, or zero
7030 upon failure. */
7031
7032 static int
7033 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7034 {
7035 tree type = TREE_TYPE (expr);
7036 scalar_mode mode = SCALAR_TYPE_MODE (type);
7037 int total_bytes = GET_MODE_SIZE (mode);
7038 FIXED_VALUE_TYPE value;
7039 tree i_value, i_type;
7040
7041 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7042 return 0;
7043
7044 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7045
7046 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes)
7047 return 0;
7048
7049 value = TREE_FIXED_CST (expr);
7050 i_value = double_int_to_tree (i_type, value.data);
7051
7052 return native_encode_int (i_value, ptr, len, off);
7053 }
7054
7055
7056 /* Subroutine of native_encode_expr. Encode the REAL_CST
7057 specified by EXPR into the buffer PTR of length LEN bytes.
7058 Return the number of bytes placed in the buffer, or zero
7059 upon failure. */
7060
7061 static int
7062 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7063 {
7064 tree type = TREE_TYPE (expr);
7065 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7066 int byte, offset, word, words, bitpos;
7067 unsigned char value;
7068
7069 /* There are always 32 bits in each long, no matter the size of
7070 the hosts long. We handle floating point representations with
7071 up to 192 bits. */
7072 long tmp[6];
7073
7074 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7075 return 0;
7076 if (off == -1)
7077 off = 0;
7078
7079 if (ptr == NULL)
7080 /* Dry run. */
7081 return MIN (len, total_bytes - off);
7082
7083 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7084
7085 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7086
7087 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7088 bitpos += BITS_PER_UNIT)
7089 {
7090 byte = (bitpos / BITS_PER_UNIT) & 3;
7091 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7092
7093 if (UNITS_PER_WORD < 4)
7094 {
7095 word = byte / UNITS_PER_WORD;
7096 if (WORDS_BIG_ENDIAN)
7097 word = (words - 1) - word;
7098 offset = word * UNITS_PER_WORD;
7099 if (BYTES_BIG_ENDIAN)
7100 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7101 else
7102 offset += byte % UNITS_PER_WORD;
7103 }
7104 else
7105 {
7106 offset = byte;
7107 if (BYTES_BIG_ENDIAN)
7108 {
7109 /* Reverse bytes within each long, or within the entire float
7110 if it's smaller than a long (for HFmode). */
7111 offset = MIN (3, total_bytes - 1) - offset;
7112 gcc_assert (offset >= 0);
7113 }
7114 }
7115 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7116 if (offset >= off
7117 && offset - off < len)
7118 ptr[offset - off] = value;
7119 }
7120 return MIN (len, total_bytes - off);
7121 }
7122
7123 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7124 specified by EXPR into the buffer PTR of length LEN bytes.
7125 Return the number of bytes placed in the buffer, or zero
7126 upon failure. */
7127
7128 static int
7129 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7130 {
7131 int rsize, isize;
7132 tree part;
7133
7134 part = TREE_REALPART (expr);
7135 rsize = native_encode_expr (part, ptr, len, off);
7136 if (off == -1 && rsize == 0)
7137 return 0;
7138 part = TREE_IMAGPART (expr);
7139 if (off != -1)
7140 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7141 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7142 len - rsize, off);
7143 if (off == -1 && isize != rsize)
7144 return 0;
7145 return rsize + isize;
7146 }
7147
7148
7149 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7150 specified by EXPR into the buffer PTR of length LEN bytes.
7151 Return the number of bytes placed in the buffer, or zero
7152 upon failure. */
7153
7154 static int
7155 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7156 {
7157 unsigned i, count;
7158 int size, offset;
7159 tree itype, elem;
7160
7161 offset = 0;
7162 count = VECTOR_CST_NELTS (expr);
7163 itype = TREE_TYPE (TREE_TYPE (expr));
7164 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7165 for (i = 0; i < count; i++)
7166 {
7167 if (off >= size)
7168 {
7169 off -= size;
7170 continue;
7171 }
7172 elem = VECTOR_CST_ELT (expr, i);
7173 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7174 len - offset, off);
7175 if ((off == -1 && res != size) || res == 0)
7176 return 0;
7177 offset += res;
7178 if (offset >= len)
7179 return offset;
7180 if (off != -1)
7181 off = 0;
7182 }
7183 return offset;
7184 }
7185
7186
7187 /* Subroutine of native_encode_expr. Encode the STRING_CST
7188 specified by EXPR into the buffer PTR of length LEN bytes.
7189 Return the number of bytes placed in the buffer, or zero
7190 upon failure. */
7191
7192 static int
7193 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7194 {
7195 tree type = TREE_TYPE (expr);
7196
7197 /* Wide-char strings are encoded in target byte-order so native
7198 encoding them is trivial. */
7199 if (BITS_PER_UNIT != CHAR_BIT
7200 || TREE_CODE (type) != ARRAY_TYPE
7201 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7202 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7203 return 0;
7204
7205 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7206 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7207 return 0;
7208 if (off == -1)
7209 off = 0;
7210 if (ptr == NULL)
7211 /* Dry run. */;
7212 else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7213 {
7214 int written = 0;
7215 if (off < TREE_STRING_LENGTH (expr))
7216 {
7217 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7218 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7219 }
7220 memset (ptr + written, 0,
7221 MIN (total_bytes - written, len - written));
7222 }
7223 else
7224 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7225 return MIN (total_bytes - off, len);
7226 }
7227
7228
7229 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7230 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7231 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7232 anything, just do a dry run. If OFF is not -1 then start
7233 the encoding at byte offset OFF and encode at most LEN bytes.
7234 Return the number of bytes placed in the buffer, or zero upon failure. */
7235
7236 int
7237 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7238 {
7239 /* We don't support starting at negative offset and -1 is special. */
7240 if (off < -1)
7241 return 0;
7242
7243 switch (TREE_CODE (expr))
7244 {
7245 case INTEGER_CST:
7246 return native_encode_int (expr, ptr, len, off);
7247
7248 case REAL_CST:
7249 return native_encode_real (expr, ptr, len, off);
7250
7251 case FIXED_CST:
7252 return native_encode_fixed (expr, ptr, len, off);
7253
7254 case COMPLEX_CST:
7255 return native_encode_complex (expr, ptr, len, off);
7256
7257 case VECTOR_CST:
7258 return native_encode_vector (expr, ptr, len, off);
7259
7260 case STRING_CST:
7261 return native_encode_string (expr, ptr, len, off);
7262
7263 default:
7264 return 0;
7265 }
7266 }
7267
7268
7269 /* Subroutine of native_interpret_expr. Interpret the contents of
7270 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7271 If the buffer cannot be interpreted, return NULL_TREE. */
7272
7273 static tree
7274 native_interpret_int (tree type, const unsigned char *ptr, int len)
7275 {
7276 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7277
7278 if (total_bytes > len
7279 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7280 return NULL_TREE;
7281
7282 wide_int result = wi::from_buffer (ptr, total_bytes);
7283
7284 return wide_int_to_tree (type, result);
7285 }
7286
7287
7288 /* Subroutine of native_interpret_expr. Interpret the contents of
7289 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7290 If the buffer cannot be interpreted, return NULL_TREE. */
7291
7292 static tree
7293 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7294 {
7295 scalar_mode mode = SCALAR_TYPE_MODE (type);
7296 int total_bytes = GET_MODE_SIZE (mode);
7297 double_int result;
7298 FIXED_VALUE_TYPE fixed_value;
7299
7300 if (total_bytes > len
7301 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7302 return NULL_TREE;
7303
7304 result = double_int::from_buffer (ptr, total_bytes);
7305 fixed_value = fixed_from_double_int (result, mode);
7306
7307 return build_fixed (type, fixed_value);
7308 }
7309
7310
7311 /* Subroutine of native_interpret_expr. Interpret the contents of
7312 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7313 If the buffer cannot be interpreted, return NULL_TREE. */
7314
7315 static tree
7316 native_interpret_real (tree type, const unsigned char *ptr, int len)
7317 {
7318 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7319 int total_bytes = GET_MODE_SIZE (mode);
7320 unsigned char value;
7321 /* There are always 32 bits in each long, no matter the size of
7322 the hosts long. We handle floating point representations with
7323 up to 192 bits. */
7324 REAL_VALUE_TYPE r;
7325 long tmp[6];
7326
7327 if (total_bytes > len || total_bytes > 24)
7328 return NULL_TREE;
7329 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7330
7331 memset (tmp, 0, sizeof (tmp));
7332 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7333 bitpos += BITS_PER_UNIT)
7334 {
7335 /* Both OFFSET and BYTE index within a long;
7336 bitpos indexes the whole float. */
7337 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7338 if (UNITS_PER_WORD < 4)
7339 {
7340 int word = byte / UNITS_PER_WORD;
7341 if (WORDS_BIG_ENDIAN)
7342 word = (words - 1) - word;
7343 offset = word * UNITS_PER_WORD;
7344 if (BYTES_BIG_ENDIAN)
7345 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7346 else
7347 offset += byte % UNITS_PER_WORD;
7348 }
7349 else
7350 {
7351 offset = byte;
7352 if (BYTES_BIG_ENDIAN)
7353 {
7354 /* Reverse bytes within each long, or within the entire float
7355 if it's smaller than a long (for HFmode). */
7356 offset = MIN (3, total_bytes - 1) - offset;
7357 gcc_assert (offset >= 0);
7358 }
7359 }
7360 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7361
7362 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7363 }
7364
7365 real_from_target (&r, tmp, mode);
7366 return build_real (type, r);
7367 }
7368
7369
7370 /* Subroutine of native_interpret_expr. Interpret the contents of
7371 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7372 If the buffer cannot be interpreted, return NULL_TREE. */
7373
7374 static tree
7375 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7376 {
7377 tree etype, rpart, ipart;
7378 int size;
7379
7380 etype = TREE_TYPE (type);
7381 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7382 if (size * 2 > len)
7383 return NULL_TREE;
7384 rpart = native_interpret_expr (etype, ptr, size);
7385 if (!rpart)
7386 return NULL_TREE;
7387 ipart = native_interpret_expr (etype, ptr+size, size);
7388 if (!ipart)
7389 return NULL_TREE;
7390 return build_complex (type, rpart, ipart);
7391 }
7392
7393
7394 /* Subroutine of native_interpret_expr. Interpret the contents of
7395 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7396 If the buffer cannot be interpreted, return NULL_TREE. */
7397
7398 static tree
7399 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7400 {
7401 tree etype, elem;
7402 int i, size, count;
7403
7404 etype = TREE_TYPE (type);
7405 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7406 count = TYPE_VECTOR_SUBPARTS (type);
7407 if (size * count > len)
7408 return NULL_TREE;
7409
7410 auto_vec<tree, 32> elements (count);
7411 for (i = 0; i < count; ++i)
7412 {
7413 elem = native_interpret_expr (etype, ptr+(i*size), size);
7414 if (!elem)
7415 return NULL_TREE;
7416 elements.quick_push (elem);
7417 }
7418 return build_vector (type, elements);
7419 }
7420
7421
7422 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7423 the buffer PTR of length LEN as a constant of type TYPE. For
7424 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7425 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7426 return NULL_TREE. */
7427
7428 tree
7429 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7430 {
7431 switch (TREE_CODE (type))
7432 {
7433 case INTEGER_TYPE:
7434 case ENUMERAL_TYPE:
7435 case BOOLEAN_TYPE:
7436 case POINTER_TYPE:
7437 case REFERENCE_TYPE:
7438 return native_interpret_int (type, ptr, len);
7439
7440 case REAL_TYPE:
7441 return native_interpret_real (type, ptr, len);
7442
7443 case FIXED_POINT_TYPE:
7444 return native_interpret_fixed (type, ptr, len);
7445
7446 case COMPLEX_TYPE:
7447 return native_interpret_complex (type, ptr, len);
7448
7449 case VECTOR_TYPE:
7450 return native_interpret_vector (type, ptr, len);
7451
7452 default:
7453 return NULL_TREE;
7454 }
7455 }
7456
7457 /* Returns true if we can interpret the contents of a native encoding
7458 as TYPE. */
7459
7460 static bool
7461 can_native_interpret_type_p (tree type)
7462 {
7463 switch (TREE_CODE (type))
7464 {
7465 case INTEGER_TYPE:
7466 case ENUMERAL_TYPE:
7467 case BOOLEAN_TYPE:
7468 case POINTER_TYPE:
7469 case REFERENCE_TYPE:
7470 case FIXED_POINT_TYPE:
7471 case REAL_TYPE:
7472 case COMPLEX_TYPE:
7473 case VECTOR_TYPE:
7474 return true;
7475 default:
7476 return false;
7477 }
7478 }
7479
7480
7481 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7482 TYPE at compile-time. If we're unable to perform the conversion
7483 return NULL_TREE. */
7484
7485 static tree
7486 fold_view_convert_expr (tree type, tree expr)
7487 {
7488 /* We support up to 512-bit values (for V8DFmode). */
7489 unsigned char buffer[64];
7490 int len;
7491
7492 /* Check that the host and target are sane. */
7493 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7494 return NULL_TREE;
7495
7496 len = native_encode_expr (expr, buffer, sizeof (buffer));
7497 if (len == 0)
7498 return NULL_TREE;
7499
7500 return native_interpret_expr (type, buffer, len);
7501 }
7502
7503 /* Build an expression for the address of T. Folds away INDIRECT_REF
7504 to avoid confusing the gimplify process. */
7505
7506 tree
7507 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7508 {
7509 /* The size of the object is not relevant when talking about its address. */
7510 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7511 t = TREE_OPERAND (t, 0);
7512
7513 if (TREE_CODE (t) == INDIRECT_REF)
7514 {
7515 t = TREE_OPERAND (t, 0);
7516
7517 if (TREE_TYPE (t) != ptrtype)
7518 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7519 }
7520 else if (TREE_CODE (t) == MEM_REF
7521 && integer_zerop (TREE_OPERAND (t, 1)))
7522 return TREE_OPERAND (t, 0);
7523 else if (TREE_CODE (t) == MEM_REF
7524 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7525 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7526 TREE_OPERAND (t, 0),
7527 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7528 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7529 {
7530 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7531
7532 if (TREE_TYPE (t) != ptrtype)
7533 t = fold_convert_loc (loc, ptrtype, t);
7534 }
7535 else
7536 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7537
7538 return t;
7539 }
7540
7541 /* Build an expression for the address of T. */
7542
7543 tree
7544 build_fold_addr_expr_loc (location_t loc, tree t)
7545 {
7546 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7547
7548 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7549 }
7550
7551 /* Fold a unary expression of code CODE and type TYPE with operand
7552 OP0. Return the folded expression if folding is successful.
7553 Otherwise, return NULL_TREE. */
7554
7555 tree
7556 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7557 {
7558 tree tem;
7559 tree arg0;
7560 enum tree_code_class kind = TREE_CODE_CLASS (code);
7561
7562 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7563 && TREE_CODE_LENGTH (code) == 1);
7564
7565 arg0 = op0;
7566 if (arg0)
7567 {
7568 if (CONVERT_EXPR_CODE_P (code)
7569 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7570 {
7571 /* Don't use STRIP_NOPS, because signedness of argument type
7572 matters. */
7573 STRIP_SIGN_NOPS (arg0);
7574 }
7575 else
7576 {
7577 /* Strip any conversions that don't change the mode. This
7578 is safe for every expression, except for a comparison
7579 expression because its signedness is derived from its
7580 operands.
7581
7582 Note that this is done as an internal manipulation within
7583 the constant folder, in order to find the simplest
7584 representation of the arguments so that their form can be
7585 studied. In any cases, the appropriate type conversions
7586 should be put back in the tree that will get out of the
7587 constant folder. */
7588 STRIP_NOPS (arg0);
7589 }
7590
7591 if (CONSTANT_CLASS_P (arg0))
7592 {
7593 tree tem = const_unop (code, type, arg0);
7594 if (tem)
7595 {
7596 if (TREE_TYPE (tem) != type)
7597 tem = fold_convert_loc (loc, type, tem);
7598 return tem;
7599 }
7600 }
7601 }
7602
7603 tem = generic_simplify (loc, code, type, op0);
7604 if (tem)
7605 return tem;
7606
7607 if (TREE_CODE_CLASS (code) == tcc_unary)
7608 {
7609 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7610 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7611 fold_build1_loc (loc, code, type,
7612 fold_convert_loc (loc, TREE_TYPE (op0),
7613 TREE_OPERAND (arg0, 1))));
7614 else if (TREE_CODE (arg0) == COND_EXPR)
7615 {
7616 tree arg01 = TREE_OPERAND (arg0, 1);
7617 tree arg02 = TREE_OPERAND (arg0, 2);
7618 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7619 arg01 = fold_build1_loc (loc, code, type,
7620 fold_convert_loc (loc,
7621 TREE_TYPE (op0), arg01));
7622 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7623 arg02 = fold_build1_loc (loc, code, type,
7624 fold_convert_loc (loc,
7625 TREE_TYPE (op0), arg02));
7626 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7627 arg01, arg02);
7628
7629 /* If this was a conversion, and all we did was to move into
7630 inside the COND_EXPR, bring it back out. But leave it if
7631 it is a conversion from integer to integer and the
7632 result precision is no wider than a word since such a
7633 conversion is cheap and may be optimized away by combine,
7634 while it couldn't if it were outside the COND_EXPR. Then return
7635 so we don't get into an infinite recursion loop taking the
7636 conversion out and then back in. */
7637
7638 if ((CONVERT_EXPR_CODE_P (code)
7639 || code == NON_LVALUE_EXPR)
7640 && TREE_CODE (tem) == COND_EXPR
7641 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7642 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7643 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7644 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7645 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7646 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7647 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7648 && (INTEGRAL_TYPE_P
7649 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7650 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7651 || flag_syntax_only))
7652 tem = build1_loc (loc, code, type,
7653 build3 (COND_EXPR,
7654 TREE_TYPE (TREE_OPERAND
7655 (TREE_OPERAND (tem, 1), 0)),
7656 TREE_OPERAND (tem, 0),
7657 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7658 TREE_OPERAND (TREE_OPERAND (tem, 2),
7659 0)));
7660 return tem;
7661 }
7662 }
7663
7664 switch (code)
7665 {
7666 case NON_LVALUE_EXPR:
7667 if (!maybe_lvalue_p (op0))
7668 return fold_convert_loc (loc, type, op0);
7669 return NULL_TREE;
7670
7671 CASE_CONVERT:
7672 case FLOAT_EXPR:
7673 case FIX_TRUNC_EXPR:
7674 if (COMPARISON_CLASS_P (op0))
7675 {
7676 /* If we have (type) (a CMP b) and type is an integral type, return
7677 new expression involving the new type. Canonicalize
7678 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7679 non-integral type.
7680 Do not fold the result as that would not simplify further, also
7681 folding again results in recursions. */
7682 if (TREE_CODE (type) == BOOLEAN_TYPE)
7683 return build2_loc (loc, TREE_CODE (op0), type,
7684 TREE_OPERAND (op0, 0),
7685 TREE_OPERAND (op0, 1));
7686 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7687 && TREE_CODE (type) != VECTOR_TYPE)
7688 return build3_loc (loc, COND_EXPR, type, op0,
7689 constant_boolean_node (true, type),
7690 constant_boolean_node (false, type));
7691 }
7692
7693 /* Handle (T *)&A.B.C for A being of type T and B and C
7694 living at offset zero. This occurs frequently in
7695 C++ upcasting and then accessing the base. */
7696 if (TREE_CODE (op0) == ADDR_EXPR
7697 && POINTER_TYPE_P (type)
7698 && handled_component_p (TREE_OPERAND (op0, 0)))
7699 {
7700 HOST_WIDE_INT bitsize, bitpos;
7701 tree offset;
7702 machine_mode mode;
7703 int unsignedp, reversep, volatilep;
7704 tree base
7705 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7706 &offset, &mode, &unsignedp, &reversep,
7707 &volatilep);
7708 /* If the reference was to a (constant) zero offset, we can use
7709 the address of the base if it has the same base type
7710 as the result type and the pointer type is unqualified. */
7711 if (! offset && bitpos == 0
7712 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7713 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7714 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7715 return fold_convert_loc (loc, type,
7716 build_fold_addr_expr_loc (loc, base));
7717 }
7718
7719 if (TREE_CODE (op0) == MODIFY_EXPR
7720 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7721 /* Detect assigning a bitfield. */
7722 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7723 && DECL_BIT_FIELD
7724 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7725 {
7726 /* Don't leave an assignment inside a conversion
7727 unless assigning a bitfield. */
7728 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7729 /* First do the assignment, then return converted constant. */
7730 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7731 TREE_NO_WARNING (tem) = 1;
7732 TREE_USED (tem) = 1;
7733 return tem;
7734 }
7735
7736 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7737 constants (if x has signed type, the sign bit cannot be set
7738 in c). This folds extension into the BIT_AND_EXPR.
7739 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7740 very likely don't have maximal range for their precision and this
7741 transformation effectively doesn't preserve non-maximal ranges. */
7742 if (TREE_CODE (type) == INTEGER_TYPE
7743 && TREE_CODE (op0) == BIT_AND_EXPR
7744 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7745 {
7746 tree and_expr = op0;
7747 tree and0 = TREE_OPERAND (and_expr, 0);
7748 tree and1 = TREE_OPERAND (and_expr, 1);
7749 int change = 0;
7750
7751 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7752 || (TYPE_PRECISION (type)
7753 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7754 change = 1;
7755 else if (TYPE_PRECISION (TREE_TYPE (and1))
7756 <= HOST_BITS_PER_WIDE_INT
7757 && tree_fits_uhwi_p (and1))
7758 {
7759 unsigned HOST_WIDE_INT cst;
7760
7761 cst = tree_to_uhwi (and1);
7762 cst &= HOST_WIDE_INT_M1U
7763 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7764 change = (cst == 0);
7765 if (change
7766 && !flag_syntax_only
7767 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7768 == ZERO_EXTEND))
7769 {
7770 tree uns = unsigned_type_for (TREE_TYPE (and0));
7771 and0 = fold_convert_loc (loc, uns, and0);
7772 and1 = fold_convert_loc (loc, uns, and1);
7773 }
7774 }
7775 if (change)
7776 {
7777 tem = force_fit_type (type, wi::to_widest (and1), 0,
7778 TREE_OVERFLOW (and1));
7779 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7780 fold_convert_loc (loc, type, and0), tem);
7781 }
7782 }
7783
7784 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7785 cast (T1)X will fold away. We assume that this happens when X itself
7786 is a cast. */
7787 if (POINTER_TYPE_P (type)
7788 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7789 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7790 {
7791 tree arg00 = TREE_OPERAND (arg0, 0);
7792 tree arg01 = TREE_OPERAND (arg0, 1);
7793
7794 return fold_build_pointer_plus_loc
7795 (loc, fold_convert_loc (loc, type, arg00), arg01);
7796 }
7797
7798 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7799 of the same precision, and X is an integer type not narrower than
7800 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7801 if (INTEGRAL_TYPE_P (type)
7802 && TREE_CODE (op0) == BIT_NOT_EXPR
7803 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7804 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7805 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7806 {
7807 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7808 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7809 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7810 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7811 fold_convert_loc (loc, type, tem));
7812 }
7813
7814 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7815 type of X and Y (integer types only). */
7816 if (INTEGRAL_TYPE_P (type)
7817 && TREE_CODE (op0) == MULT_EXPR
7818 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7819 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7820 {
7821 /* Be careful not to introduce new overflows. */
7822 tree mult_type;
7823 if (TYPE_OVERFLOW_WRAPS (type))
7824 mult_type = type;
7825 else
7826 mult_type = unsigned_type_for (type);
7827
7828 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7829 {
7830 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7831 fold_convert_loc (loc, mult_type,
7832 TREE_OPERAND (op0, 0)),
7833 fold_convert_loc (loc, mult_type,
7834 TREE_OPERAND (op0, 1)));
7835 return fold_convert_loc (loc, type, tem);
7836 }
7837 }
7838
7839 return NULL_TREE;
7840
7841 case VIEW_CONVERT_EXPR:
7842 if (TREE_CODE (op0) == MEM_REF)
7843 {
7844 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7845 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7846 tem = fold_build2_loc (loc, MEM_REF, type,
7847 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7848 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7849 return tem;
7850 }
7851
7852 return NULL_TREE;
7853
7854 case NEGATE_EXPR:
7855 tem = fold_negate_expr (loc, arg0);
7856 if (tem)
7857 return fold_convert_loc (loc, type, tem);
7858 return NULL_TREE;
7859
7860 case ABS_EXPR:
7861 /* Convert fabs((double)float) into (double)fabsf(float). */
7862 if (TREE_CODE (arg0) == NOP_EXPR
7863 && TREE_CODE (type) == REAL_TYPE)
7864 {
7865 tree targ0 = strip_float_extensions (arg0);
7866 if (targ0 != arg0)
7867 return fold_convert_loc (loc, type,
7868 fold_build1_loc (loc, ABS_EXPR,
7869 TREE_TYPE (targ0),
7870 targ0));
7871 }
7872 return NULL_TREE;
7873
7874 case BIT_NOT_EXPR:
7875 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7876 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7877 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7878 fold_convert_loc (loc, type,
7879 TREE_OPERAND (arg0, 0)))))
7880 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7881 fold_convert_loc (loc, type,
7882 TREE_OPERAND (arg0, 1)));
7883 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7884 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7885 fold_convert_loc (loc, type,
7886 TREE_OPERAND (arg0, 1)))))
7887 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7888 fold_convert_loc (loc, type,
7889 TREE_OPERAND (arg0, 0)), tem);
7890
7891 return NULL_TREE;
7892
7893 case TRUTH_NOT_EXPR:
7894 /* Note that the operand of this must be an int
7895 and its values must be 0 or 1.
7896 ("true" is a fixed value perhaps depending on the language,
7897 but we don't handle values other than 1 correctly yet.) */
7898 tem = fold_truth_not_expr (loc, arg0);
7899 if (!tem)
7900 return NULL_TREE;
7901 return fold_convert_loc (loc, type, tem);
7902
7903 case INDIRECT_REF:
7904 /* Fold *&X to X if X is an lvalue. */
7905 if (TREE_CODE (op0) == ADDR_EXPR)
7906 {
7907 tree op00 = TREE_OPERAND (op0, 0);
7908 if ((VAR_P (op00)
7909 || TREE_CODE (op00) == PARM_DECL
7910 || TREE_CODE (op00) == RESULT_DECL)
7911 && !TREE_READONLY (op00))
7912 return op00;
7913 }
7914 return NULL_TREE;
7915
7916 default:
7917 return NULL_TREE;
7918 } /* switch (code) */
7919 }
7920
7921
7922 /* If the operation was a conversion do _not_ mark a resulting constant
7923 with TREE_OVERFLOW if the original constant was not. These conversions
7924 have implementation defined behavior and retaining the TREE_OVERFLOW
7925 flag here would confuse later passes such as VRP. */
7926 tree
7927 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7928 tree type, tree op0)
7929 {
7930 tree res = fold_unary_loc (loc, code, type, op0);
7931 if (res
7932 && TREE_CODE (res) == INTEGER_CST
7933 && TREE_CODE (op0) == INTEGER_CST
7934 && CONVERT_EXPR_CODE_P (code))
7935 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7936
7937 return res;
7938 }
7939
7940 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7941 operands OP0 and OP1. LOC is the location of the resulting expression.
7942 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
7943 Return the folded expression if folding is successful. Otherwise,
7944 return NULL_TREE. */
7945 static tree
7946 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7947 tree arg0, tree arg1, tree op0, tree op1)
7948 {
7949 tree tem;
7950
7951 /* We only do these simplifications if we are optimizing. */
7952 if (!optimize)
7953 return NULL_TREE;
7954
7955 /* Check for things like (A || B) && (A || C). We can convert this
7956 to A || (B && C). Note that either operator can be any of the four
7957 truth and/or operations and the transformation will still be
7958 valid. Also note that we only care about order for the
7959 ANDIF and ORIF operators. If B contains side effects, this
7960 might change the truth-value of A. */
7961 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7962 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7963 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7964 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7965 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7966 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7967 {
7968 tree a00 = TREE_OPERAND (arg0, 0);
7969 tree a01 = TREE_OPERAND (arg0, 1);
7970 tree a10 = TREE_OPERAND (arg1, 0);
7971 tree a11 = TREE_OPERAND (arg1, 1);
7972 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7973 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7974 && (code == TRUTH_AND_EXPR
7975 || code == TRUTH_OR_EXPR));
7976
7977 if (operand_equal_p (a00, a10, 0))
7978 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7979 fold_build2_loc (loc, code, type, a01, a11));
7980 else if (commutative && operand_equal_p (a00, a11, 0))
7981 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7982 fold_build2_loc (loc, code, type, a01, a10));
7983 else if (commutative && operand_equal_p (a01, a10, 0))
7984 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
7985 fold_build2_loc (loc, code, type, a00, a11));
7986
7987 /* This case if tricky because we must either have commutative
7988 operators or else A10 must not have side-effects. */
7989
7990 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7991 && operand_equal_p (a01, a11, 0))
7992 return fold_build2_loc (loc, TREE_CODE (arg0), type,
7993 fold_build2_loc (loc, code, type, a00, a10),
7994 a01);
7995 }
7996
7997 /* See if we can build a range comparison. */
7998 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
7999 return tem;
8000
8001 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8002 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8003 {
8004 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8005 if (tem)
8006 return fold_build2_loc (loc, code, type, tem, arg1);
8007 }
8008
8009 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8010 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8011 {
8012 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8013 if (tem)
8014 return fold_build2_loc (loc, code, type, arg0, tem);
8015 }
8016
8017 /* Check for the possibility of merging component references. If our
8018 lhs is another similar operation, try to merge its rhs with our
8019 rhs. Then try to merge our lhs and rhs. */
8020 if (TREE_CODE (arg0) == code
8021 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8022 TREE_OPERAND (arg0, 1), arg1)))
8023 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8024
8025 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8026 return tem;
8027
8028 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8029 && !flag_sanitize_coverage
8030 && (code == TRUTH_AND_EXPR
8031 || code == TRUTH_ANDIF_EXPR
8032 || code == TRUTH_OR_EXPR
8033 || code == TRUTH_ORIF_EXPR))
8034 {
8035 enum tree_code ncode, icode;
8036
8037 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8038 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8039 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8040
8041 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8042 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8043 We don't want to pack more than two leafs to a non-IF AND/OR
8044 expression.
8045 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8046 equal to IF-CODE, then we don't want to add right-hand operand.
8047 If the inner right-hand side of left-hand operand has
8048 side-effects, or isn't simple, then we can't add to it,
8049 as otherwise we might destroy if-sequence. */
8050 if (TREE_CODE (arg0) == icode
8051 && simple_operand_p_2 (arg1)
8052 /* Needed for sequence points to handle trappings, and
8053 side-effects. */
8054 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8055 {
8056 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8057 arg1);
8058 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8059 tem);
8060 }
8061 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8062 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8063 else if (TREE_CODE (arg1) == icode
8064 && simple_operand_p_2 (arg0)
8065 /* Needed for sequence points to handle trappings, and
8066 side-effects. */
8067 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8068 {
8069 tem = fold_build2_loc (loc, ncode, type,
8070 arg0, TREE_OPERAND (arg1, 0));
8071 return fold_build2_loc (loc, icode, type, tem,
8072 TREE_OPERAND (arg1, 1));
8073 }
8074 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8075 into (A OR B).
8076 For sequence point consistancy, we need to check for trapping,
8077 and side-effects. */
8078 else if (code == icode && simple_operand_p_2 (arg0)
8079 && simple_operand_p_2 (arg1))
8080 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8081 }
8082
8083 return NULL_TREE;
8084 }
8085
8086 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8087 by changing CODE to reduce the magnitude of constants involved in
8088 ARG0 of the comparison.
8089 Returns a canonicalized comparison tree if a simplification was
8090 possible, otherwise returns NULL_TREE.
8091 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8092 valid if signed overflow is undefined. */
8093
8094 static tree
8095 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8096 tree arg0, tree arg1,
8097 bool *strict_overflow_p)
8098 {
8099 enum tree_code code0 = TREE_CODE (arg0);
8100 tree t, cst0 = NULL_TREE;
8101 int sgn0;
8102
8103 /* Match A +- CST code arg1. We can change this only if overflow
8104 is undefined. */
8105 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8106 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8107 /* In principle pointers also have undefined overflow behavior,
8108 but that causes problems elsewhere. */
8109 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8110 && (code0 == MINUS_EXPR
8111 || code0 == PLUS_EXPR)
8112 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8113 return NULL_TREE;
8114
8115 /* Identify the constant in arg0 and its sign. */
8116 cst0 = TREE_OPERAND (arg0, 1);
8117 sgn0 = tree_int_cst_sgn (cst0);
8118
8119 /* Overflowed constants and zero will cause problems. */
8120 if (integer_zerop (cst0)
8121 || TREE_OVERFLOW (cst0))
8122 return NULL_TREE;
8123
8124 /* See if we can reduce the magnitude of the constant in
8125 arg0 by changing the comparison code. */
8126 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8127 if (code == LT_EXPR
8128 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8129 code = LE_EXPR;
8130 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8131 else if (code == GT_EXPR
8132 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8133 code = GE_EXPR;
8134 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8135 else if (code == LE_EXPR
8136 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8137 code = LT_EXPR;
8138 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8139 else if (code == GE_EXPR
8140 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8141 code = GT_EXPR;
8142 else
8143 return NULL_TREE;
8144 *strict_overflow_p = true;
8145
8146 /* Now build the constant reduced in magnitude. But not if that
8147 would produce one outside of its types range. */
8148 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8149 && ((sgn0 == 1
8150 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8151 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8152 || (sgn0 == -1
8153 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8154 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8155 return NULL_TREE;
8156
8157 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8158 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8159 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8160 t = fold_convert (TREE_TYPE (arg1), t);
8161
8162 return fold_build2_loc (loc, code, type, t, arg1);
8163 }
8164
8165 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8166 overflow further. Try to decrease the magnitude of constants involved
8167 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8168 and put sole constants at the second argument position.
8169 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8170
8171 static tree
8172 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8173 tree arg0, tree arg1)
8174 {
8175 tree t;
8176 bool strict_overflow_p;
8177 const char * const warnmsg = G_("assuming signed overflow does not occur "
8178 "when reducing constant in comparison");
8179
8180 /* Try canonicalization by simplifying arg0. */
8181 strict_overflow_p = false;
8182 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8183 &strict_overflow_p);
8184 if (t)
8185 {
8186 if (strict_overflow_p)
8187 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8188 return t;
8189 }
8190
8191 /* Try canonicalization by simplifying arg1 using the swapped
8192 comparison. */
8193 code = swap_tree_comparison (code);
8194 strict_overflow_p = false;
8195 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8196 &strict_overflow_p);
8197 if (t && strict_overflow_p)
8198 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8199 return t;
8200 }
8201
8202 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8203 space. This is used to avoid issuing overflow warnings for
8204 expressions like &p->x which can not wrap. */
8205
8206 static bool
8207 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8208 {
8209 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8210 return true;
8211
8212 if (bitpos < 0)
8213 return true;
8214
8215 wide_int wi_offset;
8216 int precision = TYPE_PRECISION (TREE_TYPE (base));
8217 if (offset == NULL_TREE)
8218 wi_offset = wi::zero (precision);
8219 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8220 return true;
8221 else
8222 wi_offset = wi::to_wide (offset);
8223
8224 bool overflow;
8225 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8226 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8227 if (overflow)
8228 return true;
8229
8230 if (!wi::fits_uhwi_p (total))
8231 return true;
8232
8233 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8234 if (size <= 0)
8235 return true;
8236
8237 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8238 array. */
8239 if (TREE_CODE (base) == ADDR_EXPR)
8240 {
8241 HOST_WIDE_INT base_size;
8242
8243 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8244 if (base_size > 0 && size < base_size)
8245 size = base_size;
8246 }
8247
8248 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8249 }
8250
8251 /* Return a positive integer when the symbol DECL is known to have
8252 a nonzero address, zero when it's known not to (e.g., it's a weak
8253 symbol), and a negative integer when the symbol is not yet in the
8254 symbol table and so whether or not its address is zero is unknown.
8255 For function local objects always return positive integer. */
8256 static int
8257 maybe_nonzero_address (tree decl)
8258 {
8259 if (DECL_P (decl) && decl_in_symtab_p (decl))
8260 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8261 return symbol->nonzero_address ();
8262
8263 /* Function local objects are never NULL. */
8264 if (DECL_P (decl)
8265 && (DECL_CONTEXT (decl)
8266 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8267 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8268 return 1;
8269
8270 return -1;
8271 }
8272
8273 /* Subroutine of fold_binary. This routine performs all of the
8274 transformations that are common to the equality/inequality
8275 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8276 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8277 fold_binary should call fold_binary. Fold a comparison with
8278 tree code CODE and type TYPE with operands OP0 and OP1. Return
8279 the folded comparison or NULL_TREE. */
8280
8281 static tree
8282 fold_comparison (location_t loc, enum tree_code code, tree type,
8283 tree op0, tree op1)
8284 {
8285 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8286 tree arg0, arg1, tem;
8287
8288 arg0 = op0;
8289 arg1 = op1;
8290
8291 STRIP_SIGN_NOPS (arg0);
8292 STRIP_SIGN_NOPS (arg1);
8293
8294 /* For comparisons of pointers we can decompose it to a compile time
8295 comparison of the base objects and the offsets into the object.
8296 This requires at least one operand being an ADDR_EXPR or a
8297 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8298 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8299 && (TREE_CODE (arg0) == ADDR_EXPR
8300 || TREE_CODE (arg1) == ADDR_EXPR
8301 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8302 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8303 {
8304 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8305 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8306 machine_mode mode;
8307 int volatilep, reversep, unsignedp;
8308 bool indirect_base0 = false, indirect_base1 = false;
8309
8310 /* Get base and offset for the access. Strip ADDR_EXPR for
8311 get_inner_reference, but put it back by stripping INDIRECT_REF
8312 off the base object if possible. indirect_baseN will be true
8313 if baseN is not an address but refers to the object itself. */
8314 base0 = arg0;
8315 if (TREE_CODE (arg0) == ADDR_EXPR)
8316 {
8317 base0
8318 = get_inner_reference (TREE_OPERAND (arg0, 0),
8319 &bitsize, &bitpos0, &offset0, &mode,
8320 &unsignedp, &reversep, &volatilep);
8321 if (TREE_CODE (base0) == INDIRECT_REF)
8322 base0 = TREE_OPERAND (base0, 0);
8323 else
8324 indirect_base0 = true;
8325 }
8326 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8327 {
8328 base0 = TREE_OPERAND (arg0, 0);
8329 STRIP_SIGN_NOPS (base0);
8330 if (TREE_CODE (base0) == ADDR_EXPR)
8331 {
8332 base0
8333 = get_inner_reference (TREE_OPERAND (base0, 0),
8334 &bitsize, &bitpos0, &offset0, &mode,
8335 &unsignedp, &reversep, &volatilep);
8336 if (TREE_CODE (base0) == INDIRECT_REF)
8337 base0 = TREE_OPERAND (base0, 0);
8338 else
8339 indirect_base0 = true;
8340 }
8341 if (offset0 == NULL_TREE || integer_zerop (offset0))
8342 offset0 = TREE_OPERAND (arg0, 1);
8343 else
8344 offset0 = size_binop (PLUS_EXPR, offset0,
8345 TREE_OPERAND (arg0, 1));
8346 if (TREE_CODE (offset0) == INTEGER_CST)
8347 {
8348 offset_int tem = wi::sext (wi::to_offset (offset0),
8349 TYPE_PRECISION (sizetype));
8350 tem <<= LOG2_BITS_PER_UNIT;
8351 tem += bitpos0;
8352 if (wi::fits_shwi_p (tem))
8353 {
8354 bitpos0 = tem.to_shwi ();
8355 offset0 = NULL_TREE;
8356 }
8357 }
8358 }
8359
8360 base1 = arg1;
8361 if (TREE_CODE (arg1) == ADDR_EXPR)
8362 {
8363 base1
8364 = get_inner_reference (TREE_OPERAND (arg1, 0),
8365 &bitsize, &bitpos1, &offset1, &mode,
8366 &unsignedp, &reversep, &volatilep);
8367 if (TREE_CODE (base1) == INDIRECT_REF)
8368 base1 = TREE_OPERAND (base1, 0);
8369 else
8370 indirect_base1 = true;
8371 }
8372 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8373 {
8374 base1 = TREE_OPERAND (arg1, 0);
8375 STRIP_SIGN_NOPS (base1);
8376 if (TREE_CODE (base1) == ADDR_EXPR)
8377 {
8378 base1
8379 = get_inner_reference (TREE_OPERAND (base1, 0),
8380 &bitsize, &bitpos1, &offset1, &mode,
8381 &unsignedp, &reversep, &volatilep);
8382 if (TREE_CODE (base1) == INDIRECT_REF)
8383 base1 = TREE_OPERAND (base1, 0);
8384 else
8385 indirect_base1 = true;
8386 }
8387 if (offset1 == NULL_TREE || integer_zerop (offset1))
8388 offset1 = TREE_OPERAND (arg1, 1);
8389 else
8390 offset1 = size_binop (PLUS_EXPR, offset1,
8391 TREE_OPERAND (arg1, 1));
8392 if (TREE_CODE (offset1) == INTEGER_CST)
8393 {
8394 offset_int tem = wi::sext (wi::to_offset (offset1),
8395 TYPE_PRECISION (sizetype));
8396 tem <<= LOG2_BITS_PER_UNIT;
8397 tem += bitpos1;
8398 if (wi::fits_shwi_p (tem))
8399 {
8400 bitpos1 = tem.to_shwi ();
8401 offset1 = NULL_TREE;
8402 }
8403 }
8404 }
8405
8406 /* If we have equivalent bases we might be able to simplify. */
8407 if (indirect_base0 == indirect_base1
8408 && operand_equal_p (base0, base1,
8409 indirect_base0 ? OEP_ADDRESS_OF : 0))
8410 {
8411 /* We can fold this expression to a constant if the non-constant
8412 offset parts are equal. */
8413 if (offset0 == offset1
8414 || (offset0 && offset1
8415 && operand_equal_p (offset0, offset1, 0)))
8416 {
8417 if (!equality_code
8418 && bitpos0 != bitpos1
8419 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8420 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8421 fold_overflow_warning (("assuming pointer wraparound does not "
8422 "occur when comparing P +- C1 with "
8423 "P +- C2"),
8424 WARN_STRICT_OVERFLOW_CONDITIONAL);
8425
8426 switch (code)
8427 {
8428 case EQ_EXPR:
8429 return constant_boolean_node (bitpos0 == bitpos1, type);
8430 case NE_EXPR:
8431 return constant_boolean_node (bitpos0 != bitpos1, type);
8432 case LT_EXPR:
8433 return constant_boolean_node (bitpos0 < bitpos1, type);
8434 case LE_EXPR:
8435 return constant_boolean_node (bitpos0 <= bitpos1, type);
8436 case GE_EXPR:
8437 return constant_boolean_node (bitpos0 >= bitpos1, type);
8438 case GT_EXPR:
8439 return constant_boolean_node (bitpos0 > bitpos1, type);
8440 default:;
8441 }
8442 }
8443 /* We can simplify the comparison to a comparison of the variable
8444 offset parts if the constant offset parts are equal.
8445 Be careful to use signed sizetype here because otherwise we
8446 mess with array offsets in the wrong way. This is possible
8447 because pointer arithmetic is restricted to retain within an
8448 object and overflow on pointer differences is undefined as of
8449 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8450 else if (bitpos0 == bitpos1)
8451 {
8452 /* By converting to signed sizetype we cover middle-end pointer
8453 arithmetic which operates on unsigned pointer types of size
8454 type size and ARRAY_REF offsets which are properly sign or
8455 zero extended from their type in case it is narrower than
8456 sizetype. */
8457 if (offset0 == NULL_TREE)
8458 offset0 = build_int_cst (ssizetype, 0);
8459 else
8460 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8461 if (offset1 == NULL_TREE)
8462 offset1 = build_int_cst (ssizetype, 0);
8463 else
8464 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8465
8466 if (!equality_code
8467 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8468 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8469 fold_overflow_warning (("assuming pointer wraparound does not "
8470 "occur when comparing P +- C1 with "
8471 "P +- C2"),
8472 WARN_STRICT_OVERFLOW_COMPARISON);
8473
8474 return fold_build2_loc (loc, code, type, offset0, offset1);
8475 }
8476 }
8477 /* For equal offsets we can simplify to a comparison of the
8478 base addresses. */
8479 else if (bitpos0 == bitpos1
8480 && (indirect_base0
8481 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8482 && (indirect_base1
8483 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8484 && ((offset0 == offset1)
8485 || (offset0 && offset1
8486 && operand_equal_p (offset0, offset1, 0))))
8487 {
8488 if (indirect_base0)
8489 base0 = build_fold_addr_expr_loc (loc, base0);
8490 if (indirect_base1)
8491 base1 = build_fold_addr_expr_loc (loc, base1);
8492 return fold_build2_loc (loc, code, type, base0, base1);
8493 }
8494 /* Comparison between an ordinary (non-weak) symbol and a null
8495 pointer can be eliminated since such symbols must have a non
8496 null address. In C, relational expressions between pointers
8497 to objects and null pointers are undefined. The results
8498 below follow the C++ rules with the additional property that
8499 every object pointer compares greater than a null pointer.
8500 */
8501 else if (((DECL_P (base0)
8502 && maybe_nonzero_address (base0) > 0
8503 /* Avoid folding references to struct members at offset 0 to
8504 prevent tests like '&ptr->firstmember == 0' from getting
8505 eliminated. When ptr is null, although the -> expression
8506 is strictly speaking invalid, GCC retains it as a matter
8507 of QoI. See PR c/44555. */
8508 && (offset0 == NULL_TREE && bitpos0 != 0))
8509 || CONSTANT_CLASS_P (base0))
8510 && indirect_base0
8511 /* The caller guarantees that when one of the arguments is
8512 constant (i.e., null in this case) it is second. */
8513 && integer_zerop (arg1))
8514 {
8515 switch (code)
8516 {
8517 case EQ_EXPR:
8518 case LE_EXPR:
8519 case LT_EXPR:
8520 return constant_boolean_node (false, type);
8521 case GE_EXPR:
8522 case GT_EXPR:
8523 case NE_EXPR:
8524 return constant_boolean_node (true, type);
8525 default:
8526 gcc_unreachable ();
8527 }
8528 }
8529 }
8530
8531 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8532 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8533 the resulting offset is smaller in absolute value than the
8534 original one and has the same sign. */
8535 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8536 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8537 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8538 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8539 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8540 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8541 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8542 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8543 {
8544 tree const1 = TREE_OPERAND (arg0, 1);
8545 tree const2 = TREE_OPERAND (arg1, 1);
8546 tree variable1 = TREE_OPERAND (arg0, 0);
8547 tree variable2 = TREE_OPERAND (arg1, 0);
8548 tree cst;
8549 const char * const warnmsg = G_("assuming signed overflow does not "
8550 "occur when combining constants around "
8551 "a comparison");
8552
8553 /* Put the constant on the side where it doesn't overflow and is
8554 of lower absolute value and of same sign than before. */
8555 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8556 ? MINUS_EXPR : PLUS_EXPR,
8557 const2, const1);
8558 if (!TREE_OVERFLOW (cst)
8559 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8560 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8561 {
8562 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8563 return fold_build2_loc (loc, code, type,
8564 variable1,
8565 fold_build2_loc (loc, TREE_CODE (arg1),
8566 TREE_TYPE (arg1),
8567 variable2, cst));
8568 }
8569
8570 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8571 ? MINUS_EXPR : PLUS_EXPR,
8572 const1, const2);
8573 if (!TREE_OVERFLOW (cst)
8574 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8575 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8576 {
8577 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8578 return fold_build2_loc (loc, code, type,
8579 fold_build2_loc (loc, TREE_CODE (arg0),
8580 TREE_TYPE (arg0),
8581 variable1, cst),
8582 variable2);
8583 }
8584 }
8585
8586 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8587 if (tem)
8588 return tem;
8589
8590 /* If we are comparing an expression that just has comparisons
8591 of two integer values, arithmetic expressions of those comparisons,
8592 and constants, we can simplify it. There are only three cases
8593 to check: the two values can either be equal, the first can be
8594 greater, or the second can be greater. Fold the expression for
8595 those three values. Since each value must be 0 or 1, we have
8596 eight possibilities, each of which corresponds to the constant 0
8597 or 1 or one of the six possible comparisons.
8598
8599 This handles common cases like (a > b) == 0 but also handles
8600 expressions like ((x > y) - (y > x)) > 0, which supposedly
8601 occur in macroized code. */
8602
8603 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8604 {
8605 tree cval1 = 0, cval2 = 0;
8606 int save_p = 0;
8607
8608 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8609 /* Don't handle degenerate cases here; they should already
8610 have been handled anyway. */
8611 && cval1 != 0 && cval2 != 0
8612 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8613 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8614 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8615 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8616 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8617 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8618 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8619 {
8620 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8621 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8622
8623 /* We can't just pass T to eval_subst in case cval1 or cval2
8624 was the same as ARG1. */
8625
8626 tree high_result
8627 = fold_build2_loc (loc, code, type,
8628 eval_subst (loc, arg0, cval1, maxval,
8629 cval2, minval),
8630 arg1);
8631 tree equal_result
8632 = fold_build2_loc (loc, code, type,
8633 eval_subst (loc, arg0, cval1, maxval,
8634 cval2, maxval),
8635 arg1);
8636 tree low_result
8637 = fold_build2_loc (loc, code, type,
8638 eval_subst (loc, arg0, cval1, minval,
8639 cval2, maxval),
8640 arg1);
8641
8642 /* All three of these results should be 0 or 1. Confirm they are.
8643 Then use those values to select the proper code to use. */
8644
8645 if (TREE_CODE (high_result) == INTEGER_CST
8646 && TREE_CODE (equal_result) == INTEGER_CST
8647 && TREE_CODE (low_result) == INTEGER_CST)
8648 {
8649 /* Make a 3-bit mask with the high-order bit being the
8650 value for `>', the next for '=', and the low for '<'. */
8651 switch ((integer_onep (high_result) * 4)
8652 + (integer_onep (equal_result) * 2)
8653 + integer_onep (low_result))
8654 {
8655 case 0:
8656 /* Always false. */
8657 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8658 case 1:
8659 code = LT_EXPR;
8660 break;
8661 case 2:
8662 code = EQ_EXPR;
8663 break;
8664 case 3:
8665 code = LE_EXPR;
8666 break;
8667 case 4:
8668 code = GT_EXPR;
8669 break;
8670 case 5:
8671 code = NE_EXPR;
8672 break;
8673 case 6:
8674 code = GE_EXPR;
8675 break;
8676 case 7:
8677 /* Always true. */
8678 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8679 }
8680
8681 if (save_p)
8682 {
8683 tem = save_expr (build2 (code, type, cval1, cval2));
8684 protected_set_expr_location (tem, loc);
8685 return tem;
8686 }
8687 return fold_build2_loc (loc, code, type, cval1, cval2);
8688 }
8689 }
8690 }
8691
8692 return NULL_TREE;
8693 }
8694
8695
8696 /* Subroutine of fold_binary. Optimize complex multiplications of the
8697 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8698 argument EXPR represents the expression "z" of type TYPE. */
8699
8700 static tree
8701 fold_mult_zconjz (location_t loc, tree type, tree expr)
8702 {
8703 tree itype = TREE_TYPE (type);
8704 tree rpart, ipart, tem;
8705
8706 if (TREE_CODE (expr) == COMPLEX_EXPR)
8707 {
8708 rpart = TREE_OPERAND (expr, 0);
8709 ipart = TREE_OPERAND (expr, 1);
8710 }
8711 else if (TREE_CODE (expr) == COMPLEX_CST)
8712 {
8713 rpart = TREE_REALPART (expr);
8714 ipart = TREE_IMAGPART (expr);
8715 }
8716 else
8717 {
8718 expr = save_expr (expr);
8719 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8720 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8721 }
8722
8723 rpart = save_expr (rpart);
8724 ipart = save_expr (ipart);
8725 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8726 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8727 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8728 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8729 build_zero_cst (itype));
8730 }
8731
8732
8733 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8734 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
8735 true if successful. */
8736
8737 static bool
8738 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
8739 {
8740 unsigned int i;
8741
8742 if (TREE_CODE (arg) == VECTOR_CST)
8743 {
8744 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8745 elts[i] = VECTOR_CST_ELT (arg, i);
8746 }
8747 else if (TREE_CODE (arg) == CONSTRUCTOR)
8748 {
8749 constructor_elt *elt;
8750
8751 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8752 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8753 return false;
8754 else
8755 elts[i] = elt->value;
8756 }
8757 else
8758 return false;
8759 for (; i < nelts; i++)
8760 elts[i]
8761 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8762 return true;
8763 }
8764
8765 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8766 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8767 NULL_TREE otherwise. */
8768
8769 static tree
8770 fold_vec_perm (tree type, tree arg0, tree arg1, vec_perm_indices sel)
8771 {
8772 unsigned int i;
8773 bool need_ctor = false;
8774
8775 unsigned int nelts = sel.length ();
8776 gcc_assert (TYPE_VECTOR_SUBPARTS (type) == nelts
8777 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8778 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8779 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8780 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8781 return NULL_TREE;
8782
8783 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
8784 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
8785 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
8786 return NULL_TREE;
8787
8788 auto_vec<tree, 32> out_elts (nelts);
8789 for (i = 0; i < nelts; i++)
8790 {
8791 if (!CONSTANT_CLASS_P (in_elts[sel[i]]))
8792 need_ctor = true;
8793 out_elts.quick_push (unshare_expr (in_elts[sel[i]]));
8794 }
8795
8796 if (need_ctor)
8797 {
8798 vec<constructor_elt, va_gc> *v;
8799 vec_alloc (v, nelts);
8800 for (i = 0; i < nelts; i++)
8801 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
8802 return build_constructor (type, v);
8803 }
8804 else
8805 return build_vector (type, out_elts);
8806 }
8807
8808 /* Try to fold a pointer difference of type TYPE two address expressions of
8809 array references AREF0 and AREF1 using location LOC. Return a
8810 simplified expression for the difference or NULL_TREE. */
8811
8812 static tree
8813 fold_addr_of_array_ref_difference (location_t loc, tree type,
8814 tree aref0, tree aref1,
8815 bool use_pointer_diff)
8816 {
8817 tree base0 = TREE_OPERAND (aref0, 0);
8818 tree base1 = TREE_OPERAND (aref1, 0);
8819 tree base_offset = build_int_cst (type, 0);
8820
8821 /* If the bases are array references as well, recurse. If the bases
8822 are pointer indirections compute the difference of the pointers.
8823 If the bases are equal, we are set. */
8824 if ((TREE_CODE (base0) == ARRAY_REF
8825 && TREE_CODE (base1) == ARRAY_REF
8826 && (base_offset
8827 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
8828 use_pointer_diff)))
8829 || (INDIRECT_REF_P (base0)
8830 && INDIRECT_REF_P (base1)
8831 && (base_offset
8832 = use_pointer_diff
8833 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
8834 TREE_OPERAND (base0, 0),
8835 TREE_OPERAND (base1, 0))
8836 : fold_binary_loc (loc, MINUS_EXPR, type,
8837 fold_convert (type,
8838 TREE_OPERAND (base0, 0)),
8839 fold_convert (type,
8840 TREE_OPERAND (base1, 0)))))
8841 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8842 {
8843 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8844 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8845 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8846 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
8847 return fold_build2_loc (loc, PLUS_EXPR, type,
8848 base_offset,
8849 fold_build2_loc (loc, MULT_EXPR, type,
8850 diff, esz));
8851 }
8852 return NULL_TREE;
8853 }
8854
8855 /* If the real or vector real constant CST of type TYPE has an exact
8856 inverse, return it, else return NULL. */
8857
8858 tree
8859 exact_inverse (tree type, tree cst)
8860 {
8861 REAL_VALUE_TYPE r;
8862 tree unit_type;
8863 machine_mode mode;
8864 unsigned vec_nelts, i;
8865
8866 switch (TREE_CODE (cst))
8867 {
8868 case REAL_CST:
8869 r = TREE_REAL_CST (cst);
8870
8871 if (exact_real_inverse (TYPE_MODE (type), &r))
8872 return build_real (type, r);
8873
8874 return NULL_TREE;
8875
8876 case VECTOR_CST:
8877 {
8878 vec_nelts = VECTOR_CST_NELTS (cst);
8879 unit_type = TREE_TYPE (type);
8880 mode = TYPE_MODE (unit_type);
8881
8882 auto_vec<tree, 32> elts (vec_nelts);
8883 for (i = 0; i < vec_nelts; i++)
8884 {
8885 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8886 if (!exact_real_inverse (mode, &r))
8887 return NULL_TREE;
8888 elts.quick_push (build_real (unit_type, r));
8889 }
8890
8891 return build_vector (type, elts);
8892 }
8893
8894 default:
8895 return NULL_TREE;
8896 }
8897 }
8898
8899 /* Mask out the tz least significant bits of X of type TYPE where
8900 tz is the number of trailing zeroes in Y. */
8901 static wide_int
8902 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8903 {
8904 int tz = wi::ctz (y);
8905 if (tz > 0)
8906 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8907 return x;
8908 }
8909
8910 /* Return true when T is an address and is known to be nonzero.
8911 For floating point we further ensure that T is not denormal.
8912 Similar logic is present in nonzero_address in rtlanal.h.
8913
8914 If the return value is based on the assumption that signed overflow
8915 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8916 change *STRICT_OVERFLOW_P. */
8917
8918 static bool
8919 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8920 {
8921 tree type = TREE_TYPE (t);
8922 enum tree_code code;
8923
8924 /* Doing something useful for floating point would need more work. */
8925 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8926 return false;
8927
8928 code = TREE_CODE (t);
8929 switch (TREE_CODE_CLASS (code))
8930 {
8931 case tcc_unary:
8932 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8933 strict_overflow_p);
8934 case tcc_binary:
8935 case tcc_comparison:
8936 return tree_binary_nonzero_warnv_p (code, type,
8937 TREE_OPERAND (t, 0),
8938 TREE_OPERAND (t, 1),
8939 strict_overflow_p);
8940 case tcc_constant:
8941 case tcc_declaration:
8942 case tcc_reference:
8943 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8944
8945 default:
8946 break;
8947 }
8948
8949 switch (code)
8950 {
8951 case TRUTH_NOT_EXPR:
8952 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8953 strict_overflow_p);
8954
8955 case TRUTH_AND_EXPR:
8956 case TRUTH_OR_EXPR:
8957 case TRUTH_XOR_EXPR:
8958 return tree_binary_nonzero_warnv_p (code, type,
8959 TREE_OPERAND (t, 0),
8960 TREE_OPERAND (t, 1),
8961 strict_overflow_p);
8962
8963 case COND_EXPR:
8964 case CONSTRUCTOR:
8965 case OBJ_TYPE_REF:
8966 case ASSERT_EXPR:
8967 case ADDR_EXPR:
8968 case WITH_SIZE_EXPR:
8969 case SSA_NAME:
8970 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8971
8972 case COMPOUND_EXPR:
8973 case MODIFY_EXPR:
8974 case BIND_EXPR:
8975 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
8976 strict_overflow_p);
8977
8978 case SAVE_EXPR:
8979 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
8980 strict_overflow_p);
8981
8982 case CALL_EXPR:
8983 {
8984 tree fndecl = get_callee_fndecl (t);
8985 if (!fndecl) return false;
8986 if (flag_delete_null_pointer_checks && !flag_check_new
8987 && DECL_IS_OPERATOR_NEW (fndecl)
8988 && !TREE_NOTHROW (fndecl))
8989 return true;
8990 if (flag_delete_null_pointer_checks
8991 && lookup_attribute ("returns_nonnull",
8992 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
8993 return true;
8994 return alloca_call_p (t);
8995 }
8996
8997 default:
8998 break;
8999 }
9000 return false;
9001 }
9002
9003 /* Return true when T is an address and is known to be nonzero.
9004 Handle warnings about undefined signed overflow. */
9005
9006 bool
9007 tree_expr_nonzero_p (tree t)
9008 {
9009 bool ret, strict_overflow_p;
9010
9011 strict_overflow_p = false;
9012 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9013 if (strict_overflow_p)
9014 fold_overflow_warning (("assuming signed overflow does not occur when "
9015 "determining that expression is always "
9016 "non-zero"),
9017 WARN_STRICT_OVERFLOW_MISC);
9018 return ret;
9019 }
9020
9021 /* Return true if T is known not to be equal to an integer W. */
9022
9023 bool
9024 expr_not_equal_to (tree t, const wide_int &w)
9025 {
9026 wide_int min, max, nz;
9027 value_range_type rtype;
9028 switch (TREE_CODE (t))
9029 {
9030 case INTEGER_CST:
9031 return wi::to_wide (t) != w;
9032
9033 case SSA_NAME:
9034 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9035 return false;
9036 rtype = get_range_info (t, &min, &max);
9037 if (rtype == VR_RANGE)
9038 {
9039 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9040 return true;
9041 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9042 return true;
9043 }
9044 else if (rtype == VR_ANTI_RANGE
9045 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9046 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9047 return true;
9048 /* If T has some known zero bits and W has any of those bits set,
9049 then T is known not to be equal to W. */
9050 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9051 TYPE_PRECISION (TREE_TYPE (t))), 0))
9052 return true;
9053 return false;
9054
9055 default:
9056 return false;
9057 }
9058 }
9059
9060 /* Fold a binary expression of code CODE and type TYPE with operands
9061 OP0 and OP1. LOC is the location of the resulting expression.
9062 Return the folded expression if folding is successful. Otherwise,
9063 return NULL_TREE. */
9064
9065 tree
9066 fold_binary_loc (location_t loc,
9067 enum tree_code code, tree type, tree op0, tree op1)
9068 {
9069 enum tree_code_class kind = TREE_CODE_CLASS (code);
9070 tree arg0, arg1, tem;
9071 tree t1 = NULL_TREE;
9072 bool strict_overflow_p;
9073 unsigned int prec;
9074
9075 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9076 && TREE_CODE_LENGTH (code) == 2
9077 && op0 != NULL_TREE
9078 && op1 != NULL_TREE);
9079
9080 arg0 = op0;
9081 arg1 = op1;
9082
9083 /* Strip any conversions that don't change the mode. This is
9084 safe for every expression, except for a comparison expression
9085 because its signedness is derived from its operands. So, in
9086 the latter case, only strip conversions that don't change the
9087 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9088 preserved.
9089
9090 Note that this is done as an internal manipulation within the
9091 constant folder, in order to find the simplest representation
9092 of the arguments so that their form can be studied. In any
9093 cases, the appropriate type conversions should be put back in
9094 the tree that will get out of the constant folder. */
9095
9096 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9097 {
9098 STRIP_SIGN_NOPS (arg0);
9099 STRIP_SIGN_NOPS (arg1);
9100 }
9101 else
9102 {
9103 STRIP_NOPS (arg0);
9104 STRIP_NOPS (arg1);
9105 }
9106
9107 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9108 constant but we can't do arithmetic on them. */
9109 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9110 {
9111 tem = const_binop (code, type, arg0, arg1);
9112 if (tem != NULL_TREE)
9113 {
9114 if (TREE_TYPE (tem) != type)
9115 tem = fold_convert_loc (loc, type, tem);
9116 return tem;
9117 }
9118 }
9119
9120 /* If this is a commutative operation, and ARG0 is a constant, move it
9121 to ARG1 to reduce the number of tests below. */
9122 if (commutative_tree_code (code)
9123 && tree_swap_operands_p (arg0, arg1))
9124 return fold_build2_loc (loc, code, type, op1, op0);
9125
9126 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9127 to ARG1 to reduce the number of tests below. */
9128 if (kind == tcc_comparison
9129 && tree_swap_operands_p (arg0, arg1))
9130 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9131
9132 tem = generic_simplify (loc, code, type, op0, op1);
9133 if (tem)
9134 return tem;
9135
9136 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9137
9138 First check for cases where an arithmetic operation is applied to a
9139 compound, conditional, or comparison operation. Push the arithmetic
9140 operation inside the compound or conditional to see if any folding
9141 can then be done. Convert comparison to conditional for this purpose.
9142 The also optimizes non-constant cases that used to be done in
9143 expand_expr.
9144
9145 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9146 one of the operands is a comparison and the other is a comparison, a
9147 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9148 code below would make the expression more complex. Change it to a
9149 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9150 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9151
9152 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9153 || code == EQ_EXPR || code == NE_EXPR)
9154 && TREE_CODE (type) != VECTOR_TYPE
9155 && ((truth_value_p (TREE_CODE (arg0))
9156 && (truth_value_p (TREE_CODE (arg1))
9157 || (TREE_CODE (arg1) == BIT_AND_EXPR
9158 && integer_onep (TREE_OPERAND (arg1, 1)))))
9159 || (truth_value_p (TREE_CODE (arg1))
9160 && (truth_value_p (TREE_CODE (arg0))
9161 || (TREE_CODE (arg0) == BIT_AND_EXPR
9162 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9163 {
9164 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9165 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9166 : TRUTH_XOR_EXPR,
9167 boolean_type_node,
9168 fold_convert_loc (loc, boolean_type_node, arg0),
9169 fold_convert_loc (loc, boolean_type_node, arg1));
9170
9171 if (code == EQ_EXPR)
9172 tem = invert_truthvalue_loc (loc, tem);
9173
9174 return fold_convert_loc (loc, type, tem);
9175 }
9176
9177 if (TREE_CODE_CLASS (code) == tcc_binary
9178 || TREE_CODE_CLASS (code) == tcc_comparison)
9179 {
9180 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9181 {
9182 tem = fold_build2_loc (loc, code, type,
9183 fold_convert_loc (loc, TREE_TYPE (op0),
9184 TREE_OPERAND (arg0, 1)), op1);
9185 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9186 tem);
9187 }
9188 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9189 {
9190 tem = fold_build2_loc (loc, code, type, op0,
9191 fold_convert_loc (loc, TREE_TYPE (op1),
9192 TREE_OPERAND (arg1, 1)));
9193 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9194 tem);
9195 }
9196
9197 if (TREE_CODE (arg0) == COND_EXPR
9198 || TREE_CODE (arg0) == VEC_COND_EXPR
9199 || COMPARISON_CLASS_P (arg0))
9200 {
9201 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9202 arg0, arg1,
9203 /*cond_first_p=*/1);
9204 if (tem != NULL_TREE)
9205 return tem;
9206 }
9207
9208 if (TREE_CODE (arg1) == COND_EXPR
9209 || TREE_CODE (arg1) == VEC_COND_EXPR
9210 || COMPARISON_CLASS_P (arg1))
9211 {
9212 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9213 arg1, arg0,
9214 /*cond_first_p=*/0);
9215 if (tem != NULL_TREE)
9216 return tem;
9217 }
9218 }
9219
9220 switch (code)
9221 {
9222 case MEM_REF:
9223 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9224 if (TREE_CODE (arg0) == ADDR_EXPR
9225 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9226 {
9227 tree iref = TREE_OPERAND (arg0, 0);
9228 return fold_build2 (MEM_REF, type,
9229 TREE_OPERAND (iref, 0),
9230 int_const_binop (PLUS_EXPR, arg1,
9231 TREE_OPERAND (iref, 1)));
9232 }
9233
9234 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9235 if (TREE_CODE (arg0) == ADDR_EXPR
9236 && handled_component_p (TREE_OPERAND (arg0, 0)))
9237 {
9238 tree base;
9239 HOST_WIDE_INT coffset;
9240 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9241 &coffset);
9242 if (!base)
9243 return NULL_TREE;
9244 return fold_build2 (MEM_REF, type,
9245 build_fold_addr_expr (base),
9246 int_const_binop (PLUS_EXPR, arg1,
9247 size_int (coffset)));
9248 }
9249
9250 return NULL_TREE;
9251
9252 case POINTER_PLUS_EXPR:
9253 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9254 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9255 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9256 return fold_convert_loc (loc, type,
9257 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9258 fold_convert_loc (loc, sizetype,
9259 arg1),
9260 fold_convert_loc (loc, sizetype,
9261 arg0)));
9262
9263 return NULL_TREE;
9264
9265 case PLUS_EXPR:
9266 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9267 {
9268 /* X + (X / CST) * -CST is X % CST. */
9269 if (TREE_CODE (arg1) == MULT_EXPR
9270 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9271 && operand_equal_p (arg0,
9272 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9273 {
9274 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9275 tree cst1 = TREE_OPERAND (arg1, 1);
9276 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9277 cst1, cst0);
9278 if (sum && integer_zerop (sum))
9279 return fold_convert_loc (loc, type,
9280 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9281 TREE_TYPE (arg0), arg0,
9282 cst0));
9283 }
9284 }
9285
9286 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9287 one. Make sure the type is not saturating and has the signedness of
9288 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9289 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9290 if ((TREE_CODE (arg0) == MULT_EXPR
9291 || TREE_CODE (arg1) == MULT_EXPR)
9292 && !TYPE_SATURATING (type)
9293 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9294 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9295 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9296 {
9297 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9298 if (tem)
9299 return tem;
9300 }
9301
9302 if (! FLOAT_TYPE_P (type))
9303 {
9304 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9305 (plus (plus (mult) (mult)) (foo)) so that we can
9306 take advantage of the factoring cases below. */
9307 if (ANY_INTEGRAL_TYPE_P (type)
9308 && TYPE_OVERFLOW_WRAPS (type)
9309 && (((TREE_CODE (arg0) == PLUS_EXPR
9310 || TREE_CODE (arg0) == MINUS_EXPR)
9311 && TREE_CODE (arg1) == MULT_EXPR)
9312 || ((TREE_CODE (arg1) == PLUS_EXPR
9313 || TREE_CODE (arg1) == MINUS_EXPR)
9314 && TREE_CODE (arg0) == MULT_EXPR)))
9315 {
9316 tree parg0, parg1, parg, marg;
9317 enum tree_code pcode;
9318
9319 if (TREE_CODE (arg1) == MULT_EXPR)
9320 parg = arg0, marg = arg1;
9321 else
9322 parg = arg1, marg = arg0;
9323 pcode = TREE_CODE (parg);
9324 parg0 = TREE_OPERAND (parg, 0);
9325 parg1 = TREE_OPERAND (parg, 1);
9326 STRIP_NOPS (parg0);
9327 STRIP_NOPS (parg1);
9328
9329 if (TREE_CODE (parg0) == MULT_EXPR
9330 && TREE_CODE (parg1) != MULT_EXPR)
9331 return fold_build2_loc (loc, pcode, type,
9332 fold_build2_loc (loc, PLUS_EXPR, type,
9333 fold_convert_loc (loc, type,
9334 parg0),
9335 fold_convert_loc (loc, type,
9336 marg)),
9337 fold_convert_loc (loc, type, parg1));
9338 if (TREE_CODE (parg0) != MULT_EXPR
9339 && TREE_CODE (parg1) == MULT_EXPR)
9340 return
9341 fold_build2_loc (loc, PLUS_EXPR, type,
9342 fold_convert_loc (loc, type, parg0),
9343 fold_build2_loc (loc, pcode, type,
9344 fold_convert_loc (loc, type, marg),
9345 fold_convert_loc (loc, type,
9346 parg1)));
9347 }
9348 }
9349 else
9350 {
9351 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9352 to __complex__ ( x, y ). This is not the same for SNaNs or
9353 if signed zeros are involved. */
9354 if (!HONOR_SNANS (element_mode (arg0))
9355 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9356 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9357 {
9358 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9359 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9360 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9361 bool arg0rz = false, arg0iz = false;
9362 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9363 || (arg0i && (arg0iz = real_zerop (arg0i))))
9364 {
9365 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9366 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9367 if (arg0rz && arg1i && real_zerop (arg1i))
9368 {
9369 tree rp = arg1r ? arg1r
9370 : build1 (REALPART_EXPR, rtype, arg1);
9371 tree ip = arg0i ? arg0i
9372 : build1 (IMAGPART_EXPR, rtype, arg0);
9373 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9374 }
9375 else if (arg0iz && arg1r && real_zerop (arg1r))
9376 {
9377 tree rp = arg0r ? arg0r
9378 : build1 (REALPART_EXPR, rtype, arg0);
9379 tree ip = arg1i ? arg1i
9380 : build1 (IMAGPART_EXPR, rtype, arg1);
9381 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9382 }
9383 }
9384 }
9385
9386 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9387 We associate floats only if the user has specified
9388 -fassociative-math. */
9389 if (flag_associative_math
9390 && TREE_CODE (arg1) == PLUS_EXPR
9391 && TREE_CODE (arg0) != MULT_EXPR)
9392 {
9393 tree tree10 = TREE_OPERAND (arg1, 0);
9394 tree tree11 = TREE_OPERAND (arg1, 1);
9395 if (TREE_CODE (tree11) == MULT_EXPR
9396 && TREE_CODE (tree10) == MULT_EXPR)
9397 {
9398 tree tree0;
9399 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9400 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9401 }
9402 }
9403 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9404 We associate floats only if the user has specified
9405 -fassociative-math. */
9406 if (flag_associative_math
9407 && TREE_CODE (arg0) == PLUS_EXPR
9408 && TREE_CODE (arg1) != MULT_EXPR)
9409 {
9410 tree tree00 = TREE_OPERAND (arg0, 0);
9411 tree tree01 = TREE_OPERAND (arg0, 1);
9412 if (TREE_CODE (tree01) == MULT_EXPR
9413 && TREE_CODE (tree00) == MULT_EXPR)
9414 {
9415 tree tree0;
9416 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9417 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9418 }
9419 }
9420 }
9421
9422 bit_rotate:
9423 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9424 is a rotate of A by C1 bits. */
9425 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9426 is a rotate of A by B bits.
9427 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
9428 though in this case CODE must be | and not + or ^, otherwise
9429 it doesn't return A when B is 0. */
9430 {
9431 enum tree_code code0, code1;
9432 tree rtype;
9433 code0 = TREE_CODE (arg0);
9434 code1 = TREE_CODE (arg1);
9435 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9436 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9437 && operand_equal_p (TREE_OPERAND (arg0, 0),
9438 TREE_OPERAND (arg1, 0), 0)
9439 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9440 TYPE_UNSIGNED (rtype))
9441 /* Only create rotates in complete modes. Other cases are not
9442 expanded properly. */
9443 && (element_precision (rtype)
9444 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9445 {
9446 tree tree01, tree11;
9447 tree orig_tree01, orig_tree11;
9448 enum tree_code code01, code11;
9449
9450 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
9451 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
9452 STRIP_NOPS (tree01);
9453 STRIP_NOPS (tree11);
9454 code01 = TREE_CODE (tree01);
9455 code11 = TREE_CODE (tree11);
9456 if (code11 != MINUS_EXPR
9457 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
9458 {
9459 std::swap (code0, code1);
9460 std::swap (code01, code11);
9461 std::swap (tree01, tree11);
9462 std::swap (orig_tree01, orig_tree11);
9463 }
9464 if (code01 == INTEGER_CST
9465 && code11 == INTEGER_CST
9466 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9467 == element_precision (rtype)))
9468 {
9469 tem = build2_loc (loc, LROTATE_EXPR,
9470 rtype, TREE_OPERAND (arg0, 0),
9471 code0 == LSHIFT_EXPR
9472 ? orig_tree01 : orig_tree11);
9473 return fold_convert_loc (loc, type, tem);
9474 }
9475 else if (code11 == MINUS_EXPR)
9476 {
9477 tree tree110, tree111;
9478 tree110 = TREE_OPERAND (tree11, 0);
9479 tree111 = TREE_OPERAND (tree11, 1);
9480 STRIP_NOPS (tree110);
9481 STRIP_NOPS (tree111);
9482 if (TREE_CODE (tree110) == INTEGER_CST
9483 && 0 == compare_tree_int (tree110,
9484 element_precision (rtype))
9485 && operand_equal_p (tree01, tree111, 0))
9486 {
9487 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9488 ? LROTATE_EXPR : RROTATE_EXPR),
9489 rtype, TREE_OPERAND (arg0, 0),
9490 orig_tree01);
9491 return fold_convert_loc (loc, type, tem);
9492 }
9493 }
9494 else if (code == BIT_IOR_EXPR
9495 && code11 == BIT_AND_EXPR
9496 && pow2p_hwi (element_precision (rtype)))
9497 {
9498 tree tree110, tree111;
9499 tree110 = TREE_OPERAND (tree11, 0);
9500 tree111 = TREE_OPERAND (tree11, 1);
9501 STRIP_NOPS (tree110);
9502 STRIP_NOPS (tree111);
9503 if (TREE_CODE (tree110) == NEGATE_EXPR
9504 && TREE_CODE (tree111) == INTEGER_CST
9505 && 0 == compare_tree_int (tree111,
9506 element_precision (rtype) - 1)
9507 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
9508 {
9509 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9510 ? LROTATE_EXPR : RROTATE_EXPR),
9511 rtype, TREE_OPERAND (arg0, 0),
9512 orig_tree01);
9513 return fold_convert_loc (loc, type, tem);
9514 }
9515 }
9516 }
9517 }
9518
9519 associate:
9520 /* In most languages, can't associate operations on floats through
9521 parentheses. Rather than remember where the parentheses were, we
9522 don't associate floats at all, unless the user has specified
9523 -fassociative-math.
9524 And, we need to make sure type is not saturating. */
9525
9526 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9527 && !TYPE_SATURATING (type))
9528 {
9529 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9530 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9531 tree atype = type;
9532 bool ok = true;
9533
9534 /* Split both trees into variables, constants, and literals. Then
9535 associate each group together, the constants with literals,
9536 then the result with variables. This increases the chances of
9537 literals being recombined later and of generating relocatable
9538 expressions for the sum of a constant and literal. */
9539 var0 = split_tree (arg0, type, code,
9540 &minus_var0, &con0, &minus_con0,
9541 &lit0, &minus_lit0, 0);
9542 var1 = split_tree (arg1, type, code,
9543 &minus_var1, &con1, &minus_con1,
9544 &lit1, &minus_lit1, code == MINUS_EXPR);
9545
9546 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9547 if (code == MINUS_EXPR)
9548 code = PLUS_EXPR;
9549
9550 /* With undefined overflow prefer doing association in a type
9551 which wraps on overflow, if that is one of the operand types. */
9552 if (POINTER_TYPE_P (type)
9553 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9554 {
9555 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9556 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9557 atype = TREE_TYPE (arg0);
9558 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9559 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9560 atype = TREE_TYPE (arg1);
9561 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9562 }
9563
9564 /* With undefined overflow we can only associate constants with one
9565 variable, and constants whose association doesn't overflow. */
9566 if (POINTER_TYPE_P (atype)
9567 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9568 {
9569 if ((var0 && var1) || (minus_var0 && minus_var1))
9570 {
9571 /* ??? If split_tree would handle NEGATE_EXPR we could
9572 simply reject these cases and the allowed cases would
9573 be the var0/minus_var1 ones. */
9574 tree tmp0 = var0 ? var0 : minus_var0;
9575 tree tmp1 = var1 ? var1 : minus_var1;
9576 bool one_neg = false;
9577
9578 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9579 {
9580 tmp0 = TREE_OPERAND (tmp0, 0);
9581 one_neg = !one_neg;
9582 }
9583 if (CONVERT_EXPR_P (tmp0)
9584 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9585 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9586 <= TYPE_PRECISION (atype)))
9587 tmp0 = TREE_OPERAND (tmp0, 0);
9588 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9589 {
9590 tmp1 = TREE_OPERAND (tmp1, 0);
9591 one_neg = !one_neg;
9592 }
9593 if (CONVERT_EXPR_P (tmp1)
9594 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9595 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9596 <= TYPE_PRECISION (atype)))
9597 tmp1 = TREE_OPERAND (tmp1, 0);
9598 /* The only case we can still associate with two variables
9599 is if they cancel out. */
9600 if (!one_neg
9601 || !operand_equal_p (tmp0, tmp1, 0))
9602 ok = false;
9603 }
9604 else if ((var0 && minus_var1
9605 && ! operand_equal_p (var0, minus_var1, 0))
9606 || (minus_var0 && var1
9607 && ! operand_equal_p (minus_var0, var1, 0)))
9608 ok = false;
9609 }
9610
9611 /* Only do something if we found more than two objects. Otherwise,
9612 nothing has changed and we risk infinite recursion. */
9613 if (ok
9614 && (2 < ((var0 != 0) + (var1 != 0)
9615 + (minus_var0 != 0) + (minus_var1 != 0)
9616 + (con0 != 0) + (con1 != 0)
9617 + (minus_con0 != 0) + (minus_con1 != 0)
9618 + (lit0 != 0) + (lit1 != 0)
9619 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9620 {
9621 var0 = associate_trees (loc, var0, var1, code, atype);
9622 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9623 code, atype);
9624 con0 = associate_trees (loc, con0, con1, code, atype);
9625 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9626 code, atype);
9627 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9628 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9629 code, atype);
9630
9631 if (minus_var0 && var0)
9632 {
9633 var0 = associate_trees (loc, var0, minus_var0,
9634 MINUS_EXPR, atype);
9635 minus_var0 = 0;
9636 }
9637 if (minus_con0 && con0)
9638 {
9639 con0 = associate_trees (loc, con0, minus_con0,
9640 MINUS_EXPR, atype);
9641 minus_con0 = 0;
9642 }
9643
9644 /* Preserve the MINUS_EXPR if the negative part of the literal is
9645 greater than the positive part. Otherwise, the multiplicative
9646 folding code (i.e extract_muldiv) may be fooled in case
9647 unsigned constants are subtracted, like in the following
9648 example: ((X*2 + 4) - 8U)/2. */
9649 if (minus_lit0 && lit0)
9650 {
9651 if (TREE_CODE (lit0) == INTEGER_CST
9652 && TREE_CODE (minus_lit0) == INTEGER_CST
9653 && tree_int_cst_lt (lit0, minus_lit0)
9654 /* But avoid ending up with only negated parts. */
9655 && (var0 || con0))
9656 {
9657 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9658 MINUS_EXPR, atype);
9659 lit0 = 0;
9660 }
9661 else
9662 {
9663 lit0 = associate_trees (loc, lit0, minus_lit0,
9664 MINUS_EXPR, atype);
9665 minus_lit0 = 0;
9666 }
9667 }
9668
9669 /* Don't introduce overflows through reassociation. */
9670 if ((lit0 && TREE_OVERFLOW_P (lit0))
9671 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9672 return NULL_TREE;
9673
9674 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
9675 con0 = associate_trees (loc, con0, lit0, code, atype);
9676 lit0 = 0;
9677 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9678 code, atype);
9679 minus_lit0 = 0;
9680
9681 /* Eliminate minus_con0. */
9682 if (minus_con0)
9683 {
9684 if (con0)
9685 con0 = associate_trees (loc, con0, minus_con0,
9686 MINUS_EXPR, atype);
9687 else if (var0)
9688 var0 = associate_trees (loc, var0, minus_con0,
9689 MINUS_EXPR, atype);
9690 else
9691 gcc_unreachable ();
9692 minus_con0 = 0;
9693 }
9694
9695 /* Eliminate minus_var0. */
9696 if (minus_var0)
9697 {
9698 if (con0)
9699 con0 = associate_trees (loc, con0, minus_var0,
9700 MINUS_EXPR, atype);
9701 else
9702 gcc_unreachable ();
9703 minus_var0 = 0;
9704 }
9705
9706 return
9707 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9708 code, atype));
9709 }
9710 }
9711
9712 return NULL_TREE;
9713
9714 case POINTER_DIFF_EXPR:
9715 case MINUS_EXPR:
9716 /* Fold &a[i] - &a[j] to i-j. */
9717 if (TREE_CODE (arg0) == ADDR_EXPR
9718 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9719 && TREE_CODE (arg1) == ADDR_EXPR
9720 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9721 {
9722 tree tem = fold_addr_of_array_ref_difference (loc, type,
9723 TREE_OPERAND (arg0, 0),
9724 TREE_OPERAND (arg1, 0),
9725 code
9726 == POINTER_DIFF_EXPR);
9727 if (tem)
9728 return tem;
9729 }
9730
9731 /* Further transformations are not for pointers. */
9732 if (code == POINTER_DIFF_EXPR)
9733 return NULL_TREE;
9734
9735 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9736 if (TREE_CODE (arg0) == NEGATE_EXPR
9737 && negate_expr_p (op1))
9738 return fold_build2_loc (loc, MINUS_EXPR, type,
9739 negate_expr (op1),
9740 fold_convert_loc (loc, type,
9741 TREE_OPERAND (arg0, 0)));
9742
9743 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9744 __complex__ ( x, -y ). This is not the same for SNaNs or if
9745 signed zeros are involved. */
9746 if (!HONOR_SNANS (element_mode (arg0))
9747 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9748 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9749 {
9750 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9751 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9752 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9753 bool arg0rz = false, arg0iz = false;
9754 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9755 || (arg0i && (arg0iz = real_zerop (arg0i))))
9756 {
9757 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9758 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9759 if (arg0rz && arg1i && real_zerop (arg1i))
9760 {
9761 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9762 arg1r ? arg1r
9763 : build1 (REALPART_EXPR, rtype, arg1));
9764 tree ip = arg0i ? arg0i
9765 : build1 (IMAGPART_EXPR, rtype, arg0);
9766 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9767 }
9768 else if (arg0iz && arg1r && real_zerop (arg1r))
9769 {
9770 tree rp = arg0r ? arg0r
9771 : build1 (REALPART_EXPR, rtype, arg0);
9772 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9773 arg1i ? arg1i
9774 : build1 (IMAGPART_EXPR, rtype, arg1));
9775 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9776 }
9777 }
9778 }
9779
9780 /* A - B -> A + (-B) if B is easily negatable. */
9781 if (negate_expr_p (op1)
9782 && ! TYPE_OVERFLOW_SANITIZED (type)
9783 && ((FLOAT_TYPE_P (type)
9784 /* Avoid this transformation if B is a positive REAL_CST. */
9785 && (TREE_CODE (op1) != REAL_CST
9786 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9787 || INTEGRAL_TYPE_P (type)))
9788 return fold_build2_loc (loc, PLUS_EXPR, type,
9789 fold_convert_loc (loc, type, arg0),
9790 negate_expr (op1));
9791
9792 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9793 one. Make sure the type is not saturating and has the signedness of
9794 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9795 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9796 if ((TREE_CODE (arg0) == MULT_EXPR
9797 || TREE_CODE (arg1) == MULT_EXPR)
9798 && !TYPE_SATURATING (type)
9799 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9800 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9801 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9802 {
9803 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9804 if (tem)
9805 return tem;
9806 }
9807
9808 goto associate;
9809
9810 case MULT_EXPR:
9811 if (! FLOAT_TYPE_P (type))
9812 {
9813 /* Transform x * -C into -x * C if x is easily negatable. */
9814 if (TREE_CODE (op1) == INTEGER_CST
9815 && tree_int_cst_sgn (op1) == -1
9816 && negate_expr_p (op0)
9817 && negate_expr_p (op1)
9818 && (tem = negate_expr (op1)) != op1
9819 && ! TREE_OVERFLOW (tem))
9820 return fold_build2_loc (loc, MULT_EXPR, type,
9821 fold_convert_loc (loc, type,
9822 negate_expr (op0)), tem);
9823
9824 strict_overflow_p = false;
9825 if (TREE_CODE (arg1) == INTEGER_CST
9826 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9827 &strict_overflow_p)))
9828 {
9829 if (strict_overflow_p)
9830 fold_overflow_warning (("assuming signed overflow does not "
9831 "occur when simplifying "
9832 "multiplication"),
9833 WARN_STRICT_OVERFLOW_MISC);
9834 return fold_convert_loc (loc, type, tem);
9835 }
9836
9837 /* Optimize z * conj(z) for integer complex numbers. */
9838 if (TREE_CODE (arg0) == CONJ_EXPR
9839 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9840 return fold_mult_zconjz (loc, type, arg1);
9841 if (TREE_CODE (arg1) == CONJ_EXPR
9842 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9843 return fold_mult_zconjz (loc, type, arg0);
9844 }
9845 else
9846 {
9847 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9848 This is not the same for NaNs or if signed zeros are
9849 involved. */
9850 if (!HONOR_NANS (arg0)
9851 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9852 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9853 && TREE_CODE (arg1) == COMPLEX_CST
9854 && real_zerop (TREE_REALPART (arg1)))
9855 {
9856 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9857 if (real_onep (TREE_IMAGPART (arg1)))
9858 return
9859 fold_build2_loc (loc, COMPLEX_EXPR, type,
9860 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9861 rtype, arg0)),
9862 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9863 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9864 return
9865 fold_build2_loc (loc, COMPLEX_EXPR, type,
9866 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9867 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9868 rtype, arg0)));
9869 }
9870
9871 /* Optimize z * conj(z) for floating point complex numbers.
9872 Guarded by flag_unsafe_math_optimizations as non-finite
9873 imaginary components don't produce scalar results. */
9874 if (flag_unsafe_math_optimizations
9875 && TREE_CODE (arg0) == CONJ_EXPR
9876 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9877 return fold_mult_zconjz (loc, type, arg1);
9878 if (flag_unsafe_math_optimizations
9879 && TREE_CODE (arg1) == CONJ_EXPR
9880 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9881 return fold_mult_zconjz (loc, type, arg0);
9882 }
9883 goto associate;
9884
9885 case BIT_IOR_EXPR:
9886 /* Canonicalize (X & C1) | C2. */
9887 if (TREE_CODE (arg0) == BIT_AND_EXPR
9888 && TREE_CODE (arg1) == INTEGER_CST
9889 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9890 {
9891 int width = TYPE_PRECISION (type), w;
9892 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
9893 wide_int c2 = wi::to_wide (arg1);
9894
9895 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9896 if ((c1 & c2) == c1)
9897 return omit_one_operand_loc (loc, type, arg1,
9898 TREE_OPERAND (arg0, 0));
9899
9900 wide_int msk = wi::mask (width, false,
9901 TYPE_PRECISION (TREE_TYPE (arg1)));
9902
9903 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9904 if (wi::bit_and_not (msk, c1 | c2) == 0)
9905 {
9906 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9907 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9908 }
9909
9910 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9911 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9912 mode which allows further optimizations. */
9913 c1 &= msk;
9914 c2 &= msk;
9915 wide_int c3 = wi::bit_and_not (c1, c2);
9916 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9917 {
9918 wide_int mask = wi::mask (w, false,
9919 TYPE_PRECISION (type));
9920 if (((c1 | c2) & mask) == mask
9921 && wi::bit_and_not (c1, mask) == 0)
9922 {
9923 c3 = mask;
9924 break;
9925 }
9926 }
9927
9928 if (c3 != c1)
9929 {
9930 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9931 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
9932 wide_int_to_tree (type, c3));
9933 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9934 }
9935 }
9936
9937 /* See if this can be simplified into a rotate first. If that
9938 is unsuccessful continue in the association code. */
9939 goto bit_rotate;
9940
9941 case BIT_XOR_EXPR:
9942 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9943 if (TREE_CODE (arg0) == BIT_AND_EXPR
9944 && INTEGRAL_TYPE_P (type)
9945 && integer_onep (TREE_OPERAND (arg0, 1))
9946 && integer_onep (arg1))
9947 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9948 build_zero_cst (TREE_TYPE (arg0)));
9949
9950 /* See if this can be simplified into a rotate first. If that
9951 is unsuccessful continue in the association code. */
9952 goto bit_rotate;
9953
9954 case BIT_AND_EXPR:
9955 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9956 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9957 && INTEGRAL_TYPE_P (type)
9958 && integer_onep (TREE_OPERAND (arg0, 1))
9959 && integer_onep (arg1))
9960 {
9961 tree tem2;
9962 tem = TREE_OPERAND (arg0, 0);
9963 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9964 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9965 tem, tem2);
9966 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9967 build_zero_cst (TREE_TYPE (tem)));
9968 }
9969 /* Fold ~X & 1 as (X & 1) == 0. */
9970 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9971 && INTEGRAL_TYPE_P (type)
9972 && integer_onep (arg1))
9973 {
9974 tree tem2;
9975 tem = TREE_OPERAND (arg0, 0);
9976 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9977 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9978 tem, tem2);
9979 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9980 build_zero_cst (TREE_TYPE (tem)));
9981 }
9982 /* Fold !X & 1 as X == 0. */
9983 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9984 && integer_onep (arg1))
9985 {
9986 tem = TREE_OPERAND (arg0, 0);
9987 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9988 build_zero_cst (TREE_TYPE (tem)));
9989 }
9990
9991 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
9992 multiple of 1 << CST. */
9993 if (TREE_CODE (arg1) == INTEGER_CST)
9994 {
9995 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
9996 wide_int ncst1 = -cst1;
9997 if ((cst1 & ncst1) == ncst1
9998 && multiple_of_p (type, arg0,
9999 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10000 return fold_convert_loc (loc, type, arg0);
10001 }
10002
10003 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10004 bits from CST2. */
10005 if (TREE_CODE (arg1) == INTEGER_CST
10006 && TREE_CODE (arg0) == MULT_EXPR
10007 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10008 {
10009 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
10010 wide_int masked
10011 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
10012
10013 if (masked == 0)
10014 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10015 arg0, arg1);
10016 else if (masked != warg1)
10017 {
10018 /* Avoid the transform if arg1 is a mask of some
10019 mode which allows further optimizations. */
10020 int pop = wi::popcount (warg1);
10021 if (!(pop >= BITS_PER_UNIT
10022 && pow2p_hwi (pop)
10023 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10024 return fold_build2_loc (loc, code, type, op0,
10025 wide_int_to_tree (type, masked));
10026 }
10027 }
10028
10029 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10030 ((A & N) + B) & M -> (A + B) & M
10031 Similarly if (N & M) == 0,
10032 ((A | N) + B) & M -> (A + B) & M
10033 and for - instead of + (or unary - instead of +)
10034 and/or ^ instead of |.
10035 If B is constant and (B & M) == 0, fold into A & M. */
10036 if (TREE_CODE (arg1) == INTEGER_CST)
10037 {
10038 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10039 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10040 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10041 && (TREE_CODE (arg0) == PLUS_EXPR
10042 || TREE_CODE (arg0) == MINUS_EXPR
10043 || TREE_CODE (arg0) == NEGATE_EXPR)
10044 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10045 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10046 {
10047 tree pmop[2];
10048 int which = 0;
10049 wide_int cst0;
10050
10051 /* Now we know that arg0 is (C + D) or (C - D) or
10052 -C and arg1 (M) is == (1LL << cst) - 1.
10053 Store C into PMOP[0] and D into PMOP[1]. */
10054 pmop[0] = TREE_OPERAND (arg0, 0);
10055 pmop[1] = NULL;
10056 if (TREE_CODE (arg0) != NEGATE_EXPR)
10057 {
10058 pmop[1] = TREE_OPERAND (arg0, 1);
10059 which = 1;
10060 }
10061
10062 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10063 which = -1;
10064
10065 for (; which >= 0; which--)
10066 switch (TREE_CODE (pmop[which]))
10067 {
10068 case BIT_AND_EXPR:
10069 case BIT_IOR_EXPR:
10070 case BIT_XOR_EXPR:
10071 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10072 != INTEGER_CST)
10073 break;
10074 cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
10075 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10076 {
10077 if (cst0 != cst1)
10078 break;
10079 }
10080 else if (cst0 != 0)
10081 break;
10082 /* If C or D is of the form (A & N) where
10083 (N & M) == M, or of the form (A | N) or
10084 (A ^ N) where (N & M) == 0, replace it with A. */
10085 pmop[which] = TREE_OPERAND (pmop[which], 0);
10086 break;
10087 case INTEGER_CST:
10088 /* If C or D is a N where (N & M) == 0, it can be
10089 omitted (assumed 0). */
10090 if ((TREE_CODE (arg0) == PLUS_EXPR
10091 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10092 && (cst1 & wi::to_wide (pmop[which])) == 0)
10093 pmop[which] = NULL;
10094 break;
10095 default:
10096 break;
10097 }
10098
10099 /* Only build anything new if we optimized one or both arguments
10100 above. */
10101 if (pmop[0] != TREE_OPERAND (arg0, 0)
10102 || (TREE_CODE (arg0) != NEGATE_EXPR
10103 && pmop[1] != TREE_OPERAND (arg0, 1)))
10104 {
10105 tree utype = TREE_TYPE (arg0);
10106 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10107 {
10108 /* Perform the operations in a type that has defined
10109 overflow behavior. */
10110 utype = unsigned_type_for (TREE_TYPE (arg0));
10111 if (pmop[0] != NULL)
10112 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10113 if (pmop[1] != NULL)
10114 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10115 }
10116
10117 if (TREE_CODE (arg0) == NEGATE_EXPR)
10118 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10119 else if (TREE_CODE (arg0) == PLUS_EXPR)
10120 {
10121 if (pmop[0] != NULL && pmop[1] != NULL)
10122 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10123 pmop[0], pmop[1]);
10124 else if (pmop[0] != NULL)
10125 tem = pmop[0];
10126 else if (pmop[1] != NULL)
10127 tem = pmop[1];
10128 else
10129 return build_int_cst (type, 0);
10130 }
10131 else if (pmop[0] == NULL)
10132 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10133 else
10134 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10135 pmop[0], pmop[1]);
10136 /* TEM is now the new binary +, - or unary - replacement. */
10137 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10138 fold_convert_loc (loc, utype, arg1));
10139 return fold_convert_loc (loc, type, tem);
10140 }
10141 }
10142 }
10143
10144 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10145 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10146 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10147 {
10148 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10149
10150 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10151 if (mask == -1)
10152 return
10153 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10154 }
10155
10156 goto associate;
10157
10158 case RDIV_EXPR:
10159 /* Don't touch a floating-point divide by zero unless the mode
10160 of the constant can represent infinity. */
10161 if (TREE_CODE (arg1) == REAL_CST
10162 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10163 && real_zerop (arg1))
10164 return NULL_TREE;
10165
10166 /* (-A) / (-B) -> A / B */
10167 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10168 return fold_build2_loc (loc, RDIV_EXPR, type,
10169 TREE_OPERAND (arg0, 0),
10170 negate_expr (arg1));
10171 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10172 return fold_build2_loc (loc, RDIV_EXPR, type,
10173 negate_expr (arg0),
10174 TREE_OPERAND (arg1, 0));
10175 return NULL_TREE;
10176
10177 case TRUNC_DIV_EXPR:
10178 /* Fall through */
10179
10180 case FLOOR_DIV_EXPR:
10181 /* Simplify A / (B << N) where A and B are positive and B is
10182 a power of 2, to A >> (N + log2(B)). */
10183 strict_overflow_p = false;
10184 if (TREE_CODE (arg1) == LSHIFT_EXPR
10185 && (TYPE_UNSIGNED (type)
10186 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10187 {
10188 tree sval = TREE_OPERAND (arg1, 0);
10189 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10190 {
10191 tree sh_cnt = TREE_OPERAND (arg1, 1);
10192 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10193 wi::exact_log2 (wi::to_wide (sval)));
10194
10195 if (strict_overflow_p)
10196 fold_overflow_warning (("assuming signed overflow does not "
10197 "occur when simplifying A / (B << N)"),
10198 WARN_STRICT_OVERFLOW_MISC);
10199
10200 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10201 sh_cnt, pow2);
10202 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10203 fold_convert_loc (loc, type, arg0), sh_cnt);
10204 }
10205 }
10206
10207 /* Fall through */
10208
10209 case ROUND_DIV_EXPR:
10210 case CEIL_DIV_EXPR:
10211 case EXACT_DIV_EXPR:
10212 if (integer_zerop (arg1))
10213 return NULL_TREE;
10214
10215 /* Convert -A / -B to A / B when the type is signed and overflow is
10216 undefined. */
10217 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10218 && TREE_CODE (op0) == NEGATE_EXPR
10219 && negate_expr_p (op1))
10220 {
10221 if (INTEGRAL_TYPE_P (type))
10222 fold_overflow_warning (("assuming signed overflow does not occur "
10223 "when distributing negation across "
10224 "division"),
10225 WARN_STRICT_OVERFLOW_MISC);
10226 return fold_build2_loc (loc, code, type,
10227 fold_convert_loc (loc, type,
10228 TREE_OPERAND (arg0, 0)),
10229 negate_expr (op1));
10230 }
10231 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10232 && TREE_CODE (arg1) == NEGATE_EXPR
10233 && negate_expr_p (op0))
10234 {
10235 if (INTEGRAL_TYPE_P (type))
10236 fold_overflow_warning (("assuming signed overflow does not occur "
10237 "when distributing negation across "
10238 "division"),
10239 WARN_STRICT_OVERFLOW_MISC);
10240 return fold_build2_loc (loc, code, type,
10241 negate_expr (op0),
10242 fold_convert_loc (loc, type,
10243 TREE_OPERAND (arg1, 0)));
10244 }
10245
10246 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10247 operation, EXACT_DIV_EXPR.
10248
10249 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10250 At one time others generated faster code, it's not clear if they do
10251 after the last round to changes to the DIV code in expmed.c. */
10252 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10253 && multiple_of_p (type, arg0, arg1))
10254 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10255 fold_convert (type, arg0),
10256 fold_convert (type, arg1));
10257
10258 strict_overflow_p = false;
10259 if (TREE_CODE (arg1) == INTEGER_CST
10260 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10261 &strict_overflow_p)))
10262 {
10263 if (strict_overflow_p)
10264 fold_overflow_warning (("assuming signed overflow does not occur "
10265 "when simplifying division"),
10266 WARN_STRICT_OVERFLOW_MISC);
10267 return fold_convert_loc (loc, type, tem);
10268 }
10269
10270 return NULL_TREE;
10271
10272 case CEIL_MOD_EXPR:
10273 case FLOOR_MOD_EXPR:
10274 case ROUND_MOD_EXPR:
10275 case TRUNC_MOD_EXPR:
10276 strict_overflow_p = false;
10277 if (TREE_CODE (arg1) == INTEGER_CST
10278 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10279 &strict_overflow_p)))
10280 {
10281 if (strict_overflow_p)
10282 fold_overflow_warning (("assuming signed overflow does not occur "
10283 "when simplifying modulus"),
10284 WARN_STRICT_OVERFLOW_MISC);
10285 return fold_convert_loc (loc, type, tem);
10286 }
10287
10288 return NULL_TREE;
10289
10290 case LROTATE_EXPR:
10291 case RROTATE_EXPR:
10292 case RSHIFT_EXPR:
10293 case LSHIFT_EXPR:
10294 /* Since negative shift count is not well-defined,
10295 don't try to compute it in the compiler. */
10296 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10297 return NULL_TREE;
10298
10299 prec = element_precision (type);
10300
10301 /* If we have a rotate of a bit operation with the rotate count and
10302 the second operand of the bit operation both constant,
10303 permute the two operations. */
10304 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10305 && (TREE_CODE (arg0) == BIT_AND_EXPR
10306 || TREE_CODE (arg0) == BIT_IOR_EXPR
10307 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10308 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10309 {
10310 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10311 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10312 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10313 fold_build2_loc (loc, code, type,
10314 arg00, arg1),
10315 fold_build2_loc (loc, code, type,
10316 arg01, arg1));
10317 }
10318
10319 /* Two consecutive rotates adding up to the some integer
10320 multiple of the precision of the type can be ignored. */
10321 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10322 && TREE_CODE (arg0) == RROTATE_EXPR
10323 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10324 && wi::umod_trunc (wi::to_wide (arg1)
10325 + wi::to_wide (TREE_OPERAND (arg0, 1)),
10326 prec) == 0)
10327 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10328
10329 return NULL_TREE;
10330
10331 case MIN_EXPR:
10332 case MAX_EXPR:
10333 goto associate;
10334
10335 case TRUTH_ANDIF_EXPR:
10336 /* Note that the operands of this must be ints
10337 and their values must be 0 or 1.
10338 ("true" is a fixed value perhaps depending on the language.) */
10339 /* If first arg is constant zero, return it. */
10340 if (integer_zerop (arg0))
10341 return fold_convert_loc (loc, type, arg0);
10342 /* FALLTHRU */
10343 case TRUTH_AND_EXPR:
10344 /* If either arg is constant true, drop it. */
10345 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10346 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10347 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10348 /* Preserve sequence points. */
10349 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10350 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10351 /* If second arg is constant zero, result is zero, but first arg
10352 must be evaluated. */
10353 if (integer_zerop (arg1))
10354 return omit_one_operand_loc (loc, type, arg1, arg0);
10355 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10356 case will be handled here. */
10357 if (integer_zerop (arg0))
10358 return omit_one_operand_loc (loc, type, arg0, arg1);
10359
10360 /* !X && X is always false. */
10361 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10362 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10363 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10364 /* X && !X is always false. */
10365 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10366 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10367 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10368
10369 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10370 means A >= Y && A != MAX, but in this case we know that
10371 A < X <= MAX. */
10372
10373 if (!TREE_SIDE_EFFECTS (arg0)
10374 && !TREE_SIDE_EFFECTS (arg1))
10375 {
10376 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10377 if (tem && !operand_equal_p (tem, arg0, 0))
10378 return fold_build2_loc (loc, code, type, tem, arg1);
10379
10380 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10381 if (tem && !operand_equal_p (tem, arg1, 0))
10382 return fold_build2_loc (loc, code, type, arg0, tem);
10383 }
10384
10385 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10386 != NULL_TREE)
10387 return tem;
10388
10389 return NULL_TREE;
10390
10391 case TRUTH_ORIF_EXPR:
10392 /* Note that the operands of this must be ints
10393 and their values must be 0 or true.
10394 ("true" is a fixed value perhaps depending on the language.) */
10395 /* If first arg is constant true, return it. */
10396 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10397 return fold_convert_loc (loc, type, arg0);
10398 /* FALLTHRU */
10399 case TRUTH_OR_EXPR:
10400 /* If either arg is constant zero, drop it. */
10401 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10402 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10403 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10404 /* Preserve sequence points. */
10405 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10406 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10407 /* If second arg is constant true, result is true, but we must
10408 evaluate first arg. */
10409 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10410 return omit_one_operand_loc (loc, type, arg1, arg0);
10411 /* Likewise for first arg, but note this only occurs here for
10412 TRUTH_OR_EXPR. */
10413 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10414 return omit_one_operand_loc (loc, type, arg0, arg1);
10415
10416 /* !X || X is always true. */
10417 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10418 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10419 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10420 /* X || !X is always true. */
10421 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10422 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10423 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10424
10425 /* (X && !Y) || (!X && Y) is X ^ Y */
10426 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10427 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10428 {
10429 tree a0, a1, l0, l1, n0, n1;
10430
10431 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10432 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10433
10434 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10435 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10436
10437 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10438 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10439
10440 if ((operand_equal_p (n0, a0, 0)
10441 && operand_equal_p (n1, a1, 0))
10442 || (operand_equal_p (n0, a1, 0)
10443 && operand_equal_p (n1, a0, 0)))
10444 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10445 }
10446
10447 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10448 != NULL_TREE)
10449 return tem;
10450
10451 return NULL_TREE;
10452
10453 case TRUTH_XOR_EXPR:
10454 /* If the second arg is constant zero, drop it. */
10455 if (integer_zerop (arg1))
10456 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10457 /* If the second arg is constant true, this is a logical inversion. */
10458 if (integer_onep (arg1))
10459 {
10460 tem = invert_truthvalue_loc (loc, arg0);
10461 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10462 }
10463 /* Identical arguments cancel to zero. */
10464 if (operand_equal_p (arg0, arg1, 0))
10465 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10466
10467 /* !X ^ X is always true. */
10468 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10469 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10470 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10471
10472 /* X ^ !X is always true. */
10473 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10474 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10475 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10476
10477 return NULL_TREE;
10478
10479 case EQ_EXPR:
10480 case NE_EXPR:
10481 STRIP_NOPS (arg0);
10482 STRIP_NOPS (arg1);
10483
10484 tem = fold_comparison (loc, code, type, op0, op1);
10485 if (tem != NULL_TREE)
10486 return tem;
10487
10488 /* bool_var != 1 becomes !bool_var. */
10489 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10490 && code == NE_EXPR)
10491 return fold_convert_loc (loc, type,
10492 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10493 TREE_TYPE (arg0), arg0));
10494
10495 /* bool_var == 0 becomes !bool_var. */
10496 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10497 && code == EQ_EXPR)
10498 return fold_convert_loc (loc, type,
10499 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10500 TREE_TYPE (arg0), arg0));
10501
10502 /* !exp != 0 becomes !exp */
10503 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10504 && code == NE_EXPR)
10505 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10506
10507 /* If this is an EQ or NE comparison with zero and ARG0 is
10508 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10509 two operations, but the latter can be done in one less insn
10510 on machines that have only two-operand insns or on which a
10511 constant cannot be the first operand. */
10512 if (TREE_CODE (arg0) == BIT_AND_EXPR
10513 && integer_zerop (arg1))
10514 {
10515 tree arg00 = TREE_OPERAND (arg0, 0);
10516 tree arg01 = TREE_OPERAND (arg0, 1);
10517 if (TREE_CODE (arg00) == LSHIFT_EXPR
10518 && integer_onep (TREE_OPERAND (arg00, 0)))
10519 {
10520 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10521 arg01, TREE_OPERAND (arg00, 1));
10522 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10523 build_int_cst (TREE_TYPE (arg0), 1));
10524 return fold_build2_loc (loc, code, type,
10525 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10526 arg1);
10527 }
10528 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10529 && integer_onep (TREE_OPERAND (arg01, 0)))
10530 {
10531 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10532 arg00, TREE_OPERAND (arg01, 1));
10533 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10534 build_int_cst (TREE_TYPE (arg0), 1));
10535 return fold_build2_loc (loc, code, type,
10536 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10537 arg1);
10538 }
10539 }
10540
10541 /* If this is an NE or EQ comparison of zero against the result of a
10542 signed MOD operation whose second operand is a power of 2, make
10543 the MOD operation unsigned since it is simpler and equivalent. */
10544 if (integer_zerop (arg1)
10545 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10546 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10547 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10548 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10549 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10550 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10551 {
10552 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10553 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10554 fold_convert_loc (loc, newtype,
10555 TREE_OPERAND (arg0, 0)),
10556 fold_convert_loc (loc, newtype,
10557 TREE_OPERAND (arg0, 1)));
10558
10559 return fold_build2_loc (loc, code, type, newmod,
10560 fold_convert_loc (loc, newtype, arg1));
10561 }
10562
10563 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10564 C1 is a valid shift constant, and C2 is a power of two, i.e.
10565 a single bit. */
10566 if (TREE_CODE (arg0) == BIT_AND_EXPR
10567 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10568 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10569 == INTEGER_CST
10570 && integer_pow2p (TREE_OPERAND (arg0, 1))
10571 && integer_zerop (arg1))
10572 {
10573 tree itype = TREE_TYPE (arg0);
10574 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10575 prec = TYPE_PRECISION (itype);
10576
10577 /* Check for a valid shift count. */
10578 if (wi::ltu_p (wi::to_wide (arg001), prec))
10579 {
10580 tree arg01 = TREE_OPERAND (arg0, 1);
10581 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10582 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10583 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10584 can be rewritten as (X & (C2 << C1)) != 0. */
10585 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10586 {
10587 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10588 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10589 return fold_build2_loc (loc, code, type, tem,
10590 fold_convert_loc (loc, itype, arg1));
10591 }
10592 /* Otherwise, for signed (arithmetic) shifts,
10593 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10594 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10595 else if (!TYPE_UNSIGNED (itype))
10596 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10597 arg000, build_int_cst (itype, 0));
10598 /* Otherwise, of unsigned (logical) shifts,
10599 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10600 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10601 else
10602 return omit_one_operand_loc (loc, type,
10603 code == EQ_EXPR ? integer_one_node
10604 : integer_zero_node,
10605 arg000);
10606 }
10607 }
10608
10609 /* If this is a comparison of a field, we may be able to simplify it. */
10610 if ((TREE_CODE (arg0) == COMPONENT_REF
10611 || TREE_CODE (arg0) == BIT_FIELD_REF)
10612 /* Handle the constant case even without -O
10613 to make sure the warnings are given. */
10614 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10615 {
10616 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10617 if (t1)
10618 return t1;
10619 }
10620
10621 /* Optimize comparisons of strlen vs zero to a compare of the
10622 first character of the string vs zero. To wit,
10623 strlen(ptr) == 0 => *ptr == 0
10624 strlen(ptr) != 0 => *ptr != 0
10625 Other cases should reduce to one of these two (or a constant)
10626 due to the return value of strlen being unsigned. */
10627 if (TREE_CODE (arg0) == CALL_EXPR
10628 && integer_zerop (arg1))
10629 {
10630 tree fndecl = get_callee_fndecl (arg0);
10631
10632 if (fndecl
10633 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10634 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10635 && call_expr_nargs (arg0) == 1
10636 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10637 {
10638 tree iref = build_fold_indirect_ref_loc (loc,
10639 CALL_EXPR_ARG (arg0, 0));
10640 return fold_build2_loc (loc, code, type, iref,
10641 build_int_cst (TREE_TYPE (iref), 0));
10642 }
10643 }
10644
10645 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10646 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10647 if (TREE_CODE (arg0) == RSHIFT_EXPR
10648 && integer_zerop (arg1)
10649 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10650 {
10651 tree arg00 = TREE_OPERAND (arg0, 0);
10652 tree arg01 = TREE_OPERAND (arg0, 1);
10653 tree itype = TREE_TYPE (arg00);
10654 if (wi::to_wide (arg01) == element_precision (itype) - 1)
10655 {
10656 if (TYPE_UNSIGNED (itype))
10657 {
10658 itype = signed_type_for (itype);
10659 arg00 = fold_convert_loc (loc, itype, arg00);
10660 }
10661 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10662 type, arg00, build_zero_cst (itype));
10663 }
10664 }
10665
10666 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10667 (X & C) == 0 when C is a single bit. */
10668 if (TREE_CODE (arg0) == BIT_AND_EXPR
10669 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10670 && integer_zerop (arg1)
10671 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10672 {
10673 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10674 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10675 TREE_OPERAND (arg0, 1));
10676 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10677 type, tem,
10678 fold_convert_loc (loc, TREE_TYPE (arg0),
10679 arg1));
10680 }
10681
10682 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10683 constant C is a power of two, i.e. a single bit. */
10684 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10685 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10686 && integer_zerop (arg1)
10687 && integer_pow2p (TREE_OPERAND (arg0, 1))
10688 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10689 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10690 {
10691 tree arg00 = TREE_OPERAND (arg0, 0);
10692 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10693 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10694 }
10695
10696 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10697 when is C is a power of two, i.e. a single bit. */
10698 if (TREE_CODE (arg0) == BIT_AND_EXPR
10699 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10700 && integer_zerop (arg1)
10701 && integer_pow2p (TREE_OPERAND (arg0, 1))
10702 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10703 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10704 {
10705 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10706 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10707 arg000, TREE_OPERAND (arg0, 1));
10708 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10709 tem, build_int_cst (TREE_TYPE (tem), 0));
10710 }
10711
10712 if (integer_zerop (arg1)
10713 && tree_expr_nonzero_p (arg0))
10714 {
10715 tree res = constant_boolean_node (code==NE_EXPR, type);
10716 return omit_one_operand_loc (loc, type, res, arg0);
10717 }
10718
10719 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
10720 if (TREE_CODE (arg0) == BIT_AND_EXPR
10721 && TREE_CODE (arg1) == BIT_AND_EXPR)
10722 {
10723 tree arg00 = TREE_OPERAND (arg0, 0);
10724 tree arg01 = TREE_OPERAND (arg0, 1);
10725 tree arg10 = TREE_OPERAND (arg1, 0);
10726 tree arg11 = TREE_OPERAND (arg1, 1);
10727 tree itype = TREE_TYPE (arg0);
10728
10729 if (operand_equal_p (arg01, arg11, 0))
10730 {
10731 tem = fold_convert_loc (loc, itype, arg10);
10732 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10733 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10734 return fold_build2_loc (loc, code, type, tem,
10735 build_zero_cst (itype));
10736 }
10737 if (operand_equal_p (arg01, arg10, 0))
10738 {
10739 tem = fold_convert_loc (loc, itype, arg11);
10740 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10741 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10742 return fold_build2_loc (loc, code, type, tem,
10743 build_zero_cst (itype));
10744 }
10745 if (operand_equal_p (arg00, arg11, 0))
10746 {
10747 tem = fold_convert_loc (loc, itype, arg10);
10748 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10749 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10750 return fold_build2_loc (loc, code, type, tem,
10751 build_zero_cst (itype));
10752 }
10753 if (operand_equal_p (arg00, arg10, 0))
10754 {
10755 tem = fold_convert_loc (loc, itype, arg11);
10756 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10757 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10758 return fold_build2_loc (loc, code, type, tem,
10759 build_zero_cst (itype));
10760 }
10761 }
10762
10763 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10764 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10765 {
10766 tree arg00 = TREE_OPERAND (arg0, 0);
10767 tree arg01 = TREE_OPERAND (arg0, 1);
10768 tree arg10 = TREE_OPERAND (arg1, 0);
10769 tree arg11 = TREE_OPERAND (arg1, 1);
10770 tree itype = TREE_TYPE (arg0);
10771
10772 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10773 operand_equal_p guarantees no side-effects so we don't need
10774 to use omit_one_operand on Z. */
10775 if (operand_equal_p (arg01, arg11, 0))
10776 return fold_build2_loc (loc, code, type, arg00,
10777 fold_convert_loc (loc, TREE_TYPE (arg00),
10778 arg10));
10779 if (operand_equal_p (arg01, arg10, 0))
10780 return fold_build2_loc (loc, code, type, arg00,
10781 fold_convert_loc (loc, TREE_TYPE (arg00),
10782 arg11));
10783 if (operand_equal_p (arg00, arg11, 0))
10784 return fold_build2_loc (loc, code, type, arg01,
10785 fold_convert_loc (loc, TREE_TYPE (arg01),
10786 arg10));
10787 if (operand_equal_p (arg00, arg10, 0))
10788 return fold_build2_loc (loc, code, type, arg01,
10789 fold_convert_loc (loc, TREE_TYPE (arg01),
10790 arg11));
10791
10792 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10793 if (TREE_CODE (arg01) == INTEGER_CST
10794 && TREE_CODE (arg11) == INTEGER_CST)
10795 {
10796 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10797 fold_convert_loc (loc, itype, arg11));
10798 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10799 return fold_build2_loc (loc, code, type, tem,
10800 fold_convert_loc (loc, itype, arg10));
10801 }
10802 }
10803
10804 /* Attempt to simplify equality/inequality comparisons of complex
10805 values. Only lower the comparison if the result is known or
10806 can be simplified to a single scalar comparison. */
10807 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10808 || TREE_CODE (arg0) == COMPLEX_CST)
10809 && (TREE_CODE (arg1) == COMPLEX_EXPR
10810 || TREE_CODE (arg1) == COMPLEX_CST))
10811 {
10812 tree real0, imag0, real1, imag1;
10813 tree rcond, icond;
10814
10815 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10816 {
10817 real0 = TREE_OPERAND (arg0, 0);
10818 imag0 = TREE_OPERAND (arg0, 1);
10819 }
10820 else
10821 {
10822 real0 = TREE_REALPART (arg0);
10823 imag0 = TREE_IMAGPART (arg0);
10824 }
10825
10826 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10827 {
10828 real1 = TREE_OPERAND (arg1, 0);
10829 imag1 = TREE_OPERAND (arg1, 1);
10830 }
10831 else
10832 {
10833 real1 = TREE_REALPART (arg1);
10834 imag1 = TREE_IMAGPART (arg1);
10835 }
10836
10837 rcond = fold_binary_loc (loc, code, type, real0, real1);
10838 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10839 {
10840 if (integer_zerop (rcond))
10841 {
10842 if (code == EQ_EXPR)
10843 return omit_two_operands_loc (loc, type, boolean_false_node,
10844 imag0, imag1);
10845 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10846 }
10847 else
10848 {
10849 if (code == NE_EXPR)
10850 return omit_two_operands_loc (loc, type, boolean_true_node,
10851 imag0, imag1);
10852 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10853 }
10854 }
10855
10856 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10857 if (icond && TREE_CODE (icond) == INTEGER_CST)
10858 {
10859 if (integer_zerop (icond))
10860 {
10861 if (code == EQ_EXPR)
10862 return omit_two_operands_loc (loc, type, boolean_false_node,
10863 real0, real1);
10864 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10865 }
10866 else
10867 {
10868 if (code == NE_EXPR)
10869 return omit_two_operands_loc (loc, type, boolean_true_node,
10870 real0, real1);
10871 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10872 }
10873 }
10874 }
10875
10876 return NULL_TREE;
10877
10878 case LT_EXPR:
10879 case GT_EXPR:
10880 case LE_EXPR:
10881 case GE_EXPR:
10882 tem = fold_comparison (loc, code, type, op0, op1);
10883 if (tem != NULL_TREE)
10884 return tem;
10885
10886 /* Transform comparisons of the form X +- C CMP X. */
10887 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10888 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10889 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10890 && !HONOR_SNANS (arg0))
10891 {
10892 tree arg01 = TREE_OPERAND (arg0, 1);
10893 enum tree_code code0 = TREE_CODE (arg0);
10894 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10895
10896 /* (X - c) > X becomes false. */
10897 if (code == GT_EXPR
10898 && ((code0 == MINUS_EXPR && is_positive >= 0)
10899 || (code0 == PLUS_EXPR && is_positive <= 0)))
10900 return constant_boolean_node (0, type);
10901
10902 /* Likewise (X + c) < X becomes false. */
10903 if (code == LT_EXPR
10904 && ((code0 == PLUS_EXPR && is_positive >= 0)
10905 || (code0 == MINUS_EXPR && is_positive <= 0)))
10906 return constant_boolean_node (0, type);
10907
10908 /* Convert (X - c) <= X to true. */
10909 if (!HONOR_NANS (arg1)
10910 && code == LE_EXPR
10911 && ((code0 == MINUS_EXPR && is_positive >= 0)
10912 || (code0 == PLUS_EXPR && is_positive <= 0)))
10913 return constant_boolean_node (1, type);
10914
10915 /* Convert (X + c) >= X to true. */
10916 if (!HONOR_NANS (arg1)
10917 && code == GE_EXPR
10918 && ((code0 == PLUS_EXPR && is_positive >= 0)
10919 || (code0 == MINUS_EXPR && is_positive <= 0)))
10920 return constant_boolean_node (1, type);
10921 }
10922
10923 /* If we are comparing an ABS_EXPR with a constant, we can
10924 convert all the cases into explicit comparisons, but they may
10925 well not be faster than doing the ABS and one comparison.
10926 But ABS (X) <= C is a range comparison, which becomes a subtraction
10927 and a comparison, and is probably faster. */
10928 if (code == LE_EXPR
10929 && TREE_CODE (arg1) == INTEGER_CST
10930 && TREE_CODE (arg0) == ABS_EXPR
10931 && ! TREE_SIDE_EFFECTS (arg0)
10932 && (0 != (tem = negate_expr (arg1)))
10933 && TREE_CODE (tem) == INTEGER_CST
10934 && !TREE_OVERFLOW (tem))
10935 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
10936 build2 (GE_EXPR, type,
10937 TREE_OPERAND (arg0, 0), tem),
10938 build2 (LE_EXPR, type,
10939 TREE_OPERAND (arg0, 0), arg1));
10940
10941 /* Convert ABS_EXPR<x> >= 0 to true. */
10942 strict_overflow_p = false;
10943 if (code == GE_EXPR
10944 && (integer_zerop (arg1)
10945 || (! HONOR_NANS (arg0)
10946 && real_zerop (arg1)))
10947 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
10948 {
10949 if (strict_overflow_p)
10950 fold_overflow_warning (("assuming signed overflow does not occur "
10951 "when simplifying comparison of "
10952 "absolute value and zero"),
10953 WARN_STRICT_OVERFLOW_CONDITIONAL);
10954 return omit_one_operand_loc (loc, type,
10955 constant_boolean_node (true, type),
10956 arg0);
10957 }
10958
10959 /* Convert ABS_EXPR<x> < 0 to false. */
10960 strict_overflow_p = false;
10961 if (code == LT_EXPR
10962 && (integer_zerop (arg1) || real_zerop (arg1))
10963 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
10964 {
10965 if (strict_overflow_p)
10966 fold_overflow_warning (("assuming signed overflow does not occur "
10967 "when simplifying comparison of "
10968 "absolute value and zero"),
10969 WARN_STRICT_OVERFLOW_CONDITIONAL);
10970 return omit_one_operand_loc (loc, type,
10971 constant_boolean_node (false, type),
10972 arg0);
10973 }
10974
10975 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10976 and similarly for >= into !=. */
10977 if ((code == LT_EXPR || code == GE_EXPR)
10978 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10979 && TREE_CODE (arg1) == LSHIFT_EXPR
10980 && integer_onep (TREE_OPERAND (arg1, 0)))
10981 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10982 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10983 TREE_OPERAND (arg1, 1)),
10984 build_zero_cst (TREE_TYPE (arg0)));
10985
10986 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
10987 otherwise Y might be >= # of bits in X's type and thus e.g.
10988 (unsigned char) (1 << Y) for Y 15 might be 0.
10989 If the cast is widening, then 1 << Y should have unsigned type,
10990 otherwise if Y is number of bits in the signed shift type minus 1,
10991 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
10992 31 might be 0xffffffff80000000. */
10993 if ((code == LT_EXPR || code == GE_EXPR)
10994 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10995 && CONVERT_EXPR_P (arg1)
10996 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
10997 && (element_precision (TREE_TYPE (arg1))
10998 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
10999 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11000 || (element_precision (TREE_TYPE (arg1))
11001 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11002 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11003 {
11004 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11005 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11006 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11007 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11008 build_zero_cst (TREE_TYPE (arg0)));
11009 }
11010
11011 return NULL_TREE;
11012
11013 case UNORDERED_EXPR:
11014 case ORDERED_EXPR:
11015 case UNLT_EXPR:
11016 case UNLE_EXPR:
11017 case UNGT_EXPR:
11018 case UNGE_EXPR:
11019 case UNEQ_EXPR:
11020 case LTGT_EXPR:
11021 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11022 {
11023 tree targ0 = strip_float_extensions (arg0);
11024 tree targ1 = strip_float_extensions (arg1);
11025 tree newtype = TREE_TYPE (targ0);
11026
11027 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11028 newtype = TREE_TYPE (targ1);
11029
11030 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11031 return fold_build2_loc (loc, code, type,
11032 fold_convert_loc (loc, newtype, targ0),
11033 fold_convert_loc (loc, newtype, targ1));
11034 }
11035
11036 return NULL_TREE;
11037
11038 case COMPOUND_EXPR:
11039 /* When pedantic, a compound expression can be neither an lvalue
11040 nor an integer constant expression. */
11041 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11042 return NULL_TREE;
11043 /* Don't let (0, 0) be null pointer constant. */
11044 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11045 : fold_convert_loc (loc, type, arg1);
11046 return pedantic_non_lvalue_loc (loc, tem);
11047
11048 case ASSERT_EXPR:
11049 /* An ASSERT_EXPR should never be passed to fold_binary. */
11050 gcc_unreachable ();
11051
11052 default:
11053 return NULL_TREE;
11054 } /* switch (code) */
11055 }
11056
11057 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11058 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11059 of GOTO_EXPR. */
11060
11061 static tree
11062 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11063 {
11064 switch (TREE_CODE (*tp))
11065 {
11066 case LABEL_EXPR:
11067 return *tp;
11068
11069 case GOTO_EXPR:
11070 *walk_subtrees = 0;
11071
11072 /* fall through */
11073
11074 default:
11075 return NULL_TREE;
11076 }
11077 }
11078
11079 /* Return whether the sub-tree ST contains a label which is accessible from
11080 outside the sub-tree. */
11081
11082 static bool
11083 contains_label_p (tree st)
11084 {
11085 return
11086 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11087 }
11088
11089 /* Fold a ternary expression of code CODE and type TYPE with operands
11090 OP0, OP1, and OP2. Return the folded expression if folding is
11091 successful. Otherwise, return NULL_TREE. */
11092
11093 tree
11094 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11095 tree op0, tree op1, tree op2)
11096 {
11097 tree tem;
11098 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11099 enum tree_code_class kind = TREE_CODE_CLASS (code);
11100
11101 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11102 && TREE_CODE_LENGTH (code) == 3);
11103
11104 /* If this is a commutative operation, and OP0 is a constant, move it
11105 to OP1 to reduce the number of tests below. */
11106 if (commutative_ternary_tree_code (code)
11107 && tree_swap_operands_p (op0, op1))
11108 return fold_build3_loc (loc, code, type, op1, op0, op2);
11109
11110 tem = generic_simplify (loc, code, type, op0, op1, op2);
11111 if (tem)
11112 return tem;
11113
11114 /* Strip any conversions that don't change the mode. This is safe
11115 for every expression, except for a comparison expression because
11116 its signedness is derived from its operands. So, in the latter
11117 case, only strip conversions that don't change the signedness.
11118
11119 Note that this is done as an internal manipulation within the
11120 constant folder, in order to find the simplest representation of
11121 the arguments so that their form can be studied. In any cases,
11122 the appropriate type conversions should be put back in the tree
11123 that will get out of the constant folder. */
11124 if (op0)
11125 {
11126 arg0 = op0;
11127 STRIP_NOPS (arg0);
11128 }
11129
11130 if (op1)
11131 {
11132 arg1 = op1;
11133 STRIP_NOPS (arg1);
11134 }
11135
11136 if (op2)
11137 {
11138 arg2 = op2;
11139 STRIP_NOPS (arg2);
11140 }
11141
11142 switch (code)
11143 {
11144 case COMPONENT_REF:
11145 if (TREE_CODE (arg0) == CONSTRUCTOR
11146 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11147 {
11148 unsigned HOST_WIDE_INT idx;
11149 tree field, value;
11150 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11151 if (field == arg1)
11152 return value;
11153 }
11154 return NULL_TREE;
11155
11156 case COND_EXPR:
11157 case VEC_COND_EXPR:
11158 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11159 so all simple results must be passed through pedantic_non_lvalue. */
11160 if (TREE_CODE (arg0) == INTEGER_CST)
11161 {
11162 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11163 tem = integer_zerop (arg0) ? op2 : op1;
11164 /* Only optimize constant conditions when the selected branch
11165 has the same type as the COND_EXPR. This avoids optimizing
11166 away "c ? x : throw", where the throw has a void type.
11167 Avoid throwing away that operand which contains label. */
11168 if ((!TREE_SIDE_EFFECTS (unused_op)
11169 || !contains_label_p (unused_op))
11170 && (! VOID_TYPE_P (TREE_TYPE (tem))
11171 || VOID_TYPE_P (type)))
11172 return pedantic_non_lvalue_loc (loc, tem);
11173 return NULL_TREE;
11174 }
11175 else if (TREE_CODE (arg0) == VECTOR_CST)
11176 {
11177 if ((TREE_CODE (arg1) == VECTOR_CST
11178 || TREE_CODE (arg1) == CONSTRUCTOR)
11179 && (TREE_CODE (arg2) == VECTOR_CST
11180 || TREE_CODE (arg2) == CONSTRUCTOR))
11181 {
11182 unsigned int nelts = VECTOR_CST_NELTS (arg0), i;
11183 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11184 auto_vec_perm_indices sel (nelts);
11185 for (i = 0; i < nelts; i++)
11186 {
11187 tree val = VECTOR_CST_ELT (arg0, i);
11188 if (integer_all_onesp (val))
11189 sel.quick_push (i);
11190 else if (integer_zerop (val))
11191 sel.quick_push (nelts + i);
11192 else /* Currently unreachable. */
11193 return NULL_TREE;
11194 }
11195 tree t = fold_vec_perm (type, arg1, arg2, sel);
11196 if (t != NULL_TREE)
11197 return t;
11198 }
11199 }
11200
11201 /* If we have A op B ? A : C, we may be able to convert this to a
11202 simpler expression, depending on the operation and the values
11203 of B and C. Signed zeros prevent all of these transformations,
11204 for reasons given above each one.
11205
11206 Also try swapping the arguments and inverting the conditional. */
11207 if (COMPARISON_CLASS_P (arg0)
11208 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11209 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11210 {
11211 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11212 if (tem)
11213 return tem;
11214 }
11215
11216 if (COMPARISON_CLASS_P (arg0)
11217 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11218 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11219 {
11220 location_t loc0 = expr_location_or (arg0, loc);
11221 tem = fold_invert_truthvalue (loc0, arg0);
11222 if (tem && COMPARISON_CLASS_P (tem))
11223 {
11224 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11225 if (tem)
11226 return tem;
11227 }
11228 }
11229
11230 /* If the second operand is simpler than the third, swap them
11231 since that produces better jump optimization results. */
11232 if (truth_value_p (TREE_CODE (arg0))
11233 && tree_swap_operands_p (op1, op2))
11234 {
11235 location_t loc0 = expr_location_or (arg0, loc);
11236 /* See if this can be inverted. If it can't, possibly because
11237 it was a floating-point inequality comparison, don't do
11238 anything. */
11239 tem = fold_invert_truthvalue (loc0, arg0);
11240 if (tem)
11241 return fold_build3_loc (loc, code, type, tem, op2, op1);
11242 }
11243
11244 /* Convert A ? 1 : 0 to simply A. */
11245 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11246 : (integer_onep (op1)
11247 && !VECTOR_TYPE_P (type)))
11248 && integer_zerop (op2)
11249 /* If we try to convert OP0 to our type, the
11250 call to fold will try to move the conversion inside
11251 a COND, which will recurse. In that case, the COND_EXPR
11252 is probably the best choice, so leave it alone. */
11253 && type == TREE_TYPE (arg0))
11254 return pedantic_non_lvalue_loc (loc, arg0);
11255
11256 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11257 over COND_EXPR in cases such as floating point comparisons. */
11258 if (integer_zerop (op1)
11259 && code == COND_EXPR
11260 && integer_onep (op2)
11261 && !VECTOR_TYPE_P (type)
11262 && truth_value_p (TREE_CODE (arg0)))
11263 return pedantic_non_lvalue_loc (loc,
11264 fold_convert_loc (loc, type,
11265 invert_truthvalue_loc (loc,
11266 arg0)));
11267
11268 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11269 if (TREE_CODE (arg0) == LT_EXPR
11270 && integer_zerop (TREE_OPERAND (arg0, 1))
11271 && integer_zerop (op2)
11272 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11273 {
11274 /* sign_bit_p looks through both zero and sign extensions,
11275 but for this optimization only sign extensions are
11276 usable. */
11277 tree tem2 = TREE_OPERAND (arg0, 0);
11278 while (tem != tem2)
11279 {
11280 if (TREE_CODE (tem2) != NOP_EXPR
11281 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11282 {
11283 tem = NULL_TREE;
11284 break;
11285 }
11286 tem2 = TREE_OPERAND (tem2, 0);
11287 }
11288 /* sign_bit_p only checks ARG1 bits within A's precision.
11289 If <sign bit of A> has wider type than A, bits outside
11290 of A's precision in <sign bit of A> need to be checked.
11291 If they are all 0, this optimization needs to be done
11292 in unsigned A's type, if they are all 1 in signed A's type,
11293 otherwise this can't be done. */
11294 if (tem
11295 && TYPE_PRECISION (TREE_TYPE (tem))
11296 < TYPE_PRECISION (TREE_TYPE (arg1))
11297 && TYPE_PRECISION (TREE_TYPE (tem))
11298 < TYPE_PRECISION (type))
11299 {
11300 int inner_width, outer_width;
11301 tree tem_type;
11302
11303 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11304 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11305 if (outer_width > TYPE_PRECISION (type))
11306 outer_width = TYPE_PRECISION (type);
11307
11308 wide_int mask = wi::shifted_mask
11309 (inner_width, outer_width - inner_width, false,
11310 TYPE_PRECISION (TREE_TYPE (arg1)));
11311
11312 wide_int common = mask & wi::to_wide (arg1);
11313 if (common == mask)
11314 {
11315 tem_type = signed_type_for (TREE_TYPE (tem));
11316 tem = fold_convert_loc (loc, tem_type, tem);
11317 }
11318 else if (common == 0)
11319 {
11320 tem_type = unsigned_type_for (TREE_TYPE (tem));
11321 tem = fold_convert_loc (loc, tem_type, tem);
11322 }
11323 else
11324 tem = NULL;
11325 }
11326
11327 if (tem)
11328 return
11329 fold_convert_loc (loc, type,
11330 fold_build2_loc (loc, BIT_AND_EXPR,
11331 TREE_TYPE (tem), tem,
11332 fold_convert_loc (loc,
11333 TREE_TYPE (tem),
11334 arg1)));
11335 }
11336
11337 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11338 already handled above. */
11339 if (TREE_CODE (arg0) == BIT_AND_EXPR
11340 && integer_onep (TREE_OPERAND (arg0, 1))
11341 && integer_zerop (op2)
11342 && integer_pow2p (arg1))
11343 {
11344 tree tem = TREE_OPERAND (arg0, 0);
11345 STRIP_NOPS (tem);
11346 if (TREE_CODE (tem) == RSHIFT_EXPR
11347 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11348 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11349 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11350 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11351 fold_convert_loc (loc, type,
11352 TREE_OPERAND (tem, 0)),
11353 op1);
11354 }
11355
11356 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11357 is probably obsolete because the first operand should be a
11358 truth value (that's why we have the two cases above), but let's
11359 leave it in until we can confirm this for all front-ends. */
11360 if (integer_zerop (op2)
11361 && TREE_CODE (arg0) == NE_EXPR
11362 && integer_zerop (TREE_OPERAND (arg0, 1))
11363 && integer_pow2p (arg1)
11364 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11365 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11366 arg1, OEP_ONLY_CONST))
11367 return pedantic_non_lvalue_loc (loc,
11368 fold_convert_loc (loc, type,
11369 TREE_OPERAND (arg0, 0)));
11370
11371 /* Disable the transformations below for vectors, since
11372 fold_binary_op_with_conditional_arg may undo them immediately,
11373 yielding an infinite loop. */
11374 if (code == VEC_COND_EXPR)
11375 return NULL_TREE;
11376
11377 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11378 if (integer_zerop (op2)
11379 && truth_value_p (TREE_CODE (arg0))
11380 && truth_value_p (TREE_CODE (arg1))
11381 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11382 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11383 : TRUTH_ANDIF_EXPR,
11384 type, fold_convert_loc (loc, type, arg0), op1);
11385
11386 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11387 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11388 && truth_value_p (TREE_CODE (arg0))
11389 && truth_value_p (TREE_CODE (arg1))
11390 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11391 {
11392 location_t loc0 = expr_location_or (arg0, loc);
11393 /* Only perform transformation if ARG0 is easily inverted. */
11394 tem = fold_invert_truthvalue (loc0, arg0);
11395 if (tem)
11396 return fold_build2_loc (loc, code == VEC_COND_EXPR
11397 ? BIT_IOR_EXPR
11398 : TRUTH_ORIF_EXPR,
11399 type, fold_convert_loc (loc, type, tem),
11400 op1);
11401 }
11402
11403 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11404 if (integer_zerop (arg1)
11405 && truth_value_p (TREE_CODE (arg0))
11406 && truth_value_p (TREE_CODE (op2))
11407 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11408 {
11409 location_t loc0 = expr_location_or (arg0, loc);
11410 /* Only perform transformation if ARG0 is easily inverted. */
11411 tem = fold_invert_truthvalue (loc0, arg0);
11412 if (tem)
11413 return fold_build2_loc (loc, code == VEC_COND_EXPR
11414 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11415 type, fold_convert_loc (loc, type, tem),
11416 op2);
11417 }
11418
11419 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11420 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11421 && truth_value_p (TREE_CODE (arg0))
11422 && truth_value_p (TREE_CODE (op2))
11423 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11424 return fold_build2_loc (loc, code == VEC_COND_EXPR
11425 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11426 type, fold_convert_loc (loc, type, arg0), op2);
11427
11428 return NULL_TREE;
11429
11430 case CALL_EXPR:
11431 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11432 of fold_ternary on them. */
11433 gcc_unreachable ();
11434
11435 case BIT_FIELD_REF:
11436 if (TREE_CODE (arg0) == VECTOR_CST
11437 && (type == TREE_TYPE (TREE_TYPE (arg0))
11438 || (TREE_CODE (type) == VECTOR_TYPE
11439 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11440 {
11441 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11442 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11443 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11444 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11445
11446 if (n != 0
11447 && (idx % width) == 0
11448 && (n % width) == 0
11449 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11450 {
11451 idx = idx / width;
11452 n = n / width;
11453
11454 if (TREE_CODE (arg0) == VECTOR_CST)
11455 {
11456 if (n == 1)
11457 return VECTOR_CST_ELT (arg0, idx);
11458
11459 auto_vec<tree, 32> vals (n);
11460 for (unsigned i = 0; i < n; ++i)
11461 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11462 return build_vector (type, vals);
11463 }
11464 }
11465 }
11466
11467 /* On constants we can use native encode/interpret to constant
11468 fold (nearly) all BIT_FIELD_REFs. */
11469 if (CONSTANT_CLASS_P (arg0)
11470 && can_native_interpret_type_p (type)
11471 && BITS_PER_UNIT == 8)
11472 {
11473 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11474 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11475 /* Limit us to a reasonable amount of work. To relax the
11476 other limitations we need bit-shifting of the buffer
11477 and rounding up the size. */
11478 if (bitpos % BITS_PER_UNIT == 0
11479 && bitsize % BITS_PER_UNIT == 0
11480 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11481 {
11482 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11483 unsigned HOST_WIDE_INT len
11484 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11485 bitpos / BITS_PER_UNIT);
11486 if (len > 0
11487 && len * BITS_PER_UNIT >= bitsize)
11488 {
11489 tree v = native_interpret_expr (type, b,
11490 bitsize / BITS_PER_UNIT);
11491 if (v)
11492 return v;
11493 }
11494 }
11495 }
11496
11497 return NULL_TREE;
11498
11499 case FMA_EXPR:
11500 /* For integers we can decompose the FMA if possible. */
11501 if (TREE_CODE (arg0) == INTEGER_CST
11502 && TREE_CODE (arg1) == INTEGER_CST)
11503 return fold_build2_loc (loc, PLUS_EXPR, type,
11504 const_binop (MULT_EXPR, arg0, arg1), arg2);
11505 if (integer_zerop (arg2))
11506 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11507
11508 return fold_fma (loc, type, arg0, arg1, arg2);
11509
11510 case VEC_PERM_EXPR:
11511 if (TREE_CODE (arg2) == VECTOR_CST)
11512 {
11513 unsigned int nelts = VECTOR_CST_NELTS (arg2), i, mask, mask2;
11514 bool need_mask_canon = false;
11515 bool need_mask_canon2 = false;
11516 bool all_in_vec0 = true;
11517 bool all_in_vec1 = true;
11518 bool maybe_identity = true;
11519 bool single_arg = (op0 == op1);
11520 bool changed = false;
11521
11522 mask2 = 2 * nelts - 1;
11523 mask = single_arg ? (nelts - 1) : mask2;
11524 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11525 auto_vec_perm_indices sel (nelts);
11526 auto_vec_perm_indices sel2 (nelts);
11527 for (i = 0; i < nelts; i++)
11528 {
11529 tree val = VECTOR_CST_ELT (arg2, i);
11530 if (TREE_CODE (val) != INTEGER_CST)
11531 return NULL_TREE;
11532
11533 /* Make sure that the perm value is in an acceptable
11534 range. */
11535 wi::tree_to_wide_ref t = wi::to_wide (val);
11536 need_mask_canon |= wi::gtu_p (t, mask);
11537 need_mask_canon2 |= wi::gtu_p (t, mask2);
11538 unsigned int elt = t.to_uhwi () & mask;
11539 unsigned int elt2 = t.to_uhwi () & mask2;
11540
11541 if (elt < nelts)
11542 all_in_vec1 = false;
11543 else
11544 all_in_vec0 = false;
11545
11546 if ((elt & (nelts - 1)) != i)
11547 maybe_identity = false;
11548
11549 sel.quick_push (elt);
11550 sel2.quick_push (elt2);
11551 }
11552
11553 if (maybe_identity)
11554 {
11555 if (all_in_vec0)
11556 return op0;
11557 if (all_in_vec1)
11558 return op1;
11559 }
11560
11561 if (all_in_vec0)
11562 op1 = op0;
11563 else if (all_in_vec1)
11564 {
11565 op0 = op1;
11566 for (i = 0; i < nelts; i++)
11567 sel[i] -= nelts;
11568 need_mask_canon = true;
11569 }
11570
11571 if ((TREE_CODE (op0) == VECTOR_CST
11572 || TREE_CODE (op0) == CONSTRUCTOR)
11573 && (TREE_CODE (op1) == VECTOR_CST
11574 || TREE_CODE (op1) == CONSTRUCTOR))
11575 {
11576 tree t = fold_vec_perm (type, op0, op1, sel);
11577 if (t != NULL_TREE)
11578 return t;
11579 }
11580
11581 if (op0 == op1 && !single_arg)
11582 changed = true;
11583
11584 /* Some targets are deficient and fail to expand a single
11585 argument permutation while still allowing an equivalent
11586 2-argument version. */
11587 if (need_mask_canon && arg2 == op2
11588 && !can_vec_perm_p (TYPE_MODE (type), false, &sel)
11589 && can_vec_perm_p (TYPE_MODE (type), false, &sel2))
11590 {
11591 need_mask_canon = need_mask_canon2;
11592 sel = sel2;
11593 }
11594
11595 if (need_mask_canon && arg2 == op2)
11596 {
11597 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11598 auto_vec<tree, 32> tsel (nelts);
11599 for (i = 0; i < nelts; i++)
11600 tsel.quick_push (build_int_cst (eltype, sel[i]));
11601 op2 = build_vector (TREE_TYPE (arg2), tsel);
11602 changed = true;
11603 }
11604
11605 if (changed)
11606 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11607 }
11608 return NULL_TREE;
11609
11610 case BIT_INSERT_EXPR:
11611 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
11612 if (TREE_CODE (arg0) == INTEGER_CST
11613 && TREE_CODE (arg1) == INTEGER_CST)
11614 {
11615 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11616 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11617 wide_int tem = (wi::to_wide (arg0)
11618 & wi::shifted_mask (bitpos, bitsize, true,
11619 TYPE_PRECISION (type)));
11620 wide_int tem2
11621 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11622 bitsize), bitpos);
11623 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11624 }
11625 else if (TREE_CODE (arg0) == VECTOR_CST
11626 && CONSTANT_CLASS_P (arg1)
11627 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11628 TREE_TYPE (arg1)))
11629 {
11630 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11631 unsigned HOST_WIDE_INT elsize
11632 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11633 if (bitpos % elsize == 0)
11634 {
11635 unsigned k = bitpos / elsize;
11636 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11637 return arg0;
11638 else
11639 {
11640 unsigned int nelts = VECTOR_CST_NELTS (arg0);
11641 auto_vec<tree, 32> elts (nelts);
11642 elts.quick_grow (nelts);
11643 memcpy (&elts[0], VECTOR_CST_ELTS (arg0),
11644 sizeof (tree) * nelts);
11645 elts[k] = arg1;
11646 return build_vector (type, elts);
11647 }
11648 }
11649 }
11650 return NULL_TREE;
11651
11652 default:
11653 return NULL_TREE;
11654 } /* switch (code) */
11655 }
11656
11657 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11658 of an array (or vector). */
11659
11660 tree
11661 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11662 {
11663 tree index_type = NULL_TREE;
11664 offset_int low_bound = 0;
11665
11666 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11667 {
11668 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11669 if (domain_type && TYPE_MIN_VALUE (domain_type))
11670 {
11671 /* Static constructors for variably sized objects makes no sense. */
11672 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11673 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11674 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11675 }
11676 }
11677
11678 if (index_type)
11679 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11680 TYPE_SIGN (index_type));
11681
11682 offset_int index = low_bound - 1;
11683 if (index_type)
11684 index = wi::ext (index, TYPE_PRECISION (index_type),
11685 TYPE_SIGN (index_type));
11686
11687 offset_int max_index;
11688 unsigned HOST_WIDE_INT cnt;
11689 tree cfield, cval;
11690
11691 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11692 {
11693 /* Array constructor might explicitly set index, or specify a range,
11694 or leave index NULL meaning that it is next index after previous
11695 one. */
11696 if (cfield)
11697 {
11698 if (TREE_CODE (cfield) == INTEGER_CST)
11699 max_index = index = wi::to_offset (cfield);
11700 else
11701 {
11702 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11703 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11704 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11705 }
11706 }
11707 else
11708 {
11709 index += 1;
11710 if (index_type)
11711 index = wi::ext (index, TYPE_PRECISION (index_type),
11712 TYPE_SIGN (index_type));
11713 max_index = index;
11714 }
11715
11716 /* Do we have match? */
11717 if (wi::cmpu (access_index, index) >= 0
11718 && wi::cmpu (access_index, max_index) <= 0)
11719 return cval;
11720 }
11721 return NULL_TREE;
11722 }
11723
11724 /* Perform constant folding and related simplification of EXPR.
11725 The related simplifications include x*1 => x, x*0 => 0, etc.,
11726 and application of the associative law.
11727 NOP_EXPR conversions may be removed freely (as long as we
11728 are careful not to change the type of the overall expression).
11729 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11730 but we can constant-fold them if they have constant operands. */
11731
11732 #ifdef ENABLE_FOLD_CHECKING
11733 # define fold(x) fold_1 (x)
11734 static tree fold_1 (tree);
11735 static
11736 #endif
11737 tree
11738 fold (tree expr)
11739 {
11740 const tree t = expr;
11741 enum tree_code code = TREE_CODE (t);
11742 enum tree_code_class kind = TREE_CODE_CLASS (code);
11743 tree tem;
11744 location_t loc = EXPR_LOCATION (expr);
11745
11746 /* Return right away if a constant. */
11747 if (kind == tcc_constant)
11748 return t;
11749
11750 /* CALL_EXPR-like objects with variable numbers of operands are
11751 treated specially. */
11752 if (kind == tcc_vl_exp)
11753 {
11754 if (code == CALL_EXPR)
11755 {
11756 tem = fold_call_expr (loc, expr, false);
11757 return tem ? tem : expr;
11758 }
11759 return expr;
11760 }
11761
11762 if (IS_EXPR_CODE_CLASS (kind))
11763 {
11764 tree type = TREE_TYPE (t);
11765 tree op0, op1, op2;
11766
11767 switch (TREE_CODE_LENGTH (code))
11768 {
11769 case 1:
11770 op0 = TREE_OPERAND (t, 0);
11771 tem = fold_unary_loc (loc, code, type, op0);
11772 return tem ? tem : expr;
11773 case 2:
11774 op0 = TREE_OPERAND (t, 0);
11775 op1 = TREE_OPERAND (t, 1);
11776 tem = fold_binary_loc (loc, code, type, op0, op1);
11777 return tem ? tem : expr;
11778 case 3:
11779 op0 = TREE_OPERAND (t, 0);
11780 op1 = TREE_OPERAND (t, 1);
11781 op2 = TREE_OPERAND (t, 2);
11782 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11783 return tem ? tem : expr;
11784 default:
11785 break;
11786 }
11787 }
11788
11789 switch (code)
11790 {
11791 case ARRAY_REF:
11792 {
11793 tree op0 = TREE_OPERAND (t, 0);
11794 tree op1 = TREE_OPERAND (t, 1);
11795
11796 if (TREE_CODE (op1) == INTEGER_CST
11797 && TREE_CODE (op0) == CONSTRUCTOR
11798 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11799 {
11800 tree val = get_array_ctor_element_at_index (op0,
11801 wi::to_offset (op1));
11802 if (val)
11803 return val;
11804 }
11805
11806 return t;
11807 }
11808
11809 /* Return a VECTOR_CST if possible. */
11810 case CONSTRUCTOR:
11811 {
11812 tree type = TREE_TYPE (t);
11813 if (TREE_CODE (type) != VECTOR_TYPE)
11814 return t;
11815
11816 unsigned i;
11817 tree val;
11818 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11819 if (! CONSTANT_CLASS_P (val))
11820 return t;
11821
11822 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
11823 }
11824
11825 case CONST_DECL:
11826 return fold (DECL_INITIAL (t));
11827
11828 default:
11829 return t;
11830 } /* switch (code) */
11831 }
11832
11833 #ifdef ENABLE_FOLD_CHECKING
11834 #undef fold
11835
11836 static void fold_checksum_tree (const_tree, struct md5_ctx *,
11837 hash_table<nofree_ptr_hash<const tree_node> > *);
11838 static void fold_check_failed (const_tree, const_tree);
11839 void print_fold_checksum (const_tree);
11840
11841 /* When --enable-checking=fold, compute a digest of expr before
11842 and after actual fold call to see if fold did not accidentally
11843 change original expr. */
11844
11845 tree
11846 fold (tree expr)
11847 {
11848 tree ret;
11849 struct md5_ctx ctx;
11850 unsigned char checksum_before[16], checksum_after[16];
11851 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11852
11853 md5_init_ctx (&ctx);
11854 fold_checksum_tree (expr, &ctx, &ht);
11855 md5_finish_ctx (&ctx, checksum_before);
11856 ht.empty ();
11857
11858 ret = fold_1 (expr);
11859
11860 md5_init_ctx (&ctx);
11861 fold_checksum_tree (expr, &ctx, &ht);
11862 md5_finish_ctx (&ctx, checksum_after);
11863
11864 if (memcmp (checksum_before, checksum_after, 16))
11865 fold_check_failed (expr, ret);
11866
11867 return ret;
11868 }
11869
11870 void
11871 print_fold_checksum (const_tree expr)
11872 {
11873 struct md5_ctx ctx;
11874 unsigned char checksum[16], cnt;
11875 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11876
11877 md5_init_ctx (&ctx);
11878 fold_checksum_tree (expr, &ctx, &ht);
11879 md5_finish_ctx (&ctx, checksum);
11880 for (cnt = 0; cnt < 16; ++cnt)
11881 fprintf (stderr, "%02x", checksum[cnt]);
11882 putc ('\n', stderr);
11883 }
11884
11885 static void
11886 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
11887 {
11888 internal_error ("fold check: original tree changed by fold");
11889 }
11890
11891 static void
11892 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
11893 hash_table<nofree_ptr_hash <const tree_node> > *ht)
11894 {
11895 const tree_node **slot;
11896 enum tree_code code;
11897 union tree_node buf;
11898 int i, len;
11899
11900 recursive_label:
11901 if (expr == NULL)
11902 return;
11903 slot = ht->find_slot (expr, INSERT);
11904 if (*slot != NULL)
11905 return;
11906 *slot = expr;
11907 code = TREE_CODE (expr);
11908 if (TREE_CODE_CLASS (code) == tcc_declaration
11909 && HAS_DECL_ASSEMBLER_NAME_P (expr))
11910 {
11911 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
11912 memcpy ((char *) &buf, expr, tree_size (expr));
11913 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
11914 buf.decl_with_vis.symtab_node = NULL;
11915 expr = (tree) &buf;
11916 }
11917 else if (TREE_CODE_CLASS (code) == tcc_type
11918 && (TYPE_POINTER_TO (expr)
11919 || TYPE_REFERENCE_TO (expr)
11920 || TYPE_CACHED_VALUES_P (expr)
11921 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
11922 || TYPE_NEXT_VARIANT (expr)
11923 || TYPE_ALIAS_SET_KNOWN_P (expr)))
11924 {
11925 /* Allow these fields to be modified. */
11926 tree tmp;
11927 memcpy ((char *) &buf, expr, tree_size (expr));
11928 expr = tmp = (tree) &buf;
11929 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
11930 TYPE_POINTER_TO (tmp) = NULL;
11931 TYPE_REFERENCE_TO (tmp) = NULL;
11932 TYPE_NEXT_VARIANT (tmp) = NULL;
11933 TYPE_ALIAS_SET (tmp) = -1;
11934 if (TYPE_CACHED_VALUES_P (tmp))
11935 {
11936 TYPE_CACHED_VALUES_P (tmp) = 0;
11937 TYPE_CACHED_VALUES (tmp) = NULL;
11938 }
11939 }
11940 md5_process_bytes (expr, tree_size (expr), ctx);
11941 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
11942 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11943 if (TREE_CODE_CLASS (code) != tcc_type
11944 && TREE_CODE_CLASS (code) != tcc_declaration
11945 && code != TREE_LIST
11946 && code != SSA_NAME
11947 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
11948 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11949 switch (TREE_CODE_CLASS (code))
11950 {
11951 case tcc_constant:
11952 switch (code)
11953 {
11954 case STRING_CST:
11955 md5_process_bytes (TREE_STRING_POINTER (expr),
11956 TREE_STRING_LENGTH (expr), ctx);
11957 break;
11958 case COMPLEX_CST:
11959 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11960 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11961 break;
11962 case VECTOR_CST:
11963 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
11964 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
11965 break;
11966 default:
11967 break;
11968 }
11969 break;
11970 case tcc_exceptional:
11971 switch (code)
11972 {
11973 case TREE_LIST:
11974 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11975 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11976 expr = TREE_CHAIN (expr);
11977 goto recursive_label;
11978 break;
11979 case TREE_VEC:
11980 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11981 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11982 break;
11983 default:
11984 break;
11985 }
11986 break;
11987 case tcc_expression:
11988 case tcc_reference:
11989 case tcc_comparison:
11990 case tcc_unary:
11991 case tcc_binary:
11992 case tcc_statement:
11993 case tcc_vl_exp:
11994 len = TREE_OPERAND_LENGTH (expr);
11995 for (i = 0; i < len; ++i)
11996 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11997 break;
11998 case tcc_declaration:
11999 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12000 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12001 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12002 {
12003 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12004 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12005 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12006 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12007 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12008 }
12009
12010 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12011 {
12012 if (TREE_CODE (expr) == FUNCTION_DECL)
12013 {
12014 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12015 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12016 }
12017 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12018 }
12019 break;
12020 case tcc_type:
12021 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12022 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12023 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12024 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12025 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12026 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12027 if (INTEGRAL_TYPE_P (expr)
12028 || SCALAR_FLOAT_TYPE_P (expr))
12029 {
12030 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12031 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12032 }
12033 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12034 if (TREE_CODE (expr) == RECORD_TYPE
12035 || TREE_CODE (expr) == UNION_TYPE
12036 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12037 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12038 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12039 break;
12040 default:
12041 break;
12042 }
12043 }
12044
12045 /* Helper function for outputting the checksum of a tree T. When
12046 debugging with gdb, you can "define mynext" to be "next" followed
12047 by "call debug_fold_checksum (op0)", then just trace down till the
12048 outputs differ. */
12049
12050 DEBUG_FUNCTION void
12051 debug_fold_checksum (const_tree t)
12052 {
12053 int i;
12054 unsigned char checksum[16];
12055 struct md5_ctx ctx;
12056 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12057
12058 md5_init_ctx (&ctx);
12059 fold_checksum_tree (t, &ctx, &ht);
12060 md5_finish_ctx (&ctx, checksum);
12061 ht.empty ();
12062
12063 for (i = 0; i < 16; i++)
12064 fprintf (stderr, "%d ", checksum[i]);
12065
12066 fprintf (stderr, "\n");
12067 }
12068
12069 #endif
12070
12071 /* Fold a unary tree expression with code CODE of type TYPE with an
12072 operand OP0. LOC is the location of the resulting expression.
12073 Return a folded expression if successful. Otherwise, return a tree
12074 expression with code CODE of type TYPE with an operand OP0. */
12075
12076 tree
12077 fold_build1_loc (location_t loc,
12078 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12079 {
12080 tree tem;
12081 #ifdef ENABLE_FOLD_CHECKING
12082 unsigned char checksum_before[16], checksum_after[16];
12083 struct md5_ctx ctx;
12084 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12085
12086 md5_init_ctx (&ctx);
12087 fold_checksum_tree (op0, &ctx, &ht);
12088 md5_finish_ctx (&ctx, checksum_before);
12089 ht.empty ();
12090 #endif
12091
12092 tem = fold_unary_loc (loc, code, type, op0);
12093 if (!tem)
12094 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12095
12096 #ifdef ENABLE_FOLD_CHECKING
12097 md5_init_ctx (&ctx);
12098 fold_checksum_tree (op0, &ctx, &ht);
12099 md5_finish_ctx (&ctx, checksum_after);
12100
12101 if (memcmp (checksum_before, checksum_after, 16))
12102 fold_check_failed (op0, tem);
12103 #endif
12104 return tem;
12105 }
12106
12107 /* Fold a binary tree expression with code CODE of type TYPE with
12108 operands OP0 and OP1. LOC is the location of the resulting
12109 expression. Return a folded expression if successful. Otherwise,
12110 return a tree expression with code CODE of type TYPE with operands
12111 OP0 and OP1. */
12112
12113 tree
12114 fold_build2_loc (location_t loc,
12115 enum tree_code code, tree type, tree op0, tree op1
12116 MEM_STAT_DECL)
12117 {
12118 tree tem;
12119 #ifdef ENABLE_FOLD_CHECKING
12120 unsigned char checksum_before_op0[16],
12121 checksum_before_op1[16],
12122 checksum_after_op0[16],
12123 checksum_after_op1[16];
12124 struct md5_ctx ctx;
12125 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12126
12127 md5_init_ctx (&ctx);
12128 fold_checksum_tree (op0, &ctx, &ht);
12129 md5_finish_ctx (&ctx, checksum_before_op0);
12130 ht.empty ();
12131
12132 md5_init_ctx (&ctx);
12133 fold_checksum_tree (op1, &ctx, &ht);
12134 md5_finish_ctx (&ctx, checksum_before_op1);
12135 ht.empty ();
12136 #endif
12137
12138 tem = fold_binary_loc (loc, code, type, op0, op1);
12139 if (!tem)
12140 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12141
12142 #ifdef ENABLE_FOLD_CHECKING
12143 md5_init_ctx (&ctx);
12144 fold_checksum_tree (op0, &ctx, &ht);
12145 md5_finish_ctx (&ctx, checksum_after_op0);
12146 ht.empty ();
12147
12148 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12149 fold_check_failed (op0, tem);
12150
12151 md5_init_ctx (&ctx);
12152 fold_checksum_tree (op1, &ctx, &ht);
12153 md5_finish_ctx (&ctx, checksum_after_op1);
12154
12155 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12156 fold_check_failed (op1, tem);
12157 #endif
12158 return tem;
12159 }
12160
12161 /* Fold a ternary tree expression with code CODE of type TYPE with
12162 operands OP0, OP1, and OP2. Return a folded expression if
12163 successful. Otherwise, return a tree expression with code CODE of
12164 type TYPE with operands OP0, OP1, and OP2. */
12165
12166 tree
12167 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12168 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12169 {
12170 tree tem;
12171 #ifdef ENABLE_FOLD_CHECKING
12172 unsigned char checksum_before_op0[16],
12173 checksum_before_op1[16],
12174 checksum_before_op2[16],
12175 checksum_after_op0[16],
12176 checksum_after_op1[16],
12177 checksum_after_op2[16];
12178 struct md5_ctx ctx;
12179 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12180
12181 md5_init_ctx (&ctx);
12182 fold_checksum_tree (op0, &ctx, &ht);
12183 md5_finish_ctx (&ctx, checksum_before_op0);
12184 ht.empty ();
12185
12186 md5_init_ctx (&ctx);
12187 fold_checksum_tree (op1, &ctx, &ht);
12188 md5_finish_ctx (&ctx, checksum_before_op1);
12189 ht.empty ();
12190
12191 md5_init_ctx (&ctx);
12192 fold_checksum_tree (op2, &ctx, &ht);
12193 md5_finish_ctx (&ctx, checksum_before_op2);
12194 ht.empty ();
12195 #endif
12196
12197 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12198 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12199 if (!tem)
12200 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12201
12202 #ifdef ENABLE_FOLD_CHECKING
12203 md5_init_ctx (&ctx);
12204 fold_checksum_tree (op0, &ctx, &ht);
12205 md5_finish_ctx (&ctx, checksum_after_op0);
12206 ht.empty ();
12207
12208 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12209 fold_check_failed (op0, tem);
12210
12211 md5_init_ctx (&ctx);
12212 fold_checksum_tree (op1, &ctx, &ht);
12213 md5_finish_ctx (&ctx, checksum_after_op1);
12214 ht.empty ();
12215
12216 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12217 fold_check_failed (op1, tem);
12218
12219 md5_init_ctx (&ctx);
12220 fold_checksum_tree (op2, &ctx, &ht);
12221 md5_finish_ctx (&ctx, checksum_after_op2);
12222
12223 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12224 fold_check_failed (op2, tem);
12225 #endif
12226 return tem;
12227 }
12228
12229 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12230 arguments in ARGARRAY, and a null static chain.
12231 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12232 of type TYPE from the given operands as constructed by build_call_array. */
12233
12234 tree
12235 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12236 int nargs, tree *argarray)
12237 {
12238 tree tem;
12239 #ifdef ENABLE_FOLD_CHECKING
12240 unsigned char checksum_before_fn[16],
12241 checksum_before_arglist[16],
12242 checksum_after_fn[16],
12243 checksum_after_arglist[16];
12244 struct md5_ctx ctx;
12245 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12246 int i;
12247
12248 md5_init_ctx (&ctx);
12249 fold_checksum_tree (fn, &ctx, &ht);
12250 md5_finish_ctx (&ctx, checksum_before_fn);
12251 ht.empty ();
12252
12253 md5_init_ctx (&ctx);
12254 for (i = 0; i < nargs; i++)
12255 fold_checksum_tree (argarray[i], &ctx, &ht);
12256 md5_finish_ctx (&ctx, checksum_before_arglist);
12257 ht.empty ();
12258 #endif
12259
12260 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12261 if (!tem)
12262 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12263
12264 #ifdef ENABLE_FOLD_CHECKING
12265 md5_init_ctx (&ctx);
12266 fold_checksum_tree (fn, &ctx, &ht);
12267 md5_finish_ctx (&ctx, checksum_after_fn);
12268 ht.empty ();
12269
12270 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12271 fold_check_failed (fn, tem);
12272
12273 md5_init_ctx (&ctx);
12274 for (i = 0; i < nargs; i++)
12275 fold_checksum_tree (argarray[i], &ctx, &ht);
12276 md5_finish_ctx (&ctx, checksum_after_arglist);
12277
12278 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12279 fold_check_failed (NULL_TREE, tem);
12280 #endif
12281 return tem;
12282 }
12283
12284 /* Perform constant folding and related simplification of initializer
12285 expression EXPR. These behave identically to "fold_buildN" but ignore
12286 potential run-time traps and exceptions that fold must preserve. */
12287
12288 #define START_FOLD_INIT \
12289 int saved_signaling_nans = flag_signaling_nans;\
12290 int saved_trapping_math = flag_trapping_math;\
12291 int saved_rounding_math = flag_rounding_math;\
12292 int saved_trapv = flag_trapv;\
12293 int saved_folding_initializer = folding_initializer;\
12294 flag_signaling_nans = 0;\
12295 flag_trapping_math = 0;\
12296 flag_rounding_math = 0;\
12297 flag_trapv = 0;\
12298 folding_initializer = 1;
12299
12300 #define END_FOLD_INIT \
12301 flag_signaling_nans = saved_signaling_nans;\
12302 flag_trapping_math = saved_trapping_math;\
12303 flag_rounding_math = saved_rounding_math;\
12304 flag_trapv = saved_trapv;\
12305 folding_initializer = saved_folding_initializer;
12306
12307 tree
12308 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12309 tree type, tree op)
12310 {
12311 tree result;
12312 START_FOLD_INIT;
12313
12314 result = fold_build1_loc (loc, code, type, op);
12315
12316 END_FOLD_INIT;
12317 return result;
12318 }
12319
12320 tree
12321 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12322 tree type, tree op0, tree op1)
12323 {
12324 tree result;
12325 START_FOLD_INIT;
12326
12327 result = fold_build2_loc (loc, code, type, op0, op1);
12328
12329 END_FOLD_INIT;
12330 return result;
12331 }
12332
12333 tree
12334 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12335 int nargs, tree *argarray)
12336 {
12337 tree result;
12338 START_FOLD_INIT;
12339
12340 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12341
12342 END_FOLD_INIT;
12343 return result;
12344 }
12345
12346 #undef START_FOLD_INIT
12347 #undef END_FOLD_INIT
12348
12349 /* Determine if first argument is a multiple of second argument. Return 0 if
12350 it is not, or we cannot easily determined it to be.
12351
12352 An example of the sort of thing we care about (at this point; this routine
12353 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12354 fold cases do now) is discovering that
12355
12356 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12357
12358 is a multiple of
12359
12360 SAVE_EXPR (J * 8)
12361
12362 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12363
12364 This code also handles discovering that
12365
12366 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12367
12368 is a multiple of 8 so we don't have to worry about dealing with a
12369 possible remainder.
12370
12371 Note that we *look* inside a SAVE_EXPR only to determine how it was
12372 calculated; it is not safe for fold to do much of anything else with the
12373 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12374 at run time. For example, the latter example above *cannot* be implemented
12375 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12376 evaluation time of the original SAVE_EXPR is not necessarily the same at
12377 the time the new expression is evaluated. The only optimization of this
12378 sort that would be valid is changing
12379
12380 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12381
12382 divided by 8 to
12383
12384 SAVE_EXPR (I) * SAVE_EXPR (J)
12385
12386 (where the same SAVE_EXPR (J) is used in the original and the
12387 transformed version). */
12388
12389 int
12390 multiple_of_p (tree type, const_tree top, const_tree bottom)
12391 {
12392 gimple *stmt;
12393 tree t1, op1, op2;
12394
12395 if (operand_equal_p (top, bottom, 0))
12396 return 1;
12397
12398 if (TREE_CODE (type) != INTEGER_TYPE)
12399 return 0;
12400
12401 switch (TREE_CODE (top))
12402 {
12403 case BIT_AND_EXPR:
12404 /* Bitwise and provides a power of two multiple. If the mask is
12405 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12406 if (!integer_pow2p (bottom))
12407 return 0;
12408 /* FALLTHRU */
12409
12410 case MULT_EXPR:
12411 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12412 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12413
12414 case MINUS_EXPR:
12415 /* It is impossible to prove if op0 - op1 is multiple of bottom
12416 precisely, so be conservative here checking if both op0 and op1
12417 are multiple of bottom. Note we check the second operand first
12418 since it's usually simpler. */
12419 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12420 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12421
12422 case PLUS_EXPR:
12423 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12424 as op0 - 3 if the expression has unsigned type. For example,
12425 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
12426 op1 = TREE_OPERAND (top, 1);
12427 if (TYPE_UNSIGNED (type)
12428 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12429 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12430 return (multiple_of_p (type, op1, bottom)
12431 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12432
12433 case LSHIFT_EXPR:
12434 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12435 {
12436 op1 = TREE_OPERAND (top, 1);
12437 /* const_binop may not detect overflow correctly,
12438 so check for it explicitly here. */
12439 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12440 wi::to_wide (op1))
12441 && 0 != (t1 = fold_convert (type,
12442 const_binop (LSHIFT_EXPR,
12443 size_one_node,
12444 op1)))
12445 && !TREE_OVERFLOW (t1))
12446 return multiple_of_p (type, t1, bottom);
12447 }
12448 return 0;
12449
12450 case NOP_EXPR:
12451 /* Can't handle conversions from non-integral or wider integral type. */
12452 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12453 || (TYPE_PRECISION (type)
12454 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12455 return 0;
12456
12457 /* fall through */
12458
12459 case SAVE_EXPR:
12460 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12461
12462 case COND_EXPR:
12463 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12464 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12465
12466 case INTEGER_CST:
12467 if (TREE_CODE (bottom) != INTEGER_CST
12468 || integer_zerop (bottom)
12469 || (TYPE_UNSIGNED (type)
12470 && (tree_int_cst_sgn (top) < 0
12471 || tree_int_cst_sgn (bottom) < 0)))
12472 return 0;
12473 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12474 SIGNED);
12475
12476 case SSA_NAME:
12477 if (TREE_CODE (bottom) == INTEGER_CST
12478 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12479 && gimple_code (stmt) == GIMPLE_ASSIGN)
12480 {
12481 enum tree_code code = gimple_assign_rhs_code (stmt);
12482
12483 /* Check for special cases to see if top is defined as multiple
12484 of bottom:
12485
12486 top = (X & ~(bottom - 1) ; bottom is power of 2
12487
12488 or
12489
12490 Y = X % bottom
12491 top = X - Y. */
12492 if (code == BIT_AND_EXPR
12493 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12494 && TREE_CODE (op2) == INTEGER_CST
12495 && integer_pow2p (bottom)
12496 && wi::multiple_of_p (wi::to_widest (op2),
12497 wi::to_widest (bottom), UNSIGNED))
12498 return 1;
12499
12500 op1 = gimple_assign_rhs1 (stmt);
12501 if (code == MINUS_EXPR
12502 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12503 && TREE_CODE (op2) == SSA_NAME
12504 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12505 && gimple_code (stmt) == GIMPLE_ASSIGN
12506 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12507 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12508 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12509 return 1;
12510 }
12511
12512 /* fall through */
12513
12514 default:
12515 return 0;
12516 }
12517 }
12518
12519 #define tree_expr_nonnegative_warnv_p(X, Y) \
12520 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12521
12522 #define RECURSE(X) \
12523 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12524
12525 /* Return true if CODE or TYPE is known to be non-negative. */
12526
12527 static bool
12528 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12529 {
12530 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12531 && truth_value_p (code))
12532 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12533 have a signed:1 type (where the value is -1 and 0). */
12534 return true;
12535 return false;
12536 }
12537
12538 /* Return true if (CODE OP0) is known to be non-negative. If the return
12539 value is based on the assumption that signed overflow is undefined,
12540 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12541 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12542
12543 bool
12544 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12545 bool *strict_overflow_p, int depth)
12546 {
12547 if (TYPE_UNSIGNED (type))
12548 return true;
12549
12550 switch (code)
12551 {
12552 case ABS_EXPR:
12553 /* We can't return 1 if flag_wrapv is set because
12554 ABS_EXPR<INT_MIN> = INT_MIN. */
12555 if (!ANY_INTEGRAL_TYPE_P (type))
12556 return true;
12557 if (TYPE_OVERFLOW_UNDEFINED (type))
12558 {
12559 *strict_overflow_p = true;
12560 return true;
12561 }
12562 break;
12563
12564 case NON_LVALUE_EXPR:
12565 case FLOAT_EXPR:
12566 case FIX_TRUNC_EXPR:
12567 return RECURSE (op0);
12568
12569 CASE_CONVERT:
12570 {
12571 tree inner_type = TREE_TYPE (op0);
12572 tree outer_type = type;
12573
12574 if (TREE_CODE (outer_type) == REAL_TYPE)
12575 {
12576 if (TREE_CODE (inner_type) == REAL_TYPE)
12577 return RECURSE (op0);
12578 if (INTEGRAL_TYPE_P (inner_type))
12579 {
12580 if (TYPE_UNSIGNED (inner_type))
12581 return true;
12582 return RECURSE (op0);
12583 }
12584 }
12585 else if (INTEGRAL_TYPE_P (outer_type))
12586 {
12587 if (TREE_CODE (inner_type) == REAL_TYPE)
12588 return RECURSE (op0);
12589 if (INTEGRAL_TYPE_P (inner_type))
12590 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12591 && TYPE_UNSIGNED (inner_type);
12592 }
12593 }
12594 break;
12595
12596 default:
12597 return tree_simple_nonnegative_warnv_p (code, type);
12598 }
12599
12600 /* We don't know sign of `t', so be conservative and return false. */
12601 return false;
12602 }
12603
12604 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12605 value is based on the assumption that signed overflow is undefined,
12606 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12607 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12608
12609 bool
12610 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12611 tree op1, bool *strict_overflow_p,
12612 int depth)
12613 {
12614 if (TYPE_UNSIGNED (type))
12615 return true;
12616
12617 switch (code)
12618 {
12619 case POINTER_PLUS_EXPR:
12620 case PLUS_EXPR:
12621 if (FLOAT_TYPE_P (type))
12622 return RECURSE (op0) && RECURSE (op1);
12623
12624 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12625 both unsigned and at least 2 bits shorter than the result. */
12626 if (TREE_CODE (type) == INTEGER_TYPE
12627 && TREE_CODE (op0) == NOP_EXPR
12628 && TREE_CODE (op1) == NOP_EXPR)
12629 {
12630 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12631 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12632 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12633 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12634 {
12635 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12636 TYPE_PRECISION (inner2)) + 1;
12637 return prec < TYPE_PRECISION (type);
12638 }
12639 }
12640 break;
12641
12642 case MULT_EXPR:
12643 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12644 {
12645 /* x * x is always non-negative for floating point x
12646 or without overflow. */
12647 if (operand_equal_p (op0, op1, 0)
12648 || (RECURSE (op0) && RECURSE (op1)))
12649 {
12650 if (ANY_INTEGRAL_TYPE_P (type)
12651 && TYPE_OVERFLOW_UNDEFINED (type))
12652 *strict_overflow_p = true;
12653 return true;
12654 }
12655 }
12656
12657 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12658 both unsigned and their total bits is shorter than the result. */
12659 if (TREE_CODE (type) == INTEGER_TYPE
12660 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12661 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12662 {
12663 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12664 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12665 : TREE_TYPE (op0);
12666 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12667 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12668 : TREE_TYPE (op1);
12669
12670 bool unsigned0 = TYPE_UNSIGNED (inner0);
12671 bool unsigned1 = TYPE_UNSIGNED (inner1);
12672
12673 if (TREE_CODE (op0) == INTEGER_CST)
12674 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12675
12676 if (TREE_CODE (op1) == INTEGER_CST)
12677 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12678
12679 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12680 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12681 {
12682 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12683 ? tree_int_cst_min_precision (op0, UNSIGNED)
12684 : TYPE_PRECISION (inner0);
12685
12686 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12687 ? tree_int_cst_min_precision (op1, UNSIGNED)
12688 : TYPE_PRECISION (inner1);
12689
12690 return precision0 + precision1 < TYPE_PRECISION (type);
12691 }
12692 }
12693 return false;
12694
12695 case BIT_AND_EXPR:
12696 case MAX_EXPR:
12697 return RECURSE (op0) || RECURSE (op1);
12698
12699 case BIT_IOR_EXPR:
12700 case BIT_XOR_EXPR:
12701 case MIN_EXPR:
12702 case RDIV_EXPR:
12703 case TRUNC_DIV_EXPR:
12704 case CEIL_DIV_EXPR:
12705 case FLOOR_DIV_EXPR:
12706 case ROUND_DIV_EXPR:
12707 return RECURSE (op0) && RECURSE (op1);
12708
12709 case TRUNC_MOD_EXPR:
12710 return RECURSE (op0);
12711
12712 case FLOOR_MOD_EXPR:
12713 return RECURSE (op1);
12714
12715 case CEIL_MOD_EXPR:
12716 case ROUND_MOD_EXPR:
12717 default:
12718 return tree_simple_nonnegative_warnv_p (code, type);
12719 }
12720
12721 /* We don't know sign of `t', so be conservative and return false. */
12722 return false;
12723 }
12724
12725 /* Return true if T is known to be non-negative. If the return
12726 value is based on the assumption that signed overflow is undefined,
12727 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12728 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12729
12730 bool
12731 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12732 {
12733 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12734 return true;
12735
12736 switch (TREE_CODE (t))
12737 {
12738 case INTEGER_CST:
12739 return tree_int_cst_sgn (t) >= 0;
12740
12741 case REAL_CST:
12742 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12743
12744 case FIXED_CST:
12745 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12746
12747 case COND_EXPR:
12748 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12749
12750 case SSA_NAME:
12751 /* Limit the depth of recursion to avoid quadratic behavior.
12752 This is expected to catch almost all occurrences in practice.
12753 If this code misses important cases that unbounded recursion
12754 would not, passes that need this information could be revised
12755 to provide it through dataflow propagation. */
12756 return (!name_registered_for_update_p (t)
12757 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12758 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12759 strict_overflow_p, depth));
12760
12761 default:
12762 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12763 }
12764 }
12765
12766 /* Return true if T is known to be non-negative. If the return
12767 value is based on the assumption that signed overflow is undefined,
12768 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12769 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12770
12771 bool
12772 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12773 bool *strict_overflow_p, int depth)
12774 {
12775 switch (fn)
12776 {
12777 CASE_CFN_ACOS:
12778 CASE_CFN_ACOSH:
12779 CASE_CFN_CABS:
12780 CASE_CFN_COSH:
12781 CASE_CFN_ERFC:
12782 CASE_CFN_EXP:
12783 CASE_CFN_EXP10:
12784 CASE_CFN_EXP2:
12785 CASE_CFN_FABS:
12786 CASE_CFN_FDIM:
12787 CASE_CFN_HYPOT:
12788 CASE_CFN_POW10:
12789 CASE_CFN_FFS:
12790 CASE_CFN_PARITY:
12791 CASE_CFN_POPCOUNT:
12792 CASE_CFN_CLZ:
12793 CASE_CFN_CLRSB:
12794 case CFN_BUILT_IN_BSWAP32:
12795 case CFN_BUILT_IN_BSWAP64:
12796 /* Always true. */
12797 return true;
12798
12799 CASE_CFN_SQRT:
12800 CASE_CFN_SQRT_FN:
12801 /* sqrt(-0.0) is -0.0. */
12802 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12803 return true;
12804 return RECURSE (arg0);
12805
12806 CASE_CFN_ASINH:
12807 CASE_CFN_ATAN:
12808 CASE_CFN_ATANH:
12809 CASE_CFN_CBRT:
12810 CASE_CFN_CEIL:
12811 CASE_CFN_ERF:
12812 CASE_CFN_EXPM1:
12813 CASE_CFN_FLOOR:
12814 CASE_CFN_FMOD:
12815 CASE_CFN_FREXP:
12816 CASE_CFN_ICEIL:
12817 CASE_CFN_IFLOOR:
12818 CASE_CFN_IRINT:
12819 CASE_CFN_IROUND:
12820 CASE_CFN_LCEIL:
12821 CASE_CFN_LDEXP:
12822 CASE_CFN_LFLOOR:
12823 CASE_CFN_LLCEIL:
12824 CASE_CFN_LLFLOOR:
12825 CASE_CFN_LLRINT:
12826 CASE_CFN_LLROUND:
12827 CASE_CFN_LRINT:
12828 CASE_CFN_LROUND:
12829 CASE_CFN_MODF:
12830 CASE_CFN_NEARBYINT:
12831 CASE_CFN_RINT:
12832 CASE_CFN_ROUND:
12833 CASE_CFN_SCALB:
12834 CASE_CFN_SCALBLN:
12835 CASE_CFN_SCALBN:
12836 CASE_CFN_SIGNBIT:
12837 CASE_CFN_SIGNIFICAND:
12838 CASE_CFN_SINH:
12839 CASE_CFN_TANH:
12840 CASE_CFN_TRUNC:
12841 /* True if the 1st argument is nonnegative. */
12842 return RECURSE (arg0);
12843
12844 CASE_CFN_FMAX:
12845 CASE_CFN_FMAX_FN:
12846 /* True if the 1st OR 2nd arguments are nonnegative. */
12847 return RECURSE (arg0) || RECURSE (arg1);
12848
12849 CASE_CFN_FMIN:
12850 CASE_CFN_FMIN_FN:
12851 /* True if the 1st AND 2nd arguments are nonnegative. */
12852 return RECURSE (arg0) && RECURSE (arg1);
12853
12854 CASE_CFN_COPYSIGN:
12855 CASE_CFN_COPYSIGN_FN:
12856 /* True if the 2nd argument is nonnegative. */
12857 return RECURSE (arg1);
12858
12859 CASE_CFN_POWI:
12860 /* True if the 1st argument is nonnegative or the second
12861 argument is an even integer. */
12862 if (TREE_CODE (arg1) == INTEGER_CST
12863 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
12864 return true;
12865 return RECURSE (arg0);
12866
12867 CASE_CFN_POW:
12868 /* True if the 1st argument is nonnegative or the second
12869 argument is an even integer valued real. */
12870 if (TREE_CODE (arg1) == REAL_CST)
12871 {
12872 REAL_VALUE_TYPE c;
12873 HOST_WIDE_INT n;
12874
12875 c = TREE_REAL_CST (arg1);
12876 n = real_to_integer (&c);
12877 if ((n & 1) == 0)
12878 {
12879 REAL_VALUE_TYPE cint;
12880 real_from_integer (&cint, VOIDmode, n, SIGNED);
12881 if (real_identical (&c, &cint))
12882 return true;
12883 }
12884 }
12885 return RECURSE (arg0);
12886
12887 default:
12888 break;
12889 }
12890 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
12891 }
12892
12893 /* Return true if T is known to be non-negative. If the return
12894 value is based on the assumption that signed overflow is undefined,
12895 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12896 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12897
12898 static bool
12899 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12900 {
12901 enum tree_code code = TREE_CODE (t);
12902 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12903 return true;
12904
12905 switch (code)
12906 {
12907 case TARGET_EXPR:
12908 {
12909 tree temp = TARGET_EXPR_SLOT (t);
12910 t = TARGET_EXPR_INITIAL (t);
12911
12912 /* If the initializer is non-void, then it's a normal expression
12913 that will be assigned to the slot. */
12914 if (!VOID_TYPE_P (t))
12915 return RECURSE (t);
12916
12917 /* Otherwise, the initializer sets the slot in some way. One common
12918 way is an assignment statement at the end of the initializer. */
12919 while (1)
12920 {
12921 if (TREE_CODE (t) == BIND_EXPR)
12922 t = expr_last (BIND_EXPR_BODY (t));
12923 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12924 || TREE_CODE (t) == TRY_CATCH_EXPR)
12925 t = expr_last (TREE_OPERAND (t, 0));
12926 else if (TREE_CODE (t) == STATEMENT_LIST)
12927 t = expr_last (t);
12928 else
12929 break;
12930 }
12931 if (TREE_CODE (t) == MODIFY_EXPR
12932 && TREE_OPERAND (t, 0) == temp)
12933 return RECURSE (TREE_OPERAND (t, 1));
12934
12935 return false;
12936 }
12937
12938 case CALL_EXPR:
12939 {
12940 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
12941 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
12942
12943 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
12944 get_call_combined_fn (t),
12945 arg0,
12946 arg1,
12947 strict_overflow_p, depth);
12948 }
12949 case COMPOUND_EXPR:
12950 case MODIFY_EXPR:
12951 return RECURSE (TREE_OPERAND (t, 1));
12952
12953 case BIND_EXPR:
12954 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
12955
12956 case SAVE_EXPR:
12957 return RECURSE (TREE_OPERAND (t, 0));
12958
12959 default:
12960 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12961 }
12962 }
12963
12964 #undef RECURSE
12965 #undef tree_expr_nonnegative_warnv_p
12966
12967 /* Return true if T is known to be non-negative. If the return
12968 value is based on the assumption that signed overflow is undefined,
12969 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12970 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12971
12972 bool
12973 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12974 {
12975 enum tree_code code;
12976 if (t == error_mark_node)
12977 return false;
12978
12979 code = TREE_CODE (t);
12980 switch (TREE_CODE_CLASS (code))
12981 {
12982 case tcc_binary:
12983 case tcc_comparison:
12984 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
12985 TREE_TYPE (t),
12986 TREE_OPERAND (t, 0),
12987 TREE_OPERAND (t, 1),
12988 strict_overflow_p, depth);
12989
12990 case tcc_unary:
12991 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
12992 TREE_TYPE (t),
12993 TREE_OPERAND (t, 0),
12994 strict_overflow_p, depth);
12995
12996 case tcc_constant:
12997 case tcc_declaration:
12998 case tcc_reference:
12999 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13000
13001 default:
13002 break;
13003 }
13004
13005 switch (code)
13006 {
13007 case TRUTH_AND_EXPR:
13008 case TRUTH_OR_EXPR:
13009 case TRUTH_XOR_EXPR:
13010 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13011 TREE_TYPE (t),
13012 TREE_OPERAND (t, 0),
13013 TREE_OPERAND (t, 1),
13014 strict_overflow_p, depth);
13015 case TRUTH_NOT_EXPR:
13016 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13017 TREE_TYPE (t),
13018 TREE_OPERAND (t, 0),
13019 strict_overflow_p, depth);
13020
13021 case COND_EXPR:
13022 case CONSTRUCTOR:
13023 case OBJ_TYPE_REF:
13024 case ASSERT_EXPR:
13025 case ADDR_EXPR:
13026 case WITH_SIZE_EXPR:
13027 case SSA_NAME:
13028 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13029
13030 default:
13031 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13032 }
13033 }
13034
13035 /* Return true if `t' is known to be non-negative. Handle warnings
13036 about undefined signed overflow. */
13037
13038 bool
13039 tree_expr_nonnegative_p (tree t)
13040 {
13041 bool ret, strict_overflow_p;
13042
13043 strict_overflow_p = false;
13044 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13045 if (strict_overflow_p)
13046 fold_overflow_warning (("assuming signed overflow does not occur when "
13047 "determining that expression is always "
13048 "non-negative"),
13049 WARN_STRICT_OVERFLOW_MISC);
13050 return ret;
13051 }
13052
13053
13054 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13055 For floating point we further ensure that T is not denormal.
13056 Similar logic is present in nonzero_address in rtlanal.h.
13057
13058 If the return value is based on the assumption that signed overflow
13059 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13060 change *STRICT_OVERFLOW_P. */
13061
13062 bool
13063 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13064 bool *strict_overflow_p)
13065 {
13066 switch (code)
13067 {
13068 case ABS_EXPR:
13069 return tree_expr_nonzero_warnv_p (op0,
13070 strict_overflow_p);
13071
13072 case NOP_EXPR:
13073 {
13074 tree inner_type = TREE_TYPE (op0);
13075 tree outer_type = type;
13076
13077 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13078 && tree_expr_nonzero_warnv_p (op0,
13079 strict_overflow_p));
13080 }
13081 break;
13082
13083 case NON_LVALUE_EXPR:
13084 return tree_expr_nonzero_warnv_p (op0,
13085 strict_overflow_p);
13086
13087 default:
13088 break;
13089 }
13090
13091 return false;
13092 }
13093
13094 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13095 For floating point we further ensure that T is not denormal.
13096 Similar logic is present in nonzero_address in rtlanal.h.
13097
13098 If the return value is based on the assumption that signed overflow
13099 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13100 change *STRICT_OVERFLOW_P. */
13101
13102 bool
13103 tree_binary_nonzero_warnv_p (enum tree_code code,
13104 tree type,
13105 tree op0,
13106 tree op1, bool *strict_overflow_p)
13107 {
13108 bool sub_strict_overflow_p;
13109 switch (code)
13110 {
13111 case POINTER_PLUS_EXPR:
13112 case PLUS_EXPR:
13113 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13114 {
13115 /* With the presence of negative values it is hard
13116 to say something. */
13117 sub_strict_overflow_p = false;
13118 if (!tree_expr_nonnegative_warnv_p (op0,
13119 &sub_strict_overflow_p)
13120 || !tree_expr_nonnegative_warnv_p (op1,
13121 &sub_strict_overflow_p))
13122 return false;
13123 /* One of operands must be positive and the other non-negative. */
13124 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13125 overflows, on a twos-complement machine the sum of two
13126 nonnegative numbers can never be zero. */
13127 return (tree_expr_nonzero_warnv_p (op0,
13128 strict_overflow_p)
13129 || tree_expr_nonzero_warnv_p (op1,
13130 strict_overflow_p));
13131 }
13132 break;
13133
13134 case MULT_EXPR:
13135 if (TYPE_OVERFLOW_UNDEFINED (type))
13136 {
13137 if (tree_expr_nonzero_warnv_p (op0,
13138 strict_overflow_p)
13139 && tree_expr_nonzero_warnv_p (op1,
13140 strict_overflow_p))
13141 {
13142 *strict_overflow_p = true;
13143 return true;
13144 }
13145 }
13146 break;
13147
13148 case MIN_EXPR:
13149 sub_strict_overflow_p = false;
13150 if (tree_expr_nonzero_warnv_p (op0,
13151 &sub_strict_overflow_p)
13152 && tree_expr_nonzero_warnv_p (op1,
13153 &sub_strict_overflow_p))
13154 {
13155 if (sub_strict_overflow_p)
13156 *strict_overflow_p = true;
13157 }
13158 break;
13159
13160 case MAX_EXPR:
13161 sub_strict_overflow_p = false;
13162 if (tree_expr_nonzero_warnv_p (op0,
13163 &sub_strict_overflow_p))
13164 {
13165 if (sub_strict_overflow_p)
13166 *strict_overflow_p = true;
13167
13168 /* When both operands are nonzero, then MAX must be too. */
13169 if (tree_expr_nonzero_warnv_p (op1,
13170 strict_overflow_p))
13171 return true;
13172
13173 /* MAX where operand 0 is positive is positive. */
13174 return tree_expr_nonnegative_warnv_p (op0,
13175 strict_overflow_p);
13176 }
13177 /* MAX where operand 1 is positive is positive. */
13178 else if (tree_expr_nonzero_warnv_p (op1,
13179 &sub_strict_overflow_p)
13180 && tree_expr_nonnegative_warnv_p (op1,
13181 &sub_strict_overflow_p))
13182 {
13183 if (sub_strict_overflow_p)
13184 *strict_overflow_p = true;
13185 return true;
13186 }
13187 break;
13188
13189 case BIT_IOR_EXPR:
13190 return (tree_expr_nonzero_warnv_p (op1,
13191 strict_overflow_p)
13192 || tree_expr_nonzero_warnv_p (op0,
13193 strict_overflow_p));
13194
13195 default:
13196 break;
13197 }
13198
13199 return false;
13200 }
13201
13202 /* Return true when T is an address and is known to be nonzero.
13203 For floating point we further ensure that T is not denormal.
13204 Similar logic is present in nonzero_address in rtlanal.h.
13205
13206 If the return value is based on the assumption that signed overflow
13207 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13208 change *STRICT_OVERFLOW_P. */
13209
13210 bool
13211 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13212 {
13213 bool sub_strict_overflow_p;
13214 switch (TREE_CODE (t))
13215 {
13216 case INTEGER_CST:
13217 return !integer_zerop (t);
13218
13219 case ADDR_EXPR:
13220 {
13221 tree base = TREE_OPERAND (t, 0);
13222
13223 if (!DECL_P (base))
13224 base = get_base_address (base);
13225
13226 if (base && TREE_CODE (base) == TARGET_EXPR)
13227 base = TARGET_EXPR_SLOT (base);
13228
13229 if (!base)
13230 return false;
13231
13232 /* For objects in symbol table check if we know they are non-zero.
13233 Don't do anything for variables and functions before symtab is built;
13234 it is quite possible that they will be declared weak later. */
13235 int nonzero_addr = maybe_nonzero_address (base);
13236 if (nonzero_addr >= 0)
13237 return nonzero_addr;
13238
13239 /* Constants are never weak. */
13240 if (CONSTANT_CLASS_P (base))
13241 return true;
13242
13243 return false;
13244 }
13245
13246 case COND_EXPR:
13247 sub_strict_overflow_p = false;
13248 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13249 &sub_strict_overflow_p)
13250 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13251 &sub_strict_overflow_p))
13252 {
13253 if (sub_strict_overflow_p)
13254 *strict_overflow_p = true;
13255 return true;
13256 }
13257 break;
13258
13259 case SSA_NAME:
13260 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13261 break;
13262 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13263
13264 default:
13265 break;
13266 }
13267 return false;
13268 }
13269
13270 #define integer_valued_real_p(X) \
13271 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13272
13273 #define RECURSE(X) \
13274 ((integer_valued_real_p) (X, depth + 1))
13275
13276 /* Return true if the floating point result of (CODE OP0) has an
13277 integer value. We also allow +Inf, -Inf and NaN to be considered
13278 integer values. Return false for signaling NaN.
13279
13280 DEPTH is the current nesting depth of the query. */
13281
13282 bool
13283 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13284 {
13285 switch (code)
13286 {
13287 case FLOAT_EXPR:
13288 return true;
13289
13290 case ABS_EXPR:
13291 return RECURSE (op0);
13292
13293 CASE_CONVERT:
13294 {
13295 tree type = TREE_TYPE (op0);
13296 if (TREE_CODE (type) == INTEGER_TYPE)
13297 return true;
13298 if (TREE_CODE (type) == REAL_TYPE)
13299 return RECURSE (op0);
13300 break;
13301 }
13302
13303 default:
13304 break;
13305 }
13306 return false;
13307 }
13308
13309 /* Return true if the floating point result of (CODE OP0 OP1) has an
13310 integer value. We also allow +Inf, -Inf and NaN to be considered
13311 integer values. Return false for signaling NaN.
13312
13313 DEPTH is the current nesting depth of the query. */
13314
13315 bool
13316 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13317 {
13318 switch (code)
13319 {
13320 case PLUS_EXPR:
13321 case MINUS_EXPR:
13322 case MULT_EXPR:
13323 case MIN_EXPR:
13324 case MAX_EXPR:
13325 return RECURSE (op0) && RECURSE (op1);
13326
13327 default:
13328 break;
13329 }
13330 return false;
13331 }
13332
13333 /* Return true if the floating point result of calling FNDECL with arguments
13334 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13335 considered integer values. Return false for signaling NaN. If FNDECL
13336 takes fewer than 2 arguments, the remaining ARGn are null.
13337
13338 DEPTH is the current nesting depth of the query. */
13339
13340 bool
13341 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13342 {
13343 switch (fn)
13344 {
13345 CASE_CFN_CEIL:
13346 CASE_CFN_FLOOR:
13347 CASE_CFN_NEARBYINT:
13348 CASE_CFN_RINT:
13349 CASE_CFN_ROUND:
13350 CASE_CFN_TRUNC:
13351 return true;
13352
13353 CASE_CFN_FMIN:
13354 CASE_CFN_FMIN_FN:
13355 CASE_CFN_FMAX:
13356 CASE_CFN_FMAX_FN:
13357 return RECURSE (arg0) && RECURSE (arg1);
13358
13359 default:
13360 break;
13361 }
13362 return false;
13363 }
13364
13365 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13366 has an integer value. We also allow +Inf, -Inf and NaN to be
13367 considered integer values. Return false for signaling NaN.
13368
13369 DEPTH is the current nesting depth of the query. */
13370
13371 bool
13372 integer_valued_real_single_p (tree t, int depth)
13373 {
13374 switch (TREE_CODE (t))
13375 {
13376 case REAL_CST:
13377 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13378
13379 case COND_EXPR:
13380 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13381
13382 case SSA_NAME:
13383 /* Limit the depth of recursion to avoid quadratic behavior.
13384 This is expected to catch almost all occurrences in practice.
13385 If this code misses important cases that unbounded recursion
13386 would not, passes that need this information could be revised
13387 to provide it through dataflow propagation. */
13388 return (!name_registered_for_update_p (t)
13389 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13390 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13391 depth));
13392
13393 default:
13394 break;
13395 }
13396 return false;
13397 }
13398
13399 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13400 has an integer value. We also allow +Inf, -Inf and NaN to be
13401 considered integer values. Return false for signaling NaN.
13402
13403 DEPTH is the current nesting depth of the query. */
13404
13405 static bool
13406 integer_valued_real_invalid_p (tree t, int depth)
13407 {
13408 switch (TREE_CODE (t))
13409 {
13410 case COMPOUND_EXPR:
13411 case MODIFY_EXPR:
13412 case BIND_EXPR:
13413 return RECURSE (TREE_OPERAND (t, 1));
13414
13415 case SAVE_EXPR:
13416 return RECURSE (TREE_OPERAND (t, 0));
13417
13418 default:
13419 break;
13420 }
13421 return false;
13422 }
13423
13424 #undef RECURSE
13425 #undef integer_valued_real_p
13426
13427 /* Return true if the floating point expression T has an integer value.
13428 We also allow +Inf, -Inf and NaN to be considered integer values.
13429 Return false for signaling NaN.
13430
13431 DEPTH is the current nesting depth of the query. */
13432
13433 bool
13434 integer_valued_real_p (tree t, int depth)
13435 {
13436 if (t == error_mark_node)
13437 return false;
13438
13439 tree_code code = TREE_CODE (t);
13440 switch (TREE_CODE_CLASS (code))
13441 {
13442 case tcc_binary:
13443 case tcc_comparison:
13444 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13445 TREE_OPERAND (t, 1), depth);
13446
13447 case tcc_unary:
13448 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13449
13450 case tcc_constant:
13451 case tcc_declaration:
13452 case tcc_reference:
13453 return integer_valued_real_single_p (t, depth);
13454
13455 default:
13456 break;
13457 }
13458
13459 switch (code)
13460 {
13461 case COND_EXPR:
13462 case SSA_NAME:
13463 return integer_valued_real_single_p (t, depth);
13464
13465 case CALL_EXPR:
13466 {
13467 tree arg0 = (call_expr_nargs (t) > 0
13468 ? CALL_EXPR_ARG (t, 0)
13469 : NULL_TREE);
13470 tree arg1 = (call_expr_nargs (t) > 1
13471 ? CALL_EXPR_ARG (t, 1)
13472 : NULL_TREE);
13473 return integer_valued_real_call_p (get_call_combined_fn (t),
13474 arg0, arg1, depth);
13475 }
13476
13477 default:
13478 return integer_valued_real_invalid_p (t, depth);
13479 }
13480 }
13481
13482 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13483 attempt to fold the expression to a constant without modifying TYPE,
13484 OP0 or OP1.
13485
13486 If the expression could be simplified to a constant, then return
13487 the constant. If the expression would not be simplified to a
13488 constant, then return NULL_TREE. */
13489
13490 tree
13491 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13492 {
13493 tree tem = fold_binary (code, type, op0, op1);
13494 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13495 }
13496
13497 /* Given the components of a unary expression CODE, TYPE and OP0,
13498 attempt to fold the expression to a constant without modifying
13499 TYPE or OP0.
13500
13501 If the expression could be simplified to a constant, then return
13502 the constant. If the expression would not be simplified to a
13503 constant, then return NULL_TREE. */
13504
13505 tree
13506 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13507 {
13508 tree tem = fold_unary (code, type, op0);
13509 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13510 }
13511
13512 /* If EXP represents referencing an element in a constant string
13513 (either via pointer arithmetic or array indexing), return the
13514 tree representing the value accessed, otherwise return NULL. */
13515
13516 tree
13517 fold_read_from_constant_string (tree exp)
13518 {
13519 if ((TREE_CODE (exp) == INDIRECT_REF
13520 || TREE_CODE (exp) == ARRAY_REF)
13521 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13522 {
13523 tree exp1 = TREE_OPERAND (exp, 0);
13524 tree index;
13525 tree string;
13526 location_t loc = EXPR_LOCATION (exp);
13527
13528 if (TREE_CODE (exp) == INDIRECT_REF)
13529 string = string_constant (exp1, &index);
13530 else
13531 {
13532 tree low_bound = array_ref_low_bound (exp);
13533 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13534
13535 /* Optimize the special-case of a zero lower bound.
13536
13537 We convert the low_bound to sizetype to avoid some problems
13538 with constant folding. (E.g. suppose the lower bound is 1,
13539 and its mode is QI. Without the conversion,l (ARRAY
13540 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13541 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13542 if (! integer_zerop (low_bound))
13543 index = size_diffop_loc (loc, index,
13544 fold_convert_loc (loc, sizetype, low_bound));
13545
13546 string = exp1;
13547 }
13548
13549 scalar_int_mode char_mode;
13550 if (string
13551 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13552 && TREE_CODE (string) == STRING_CST
13553 && TREE_CODE (index) == INTEGER_CST
13554 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13555 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
13556 &char_mode)
13557 && GET_MODE_SIZE (char_mode) == 1)
13558 return build_int_cst_type (TREE_TYPE (exp),
13559 (TREE_STRING_POINTER (string)
13560 [TREE_INT_CST_LOW (index)]));
13561 }
13562 return NULL;
13563 }
13564
13565 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13566 an integer constant, real, or fixed-point constant.
13567
13568 TYPE is the type of the result. */
13569
13570 static tree
13571 fold_negate_const (tree arg0, tree type)
13572 {
13573 tree t = NULL_TREE;
13574
13575 switch (TREE_CODE (arg0))
13576 {
13577 case INTEGER_CST:
13578 {
13579 bool overflow;
13580 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13581 t = force_fit_type (type, val, 1,
13582 (overflow && ! TYPE_UNSIGNED (type))
13583 || TREE_OVERFLOW (arg0));
13584 break;
13585 }
13586
13587 case REAL_CST:
13588 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13589 break;
13590
13591 case FIXED_CST:
13592 {
13593 FIXED_VALUE_TYPE f;
13594 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13595 &(TREE_FIXED_CST (arg0)), NULL,
13596 TYPE_SATURATING (type));
13597 t = build_fixed (type, f);
13598 /* Propagate overflow flags. */
13599 if (overflow_p | TREE_OVERFLOW (arg0))
13600 TREE_OVERFLOW (t) = 1;
13601 break;
13602 }
13603
13604 default:
13605 gcc_unreachable ();
13606 }
13607
13608 return t;
13609 }
13610
13611 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13612 an integer constant or real constant.
13613
13614 TYPE is the type of the result. */
13615
13616 tree
13617 fold_abs_const (tree arg0, tree type)
13618 {
13619 tree t = NULL_TREE;
13620
13621 switch (TREE_CODE (arg0))
13622 {
13623 case INTEGER_CST:
13624 {
13625 /* If the value is unsigned or non-negative, then the absolute value
13626 is the same as the ordinary value. */
13627 if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type)))
13628 t = arg0;
13629
13630 /* If the value is negative, then the absolute value is
13631 its negation. */
13632 else
13633 {
13634 bool overflow;
13635 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13636 t = force_fit_type (type, val, -1,
13637 overflow | TREE_OVERFLOW (arg0));
13638 }
13639 }
13640 break;
13641
13642 case REAL_CST:
13643 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13644 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13645 else
13646 t = arg0;
13647 break;
13648
13649 default:
13650 gcc_unreachable ();
13651 }
13652
13653 return t;
13654 }
13655
13656 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13657 constant. TYPE is the type of the result. */
13658
13659 static tree
13660 fold_not_const (const_tree arg0, tree type)
13661 {
13662 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13663
13664 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
13665 }
13666
13667 /* Given CODE, a relational operator, the target type, TYPE and two
13668 constant operands OP0 and OP1, return the result of the
13669 relational operation. If the result is not a compile time
13670 constant, then return NULL_TREE. */
13671
13672 static tree
13673 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13674 {
13675 int result, invert;
13676
13677 /* From here on, the only cases we handle are when the result is
13678 known to be a constant. */
13679
13680 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13681 {
13682 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13683 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13684
13685 /* Handle the cases where either operand is a NaN. */
13686 if (real_isnan (c0) || real_isnan (c1))
13687 {
13688 switch (code)
13689 {
13690 case EQ_EXPR:
13691 case ORDERED_EXPR:
13692 result = 0;
13693 break;
13694
13695 case NE_EXPR:
13696 case UNORDERED_EXPR:
13697 case UNLT_EXPR:
13698 case UNLE_EXPR:
13699 case UNGT_EXPR:
13700 case UNGE_EXPR:
13701 case UNEQ_EXPR:
13702 result = 1;
13703 break;
13704
13705 case LT_EXPR:
13706 case LE_EXPR:
13707 case GT_EXPR:
13708 case GE_EXPR:
13709 case LTGT_EXPR:
13710 if (flag_trapping_math)
13711 return NULL_TREE;
13712 result = 0;
13713 break;
13714
13715 default:
13716 gcc_unreachable ();
13717 }
13718
13719 return constant_boolean_node (result, type);
13720 }
13721
13722 return constant_boolean_node (real_compare (code, c0, c1), type);
13723 }
13724
13725 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13726 {
13727 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13728 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13729 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13730 }
13731
13732 /* Handle equality/inequality of complex constants. */
13733 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13734 {
13735 tree rcond = fold_relational_const (code, type,
13736 TREE_REALPART (op0),
13737 TREE_REALPART (op1));
13738 tree icond = fold_relational_const (code, type,
13739 TREE_IMAGPART (op0),
13740 TREE_IMAGPART (op1));
13741 if (code == EQ_EXPR)
13742 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13743 else if (code == NE_EXPR)
13744 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13745 else
13746 return NULL_TREE;
13747 }
13748
13749 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13750 {
13751 if (!VECTOR_TYPE_P (type))
13752 {
13753 /* Have vector comparison with scalar boolean result. */
13754 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13755 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13756 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13757 {
13758 tree elem0 = VECTOR_CST_ELT (op0, i);
13759 tree elem1 = VECTOR_CST_ELT (op1, i);
13760 tree tmp = fold_relational_const (code, type, elem0, elem1);
13761 if (tmp == NULL_TREE)
13762 return NULL_TREE;
13763 if (integer_zerop (tmp))
13764 return constant_boolean_node (false, type);
13765 }
13766 return constant_boolean_node (true, type);
13767 }
13768 unsigned count = VECTOR_CST_NELTS (op0);
13769 gcc_assert (VECTOR_CST_NELTS (op1) == count
13770 && TYPE_VECTOR_SUBPARTS (type) == count);
13771
13772 auto_vec<tree, 32> elts (count);
13773 for (unsigned i = 0; i < count; i++)
13774 {
13775 tree elem_type = TREE_TYPE (type);
13776 tree elem0 = VECTOR_CST_ELT (op0, i);
13777 tree elem1 = VECTOR_CST_ELT (op1, i);
13778
13779 tree tem = fold_relational_const (code, elem_type,
13780 elem0, elem1);
13781
13782 if (tem == NULL_TREE)
13783 return NULL_TREE;
13784
13785 elts.quick_push (build_int_cst (elem_type,
13786 integer_zerop (tem) ? 0 : -1));
13787 }
13788
13789 return build_vector (type, elts);
13790 }
13791
13792 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13793
13794 To compute GT, swap the arguments and do LT.
13795 To compute GE, do LT and invert the result.
13796 To compute LE, swap the arguments, do LT and invert the result.
13797 To compute NE, do EQ and invert the result.
13798
13799 Therefore, the code below must handle only EQ and LT. */
13800
13801 if (code == LE_EXPR || code == GT_EXPR)
13802 {
13803 std::swap (op0, op1);
13804 code = swap_tree_comparison (code);
13805 }
13806
13807 /* Note that it is safe to invert for real values here because we
13808 have already handled the one case that it matters. */
13809
13810 invert = 0;
13811 if (code == NE_EXPR || code == GE_EXPR)
13812 {
13813 invert = 1;
13814 code = invert_tree_comparison (code, false);
13815 }
13816
13817 /* Compute a result for LT or EQ if args permit;
13818 Otherwise return T. */
13819 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13820 {
13821 if (code == EQ_EXPR)
13822 result = tree_int_cst_equal (op0, op1);
13823 else
13824 result = tree_int_cst_lt (op0, op1);
13825 }
13826 else
13827 return NULL_TREE;
13828
13829 if (invert)
13830 result ^= 1;
13831 return constant_boolean_node (result, type);
13832 }
13833
13834 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13835 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13836 itself. */
13837
13838 tree
13839 fold_build_cleanup_point_expr (tree type, tree expr)
13840 {
13841 /* If the expression does not have side effects then we don't have to wrap
13842 it with a cleanup point expression. */
13843 if (!TREE_SIDE_EFFECTS (expr))
13844 return expr;
13845
13846 /* If the expression is a return, check to see if the expression inside the
13847 return has no side effects or the right hand side of the modify expression
13848 inside the return. If either don't have side effects set we don't need to
13849 wrap the expression in a cleanup point expression. Note we don't check the
13850 left hand side of the modify because it should always be a return decl. */
13851 if (TREE_CODE (expr) == RETURN_EXPR)
13852 {
13853 tree op = TREE_OPERAND (expr, 0);
13854 if (!op || !TREE_SIDE_EFFECTS (op))
13855 return expr;
13856 op = TREE_OPERAND (op, 1);
13857 if (!TREE_SIDE_EFFECTS (op))
13858 return expr;
13859 }
13860
13861 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
13862 }
13863
13864 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13865 of an indirection through OP0, or NULL_TREE if no simplification is
13866 possible. */
13867
13868 tree
13869 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13870 {
13871 tree sub = op0;
13872 tree subtype;
13873
13874 STRIP_NOPS (sub);
13875 subtype = TREE_TYPE (sub);
13876 if (!POINTER_TYPE_P (subtype)
13877 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
13878 return NULL_TREE;
13879
13880 if (TREE_CODE (sub) == ADDR_EXPR)
13881 {
13882 tree op = TREE_OPERAND (sub, 0);
13883 tree optype = TREE_TYPE (op);
13884 /* *&CONST_DECL -> to the value of the const decl. */
13885 if (TREE_CODE (op) == CONST_DECL)
13886 return DECL_INITIAL (op);
13887 /* *&p => p; make sure to handle *&"str"[cst] here. */
13888 if (type == optype)
13889 {
13890 tree fop = fold_read_from_constant_string (op);
13891 if (fop)
13892 return fop;
13893 else
13894 return op;
13895 }
13896 /* *(foo *)&fooarray => fooarray[0] */
13897 else if (TREE_CODE (optype) == ARRAY_TYPE
13898 && type == TREE_TYPE (optype)
13899 && (!in_gimple_form
13900 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13901 {
13902 tree type_domain = TYPE_DOMAIN (optype);
13903 tree min_val = size_zero_node;
13904 if (type_domain && TYPE_MIN_VALUE (type_domain))
13905 min_val = TYPE_MIN_VALUE (type_domain);
13906 if (in_gimple_form
13907 && TREE_CODE (min_val) != INTEGER_CST)
13908 return NULL_TREE;
13909 return build4_loc (loc, ARRAY_REF, type, op, min_val,
13910 NULL_TREE, NULL_TREE);
13911 }
13912 /* *(foo *)&complexfoo => __real__ complexfoo */
13913 else if (TREE_CODE (optype) == COMPLEX_TYPE
13914 && type == TREE_TYPE (optype))
13915 return fold_build1_loc (loc, REALPART_EXPR, type, op);
13916 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13917 else if (TREE_CODE (optype) == VECTOR_TYPE
13918 && type == TREE_TYPE (optype))
13919 {
13920 tree part_width = TYPE_SIZE (type);
13921 tree index = bitsize_int (0);
13922 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
13923 }
13924 }
13925
13926 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
13927 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13928 {
13929 tree op00 = TREE_OPERAND (sub, 0);
13930 tree op01 = TREE_OPERAND (sub, 1);
13931
13932 STRIP_NOPS (op00);
13933 if (TREE_CODE (op00) == ADDR_EXPR)
13934 {
13935 tree op00type;
13936 op00 = TREE_OPERAND (op00, 0);
13937 op00type = TREE_TYPE (op00);
13938
13939 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
13940 if (TREE_CODE (op00type) == VECTOR_TYPE
13941 && type == TREE_TYPE (op00type))
13942 {
13943 tree part_width = TYPE_SIZE (type);
13944 unsigned HOST_WIDE_INT max_offset
13945 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
13946 * TYPE_VECTOR_SUBPARTS (op00type));
13947 if (tree_int_cst_sign_bit (op01) == 0
13948 && compare_tree_int (op01, max_offset) == -1)
13949 {
13950 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
13951 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
13952 tree index = bitsize_int (indexi);
13953 return fold_build3_loc (loc,
13954 BIT_FIELD_REF, type, op00,
13955 part_width, index);
13956 }
13957 }
13958 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13959 else if (TREE_CODE (op00type) == COMPLEX_TYPE
13960 && type == TREE_TYPE (op00type))
13961 {
13962 tree size = TYPE_SIZE_UNIT (type);
13963 if (tree_int_cst_equal (size, op01))
13964 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
13965 }
13966 /* ((foo *)&fooarray)[1] => fooarray[1] */
13967 else if (TREE_CODE (op00type) == ARRAY_TYPE
13968 && type == TREE_TYPE (op00type))
13969 {
13970 tree type_domain = TYPE_DOMAIN (op00type);
13971 tree min = size_zero_node;
13972 if (type_domain && TYPE_MIN_VALUE (type_domain))
13973 min = TYPE_MIN_VALUE (type_domain);
13974 offset_int off = wi::to_offset (op01);
13975 offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
13976 offset_int remainder;
13977 off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
13978 if (remainder == 0 && TREE_CODE (min) == INTEGER_CST)
13979 {
13980 off = off + wi::to_offset (min);
13981 op01 = wide_int_to_tree (sizetype, off);
13982 return build4_loc (loc, ARRAY_REF, type, op00, op01,
13983 NULL_TREE, NULL_TREE);
13984 }
13985 }
13986 }
13987 }
13988
13989 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13990 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13991 && type == TREE_TYPE (TREE_TYPE (subtype))
13992 && (!in_gimple_form
13993 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13994 {
13995 tree type_domain;
13996 tree min_val = size_zero_node;
13997 sub = build_fold_indirect_ref_loc (loc, sub);
13998 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13999 if (type_domain && TYPE_MIN_VALUE (type_domain))
14000 min_val = TYPE_MIN_VALUE (type_domain);
14001 if (in_gimple_form
14002 && TREE_CODE (min_val) != INTEGER_CST)
14003 return NULL_TREE;
14004 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14005 NULL_TREE);
14006 }
14007
14008 return NULL_TREE;
14009 }
14010
14011 /* Builds an expression for an indirection through T, simplifying some
14012 cases. */
14013
14014 tree
14015 build_fold_indirect_ref_loc (location_t loc, tree t)
14016 {
14017 tree type = TREE_TYPE (TREE_TYPE (t));
14018 tree sub = fold_indirect_ref_1 (loc, type, t);
14019
14020 if (sub)
14021 return sub;
14022
14023 return build1_loc (loc, INDIRECT_REF, type, t);
14024 }
14025
14026 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14027
14028 tree
14029 fold_indirect_ref_loc (location_t loc, tree t)
14030 {
14031 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14032
14033 if (sub)
14034 return sub;
14035 else
14036 return t;
14037 }
14038
14039 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14040 whose result is ignored. The type of the returned tree need not be
14041 the same as the original expression. */
14042
14043 tree
14044 fold_ignored_result (tree t)
14045 {
14046 if (!TREE_SIDE_EFFECTS (t))
14047 return integer_zero_node;
14048
14049 for (;;)
14050 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14051 {
14052 case tcc_unary:
14053 t = TREE_OPERAND (t, 0);
14054 break;
14055
14056 case tcc_binary:
14057 case tcc_comparison:
14058 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14059 t = TREE_OPERAND (t, 0);
14060 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14061 t = TREE_OPERAND (t, 1);
14062 else
14063 return t;
14064 break;
14065
14066 case tcc_expression:
14067 switch (TREE_CODE (t))
14068 {
14069 case COMPOUND_EXPR:
14070 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14071 return t;
14072 t = TREE_OPERAND (t, 0);
14073 break;
14074
14075 case COND_EXPR:
14076 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14077 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14078 return t;
14079 t = TREE_OPERAND (t, 0);
14080 break;
14081
14082 default:
14083 return t;
14084 }
14085 break;
14086
14087 default:
14088 return t;
14089 }
14090 }
14091
14092 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14093
14094 tree
14095 round_up_loc (location_t loc, tree value, unsigned int divisor)
14096 {
14097 tree div = NULL_TREE;
14098
14099 if (divisor == 1)
14100 return value;
14101
14102 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14103 have to do anything. Only do this when we are not given a const,
14104 because in that case, this check is more expensive than just
14105 doing it. */
14106 if (TREE_CODE (value) != INTEGER_CST)
14107 {
14108 div = build_int_cst (TREE_TYPE (value), divisor);
14109
14110 if (multiple_of_p (TREE_TYPE (value), value, div))
14111 return value;
14112 }
14113
14114 /* If divisor is a power of two, simplify this to bit manipulation. */
14115 if (pow2_or_zerop (divisor))
14116 {
14117 if (TREE_CODE (value) == INTEGER_CST)
14118 {
14119 wide_int val = wi::to_wide (value);
14120 bool overflow_p;
14121
14122 if ((val & (divisor - 1)) == 0)
14123 return value;
14124
14125 overflow_p = TREE_OVERFLOW (value);
14126 val += divisor - 1;
14127 val &= (int) -divisor;
14128 if (val == 0)
14129 overflow_p = true;
14130
14131 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14132 }
14133 else
14134 {
14135 tree t;
14136
14137 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14138 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14139 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14140 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14141 }
14142 }
14143 else
14144 {
14145 if (!div)
14146 div = build_int_cst (TREE_TYPE (value), divisor);
14147 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14148 value = size_binop_loc (loc, MULT_EXPR, value, div);
14149 }
14150
14151 return value;
14152 }
14153
14154 /* Likewise, but round down. */
14155
14156 tree
14157 round_down_loc (location_t loc, tree value, int divisor)
14158 {
14159 tree div = NULL_TREE;
14160
14161 gcc_assert (divisor > 0);
14162 if (divisor == 1)
14163 return value;
14164
14165 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14166 have to do anything. Only do this when we are not given a const,
14167 because in that case, this check is more expensive than just
14168 doing it. */
14169 if (TREE_CODE (value) != INTEGER_CST)
14170 {
14171 div = build_int_cst (TREE_TYPE (value), divisor);
14172
14173 if (multiple_of_p (TREE_TYPE (value), value, div))
14174 return value;
14175 }
14176
14177 /* If divisor is a power of two, simplify this to bit manipulation. */
14178 if (pow2_or_zerop (divisor))
14179 {
14180 tree t;
14181
14182 t = build_int_cst (TREE_TYPE (value), -divisor);
14183 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14184 }
14185 else
14186 {
14187 if (!div)
14188 div = build_int_cst (TREE_TYPE (value), divisor);
14189 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14190 value = size_binop_loc (loc, MULT_EXPR, value, div);
14191 }
14192
14193 return value;
14194 }
14195
14196 /* Returns the pointer to the base of the object addressed by EXP and
14197 extracts the information about the offset of the access, storing it
14198 to PBITPOS and POFFSET. */
14199
14200 static tree
14201 split_address_to_core_and_offset (tree exp,
14202 HOST_WIDE_INT *pbitpos, tree *poffset)
14203 {
14204 tree core;
14205 machine_mode mode;
14206 int unsignedp, reversep, volatilep;
14207 HOST_WIDE_INT bitsize;
14208 location_t loc = EXPR_LOCATION (exp);
14209
14210 if (TREE_CODE (exp) == ADDR_EXPR)
14211 {
14212 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14213 poffset, &mode, &unsignedp, &reversep,
14214 &volatilep);
14215 core = build_fold_addr_expr_loc (loc, core);
14216 }
14217 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14218 {
14219 core = TREE_OPERAND (exp, 0);
14220 STRIP_NOPS (core);
14221 *pbitpos = 0;
14222 *poffset = TREE_OPERAND (exp, 1);
14223 if (TREE_CODE (*poffset) == INTEGER_CST)
14224 {
14225 offset_int tem = wi::sext (wi::to_offset (*poffset),
14226 TYPE_PRECISION (TREE_TYPE (*poffset)));
14227 tem <<= LOG2_BITS_PER_UNIT;
14228 if (wi::fits_shwi_p (tem))
14229 {
14230 *pbitpos = tem.to_shwi ();
14231 *poffset = NULL_TREE;
14232 }
14233 }
14234 }
14235 else
14236 {
14237 core = exp;
14238 *pbitpos = 0;
14239 *poffset = NULL_TREE;
14240 }
14241
14242 return core;
14243 }
14244
14245 /* Returns true if addresses of E1 and E2 differ by a constant, false
14246 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14247
14248 bool
14249 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14250 {
14251 tree core1, core2;
14252 HOST_WIDE_INT bitpos1, bitpos2;
14253 tree toffset1, toffset2, tdiff, type;
14254
14255 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14256 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14257
14258 if (bitpos1 % BITS_PER_UNIT != 0
14259 || bitpos2 % BITS_PER_UNIT != 0
14260 || !operand_equal_p (core1, core2, 0))
14261 return false;
14262
14263 if (toffset1 && toffset2)
14264 {
14265 type = TREE_TYPE (toffset1);
14266 if (type != TREE_TYPE (toffset2))
14267 toffset2 = fold_convert (type, toffset2);
14268
14269 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14270 if (!cst_and_fits_in_hwi (tdiff))
14271 return false;
14272
14273 *diff = int_cst_value (tdiff);
14274 }
14275 else if (toffset1 || toffset2)
14276 {
14277 /* If only one of the offsets is non-constant, the difference cannot
14278 be a constant. */
14279 return false;
14280 }
14281 else
14282 *diff = 0;
14283
14284 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14285 return true;
14286 }
14287
14288 /* Return OFF converted to a pointer offset type suitable as offset for
14289 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14290 tree
14291 convert_to_ptrofftype_loc (location_t loc, tree off)
14292 {
14293 return fold_convert_loc (loc, sizetype, off);
14294 }
14295
14296 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14297 tree
14298 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14299 {
14300 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14301 ptr, convert_to_ptrofftype_loc (loc, off));
14302 }
14303
14304 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14305 tree
14306 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14307 {
14308 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14309 ptr, size_int (off));
14310 }
14311
14312 /* Return a char pointer for a C string if it is a string constant
14313 or sum of string constant and integer constant. We only support
14314 string constants properly terminated with '\0' character.
14315 If STRLEN is a valid pointer, length (including terminating character)
14316 of returned string is stored to the argument. */
14317
14318 const char *
14319 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14320 {
14321 tree offset_node;
14322
14323 if (strlen)
14324 *strlen = 0;
14325
14326 src = string_constant (src, &offset_node);
14327 if (src == 0)
14328 return NULL;
14329
14330 unsigned HOST_WIDE_INT offset = 0;
14331 if (offset_node != NULL_TREE)
14332 {
14333 if (!tree_fits_uhwi_p (offset_node))
14334 return NULL;
14335 else
14336 offset = tree_to_uhwi (offset_node);
14337 }
14338
14339 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14340 const char *string = TREE_STRING_POINTER (src);
14341
14342 /* Support only properly null-terminated strings. */
14343 if (string_length == 0
14344 || string[string_length - 1] != '\0'
14345 || offset >= string_length)
14346 return NULL;
14347
14348 if (strlen)
14349 *strlen = string_length - offset;
14350 return string + offset;
14351 }
14352
14353 #if CHECKING_P
14354
14355 namespace selftest {
14356
14357 /* Helper functions for writing tests of folding trees. */
14358
14359 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
14360
14361 static void
14362 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14363 tree constant)
14364 {
14365 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14366 }
14367
14368 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
14369 wrapping WRAPPED_EXPR. */
14370
14371 static void
14372 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14373 tree wrapped_expr)
14374 {
14375 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14376 ASSERT_NE (wrapped_expr, result);
14377 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14378 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14379 }
14380
14381 /* Verify that various arithmetic binary operations are folded
14382 correctly. */
14383
14384 static void
14385 test_arithmetic_folding ()
14386 {
14387 tree type = integer_type_node;
14388 tree x = create_tmp_var_raw (type, "x");
14389 tree zero = build_zero_cst (type);
14390 tree one = build_int_cst (type, 1);
14391
14392 /* Addition. */
14393 /* 1 <-- (0 + 1) */
14394 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14395 one);
14396 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14397 one);
14398
14399 /* (nonlvalue)x <-- (x + 0) */
14400 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14401 x);
14402
14403 /* Subtraction. */
14404 /* 0 <-- (x - x) */
14405 assert_binop_folds_to_const (x, MINUS_EXPR, x,
14406 zero);
14407 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14408 x);
14409
14410 /* Multiplication. */
14411 /* 0 <-- (x * 0) */
14412 assert_binop_folds_to_const (x, MULT_EXPR, zero,
14413 zero);
14414
14415 /* (nonlvalue)x <-- (x * 1) */
14416 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14417 x);
14418 }
14419
14420 /* Verify that various binary operations on vectors are folded
14421 correctly. */
14422
14423 static void
14424 test_vector_folding ()
14425 {
14426 tree inner_type = integer_type_node;
14427 tree type = build_vector_type (inner_type, 4);
14428 tree zero = build_zero_cst (type);
14429 tree one = build_one_cst (type);
14430
14431 /* Verify equality tests that return a scalar boolean result. */
14432 tree res_type = boolean_type_node;
14433 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14434 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14435 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14436 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14437 }
14438
14439 /* Run all of the selftests within this file. */
14440
14441 void
14442 fold_const_c_tests ()
14443 {
14444 test_arithmetic_folding ();
14445 test_vector_folding ();
14446 }
14447
14448 } // namespace selftest
14449
14450 #endif /* CHECKING_P */