Use tree_vector_builder instead of build_vector
gcc/fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "params.h"
75 #include "tree-into-ssa.h"
76 #include "md5.h"
77 #include "case-cfn-macros.h"
78 #include "stringpool.h"
79 #include "tree-vrp.h"
80 #include "tree-ssanames.h"
81 #include "selftest.h"
82 #include "stringpool.h"
83 #include "attribs.h"
84 #include "tree-vector-builder.h"
85
86 /* Nonzero if we are folding constants inside an initializer; zero
87 otherwise. */
88 int folding_initializer = 0;
89
90 /* The following constants represent a bit-based encoding of GCC's
91 comparison operators. This encoding simplifies transformations
92 on relational comparison operators, such as AND and OR. */
93 enum comparison_code {
94 COMPCODE_FALSE = 0,
95 COMPCODE_LT = 1,
96 COMPCODE_EQ = 2,
97 COMPCODE_LE = 3,
98 COMPCODE_GT = 4,
99 COMPCODE_LTGT = 5,
100 COMPCODE_GE = 6,
101 COMPCODE_ORD = 7,
102 COMPCODE_UNORD = 8,
103 COMPCODE_UNLT = 9,
104 COMPCODE_UNEQ = 10,
105 COMPCODE_UNLE = 11,
106 COMPCODE_UNGT = 12,
107 COMPCODE_NE = 13,
108 COMPCODE_UNGE = 14,
109 COMPCODE_TRUE = 15
110 };
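/* Editorial illustration (not in the original source): the low three bits
   encode LT, EQ and GT respectively, so the codes compose and intersect
   bitwise.  For example COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) == 3,
   COMPCODE_NE == (COMPCODE_UNORD | COMPCODE_LTGT) == 13, and ANDing two
   comparisons of the same operands intersects their encodings:
   COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ.  */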
111
112 static bool negate_expr_p (tree);
113 static tree negate_expr (tree);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static int twoval_comparison_p (tree, tree *, tree *, int *);
118 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
119 static tree optimize_bit_field_compare (location_t, enum tree_code,
120 tree, tree, tree);
121 static int simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
130 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
131 static tree fold_binary_op_with_conditional_arg (location_t,
132 enum tree_code, tree,
133 tree, tree,
134 tree, tree, int);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (const_tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_convert_const (enum tree_code, tree, tree);
139 static tree fold_view_convert_expr (tree, tree);
140 static tree fold_negate_expr (location_t, tree);
141
142
143 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
144 Otherwise, return LOC. */
145
146 static location_t
147 expr_location_or (tree t, location_t loc)
148 {
149 location_t tloc = EXPR_LOCATION (t);
150 return tloc == UNKNOWN_LOCATION ? loc : tloc;
151 }
152
153 /* Similar to protected_set_expr_location, but never modify x in place;
154 if the location can and needs to be set, unshare it. */
155
156 static inline tree
157 protected_set_expr_location_unshare (tree x, location_t loc)
158 {
159 if (CAN_HAVE_LOCATION_P (x)
160 && EXPR_LOCATION (x) != loc
161 && !(TREE_CODE (x) == SAVE_EXPR
162 || TREE_CODE (x) == TARGET_EXPR
163 || TREE_CODE (x) == BIND_EXPR))
164 {
165 x = copy_node (x);
166 SET_EXPR_LOCATION (x, loc);
167 }
168 return x;
169 }
170 \f
171 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
172 division and returns the quotient. Otherwise returns
173 NULL_TREE. */
174
175 tree
176 div_if_zero_remainder (const_tree arg1, const_tree arg2)
177 {
178 widest_int quo;
179
180 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
181 SIGNED, &quo))
182 return wide_int_to_tree (TREE_TYPE (arg1), quo);
183
184 return NULL_TREE;
185 }
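/* For example (editorial illustration): with INTEGER_CST arguments of the
   same integer type, div_if_zero_remainder (12, 4) yields the constant 3,
   while div_if_zero_remainder (12, 5) yields NULL_TREE because the signed
   division leaves a remainder.  */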
186 \f
187 /* This is nonzero if we should defer warnings about undefined
188 overflow. This facility exists because these warnings are a
189 special case. The code to estimate loop iterations does not want
190 to issue any warnings, since it works with expressions which do not
191 occur in user code. Various bits of cleanup code call fold(), but
192 only use the result if it has certain characteristics (e.g., is a
193 constant); that code only wants to issue a warning if the result is
194 used. */
195
196 static int fold_deferring_overflow_warnings;
197
198 /* If a warning about undefined overflow is deferred, this is the
199 warning. Note that this may cause us to turn two warnings into
200 one, but that is fine since it is sufficient to only give one
201 warning per expression. */
202
203 static const char* fold_deferred_overflow_warning;
204
205 /* If a warning about undefined overflow is deferred, this is the
206 level at which the warning should be emitted. */
207
208 static enum warn_strict_overflow_code fold_deferred_overflow_code;
209
210 /* Start deferring overflow warnings. We could use a stack here to
211 permit nested calls, but at present it is not necessary. */
212
213 void
214 fold_defer_overflow_warnings (void)
215 {
216 ++fold_deferring_overflow_warnings;
217 }
218
219 /* Stop deferring overflow warnings. If there is a pending warning,
220 and ISSUE is true, then issue the warning if appropriate. STMT is
221 the statement with which the warning should be associated (used for
222 location information); STMT may be NULL. CODE is the level of the
223 warning--a warn_strict_overflow_code value. This function will use
224 the smaller of CODE and the deferred code when deciding whether to
225 issue the warning. CODE may be zero to mean to always use the
226 deferred code. */
227
228 void
229 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
230 {
231 const char *warnmsg;
232 location_t locus;
233
234 gcc_assert (fold_deferring_overflow_warnings > 0);
235 --fold_deferring_overflow_warnings;
236 if (fold_deferring_overflow_warnings > 0)
237 {
238 if (fold_deferred_overflow_warning != NULL
239 && code != 0
240 && code < (int) fold_deferred_overflow_code)
241 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
242 return;
243 }
244
245 warnmsg = fold_deferred_overflow_warning;
246 fold_deferred_overflow_warning = NULL;
247
248 if (!issue || warnmsg == NULL)
249 return;
250
251 if (gimple_no_warning_p (stmt))
252 return;
253
254 /* Use the smallest code level when deciding to issue the
255 warning. */
256 if (code == 0 || code > (int) fold_deferred_overflow_code)
257 code = fold_deferred_overflow_code;
258
259 if (!issue_strict_overflow_warning (code))
260 return;
261
262 if (stmt == NULL)
263 locus = input_location;
264 else
265 locus = gimple_location (stmt);
266 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
267 }
268
269 /* Stop deferring overflow warnings, ignoring any deferred
270 warnings. */
271
272 void
273 fold_undefer_and_ignore_overflow_warnings (void)
274 {
275 fold_undefer_overflow_warnings (false, NULL, 0);
276 }
277
278 /* Whether we are deferring overflow warnings. */
279
280 bool
281 fold_deferring_overflow_warnings_p (void)
282 {
283 return fold_deferring_overflow_warnings > 0;
284 }
285
286 /* This is called when we fold something based on the fact that signed
287 overflow is undefined. */
288
289 void
290 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
291 {
292 if (fold_deferring_overflow_warnings > 0)
293 {
294 if (fold_deferred_overflow_warning == NULL
295 || wc < fold_deferred_overflow_code)
296 {
297 fold_deferred_overflow_warning = gmsgid;
298 fold_deferred_overflow_code = wc;
299 }
300 }
301 else if (issue_strict_overflow_warning (wc))
302 warning (OPT_Wstrict_overflow, gmsgid);
303 }
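/* Editorial sketch of the deferral protocol, assuming a caller that only
   wants a warning when it actually keeps the folded result:

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     bool keep = res != NULL_TREE && TREE_CONSTANT (res);
     fold_undefer_overflow_warnings (keep, stmt, 0);

   EXPR and STMT here are hypothetical locals; passing CODE == 0 means
   "use whatever warning level was recorded while deferring".  */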
304 \f
305 /* Return true if the built-in mathematical function specified by FN
306 is odd, i.e. -f(x) == f(-x). */
307
308 bool
309 negate_mathfn_p (combined_fn fn)
310 {
311 switch (fn)
312 {
313 CASE_CFN_ASIN:
314 CASE_CFN_ASINH:
315 CASE_CFN_ATAN:
316 CASE_CFN_ATANH:
317 CASE_CFN_CASIN:
318 CASE_CFN_CASINH:
319 CASE_CFN_CATAN:
320 CASE_CFN_CATANH:
321 CASE_CFN_CBRT:
322 CASE_CFN_CPROJ:
323 CASE_CFN_CSIN:
324 CASE_CFN_CSINH:
325 CASE_CFN_CTAN:
326 CASE_CFN_CTANH:
327 CASE_CFN_ERF:
328 CASE_CFN_LLROUND:
329 CASE_CFN_LROUND:
330 CASE_CFN_ROUND:
331 CASE_CFN_SIN:
332 CASE_CFN_SINH:
333 CASE_CFN_TAN:
334 CASE_CFN_TANH:
335 CASE_CFN_TRUNC:
336 return true;
337
338 CASE_CFN_LLRINT:
339 CASE_CFN_LRINT:
340 CASE_CFN_NEARBYINT:
341 CASE_CFN_RINT:
342 return !flag_rounding_math;
343
344 default:
345 break;
346 }
347 return false;
348 }
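/* For instance, sin is odd (-sin(x) == sin(-x)), so CFN_SIN appears above,
   while cos is even and is deliberately absent.  rint is only treated as
   odd when -frounding-math is off: under a directed rounding mode,
   rint(-x) and -rint(x) can differ (editorial example).  */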
349
350 /* Check whether we may negate an integer constant T without causing
351 overflow. */
352
353 bool
354 may_negate_without_overflow_p (const_tree t)
355 {
356 tree type;
357
358 gcc_assert (TREE_CODE (t) == INTEGER_CST);
359
360 type = TREE_TYPE (t);
361 if (TYPE_UNSIGNED (type))
362 return false;
363
364 return !wi::only_sign_bit_p (wi::to_wide (t));
365 }
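/* E.g. for a signed 32-bit type only INT_MIN (-2147483648), the value with
   just the sign bit set, fails this test; every other value negates
   without overflow (editorial example).  */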
366
367 /* Determine whether an expression T can be cheaply negated using
368 the function negate_expr without introducing undefined overflow. */
369
370 static bool
371 negate_expr_p (tree t)
372 {
373 tree type;
374
375 if (t == 0)
376 return false;
377
378 type = TREE_TYPE (t);
379
380 STRIP_SIGN_NOPS (t);
381 switch (TREE_CODE (t))
382 {
383 case INTEGER_CST:
384 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
385 return true;
386
387 /* Check that -CST will not overflow type. */
388 return may_negate_without_overflow_p (t);
389 case BIT_NOT_EXPR:
390 return (INTEGRAL_TYPE_P (type)
391 && TYPE_OVERFLOW_WRAPS (type));
392
393 case FIXED_CST:
394 return true;
395
396 case NEGATE_EXPR:
397 return !TYPE_OVERFLOW_SANITIZED (type);
398
399 case REAL_CST:
400 /* We want to canonicalize to positive real constants. Pretend
401 that only negative ones can be easily negated. */
402 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
403
404 case COMPLEX_CST:
405 return negate_expr_p (TREE_REALPART (t))
406 && negate_expr_p (TREE_IMAGPART (t));
407
408 case VECTOR_CST:
409 {
410 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
411 return true;
412
413 int count = VECTOR_CST_NELTS (t), i;
414
415 for (i = 0; i < count; i++)
416 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
417 return false;
418
419 return true;
420 }
421
422 case COMPLEX_EXPR:
423 return negate_expr_p (TREE_OPERAND (t, 0))
424 && negate_expr_p (TREE_OPERAND (t, 1));
425
426 case CONJ_EXPR:
427 return negate_expr_p (TREE_OPERAND (t, 0));
428
429 case PLUS_EXPR:
430 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
431 || HONOR_SIGNED_ZEROS (element_mode (type))
432 || (ANY_INTEGRAL_TYPE_P (type)
433 && ! TYPE_OVERFLOW_WRAPS (type)))
434 return false;
435 /* -(A + B) -> (-B) - A. */
436 if (negate_expr_p (TREE_OPERAND (t, 1)))
437 return true;
438 /* -(A + B) -> (-A) - B. */
439 return negate_expr_p (TREE_OPERAND (t, 0));
440
441 case MINUS_EXPR:
442 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
443 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
444 && !HONOR_SIGNED_ZEROS (element_mode (type))
445 && (! ANY_INTEGRAL_TYPE_P (type)
446 || TYPE_OVERFLOW_WRAPS (type));
447
448 case MULT_EXPR:
449 if (TYPE_UNSIGNED (type))
450 break;
451 /* INT_MIN/n * n doesn't overflow, but it does once one operand is
452 negated if n is a (negative) power of two. */
453 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
454 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
455 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
456 && (wi::popcount
457 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
458 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
459 && (wi::popcount
460 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
461 break;
462
463 /* Fall through. */
464
465 case RDIV_EXPR:
466 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
467 return negate_expr_p (TREE_OPERAND (t, 1))
468 || negate_expr_p (TREE_OPERAND (t, 0));
469 break;
470
471 case TRUNC_DIV_EXPR:
472 case ROUND_DIV_EXPR:
473 case EXACT_DIV_EXPR:
474 if (TYPE_UNSIGNED (type))
475 break;
476 if (negate_expr_p (TREE_OPERAND (t, 0)))
477 return true;
478 /* In general we can't negate B in A / B, because if A is INT_MIN and
479 B is 1, we may turn this into INT_MIN / -1 which is undefined
480 and actually traps on some architectures. */
481 if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
482 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
483 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
484 && ! integer_onep (TREE_OPERAND (t, 1))))
485 return negate_expr_p (TREE_OPERAND (t, 1));
486 break;
487
488 case NOP_EXPR:
489 /* Negate -((double)float) as (double)(-float). */
490 if (TREE_CODE (type) == REAL_TYPE)
491 {
492 tree tem = strip_float_extensions (t);
493 if (tem != t)
494 return negate_expr_p (tem);
495 }
496 break;
497
498 case CALL_EXPR:
499 /* Negate -f(x) as f(-x). */
500 if (negate_mathfn_p (get_call_combined_fn (t)))
501 return negate_expr_p (CALL_EXPR_ARG (t, 0));
502 break;
503
504 case RSHIFT_EXPR:
505 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
506 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
507 {
508 tree op1 = TREE_OPERAND (t, 1);
509 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
510 return true;
511 }
512 break;
513
514 default:
515 break;
516 }
517 return false;
518 }
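/* Editorial examples: negate_expr_p is true for the REAL_CST -2.5 (we
   canonicalize toward positive real constants), true for A - B when
   neither signed zeros nor sign-dependent rounding are honored and
   integer overflow wraps or is absent, and false for INT_MIN in a
   signed type that does not wrap.  */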
519
520 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
521 simplification is possible.
522 If negate_expr_p would return true for T, NULL_TREE will never be
523 returned. */
524
525 static tree
526 fold_negate_expr_1 (location_t loc, tree t)
527 {
528 tree type = TREE_TYPE (t);
529 tree tem;
530
531 switch (TREE_CODE (t))
532 {
533 /* Convert - (~A) to A + 1. */
534 case BIT_NOT_EXPR:
535 if (INTEGRAL_TYPE_P (type))
536 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
537 build_one_cst (type));
538 break;
539
540 case INTEGER_CST:
541 tem = fold_negate_const (t, type);
542 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
543 || (ANY_INTEGRAL_TYPE_P (type)
544 && !TYPE_OVERFLOW_TRAPS (type)
545 && TYPE_OVERFLOW_WRAPS (type))
546 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
547 return tem;
548 break;
549
550 case REAL_CST:
551 tem = fold_negate_const (t, type);
552 return tem;
553
554 case FIXED_CST:
555 tem = fold_negate_const (t, type);
556 return tem;
557
558 case COMPLEX_CST:
559 {
560 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
561 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
562 if (rpart && ipart)
563 return build_complex (type, rpart, ipart);
564 }
565 break;
566
567 case VECTOR_CST:
568 {
569 int count = VECTOR_CST_NELTS (t), i;
570
571 auto_vec<tree, 32> elts (count);
572 for (i = 0; i < count; i++)
573 {
574 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
575 if (elt == NULL_TREE)
576 return NULL_TREE;
577 elts.quick_push (elt);
578 }
579
580 return build_vector (type, elts);
581 }
582
583 case COMPLEX_EXPR:
584 if (negate_expr_p (t))
585 return fold_build2_loc (loc, COMPLEX_EXPR, type,
586 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
587 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
588 break;
589
590 case CONJ_EXPR:
591 if (negate_expr_p (t))
592 return fold_build1_loc (loc, CONJ_EXPR, type,
593 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
594 break;
595
596 case NEGATE_EXPR:
597 if (!TYPE_OVERFLOW_SANITIZED (type))
598 return TREE_OPERAND (t, 0);
599 break;
600
601 case PLUS_EXPR:
602 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
603 && !HONOR_SIGNED_ZEROS (element_mode (type)))
604 {
605 /* -(A + B) -> (-B) - A. */
606 if (negate_expr_p (TREE_OPERAND (t, 1)))
607 {
608 tem = negate_expr (TREE_OPERAND (t, 1));
609 return fold_build2_loc (loc, MINUS_EXPR, type,
610 tem, TREE_OPERAND (t, 0));
611 }
612
613 /* -(A + B) -> (-A) - B. */
614 if (negate_expr_p (TREE_OPERAND (t, 0)))
615 {
616 tem = negate_expr (TREE_OPERAND (t, 0));
617 return fold_build2_loc (loc, MINUS_EXPR, type,
618 tem, TREE_OPERAND (t, 1));
619 }
620 }
621 break;
622
623 case MINUS_EXPR:
624 /* - (A - B) -> B - A */
625 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
626 && !HONOR_SIGNED_ZEROS (element_mode (type)))
627 return fold_build2_loc (loc, MINUS_EXPR, type,
628 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
629 break;
630
631 case MULT_EXPR:
632 if (TYPE_UNSIGNED (type))
633 break;
634
635 /* Fall through. */
636
637 case RDIV_EXPR:
638 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
639 {
640 tem = TREE_OPERAND (t, 1);
641 if (negate_expr_p (tem))
642 return fold_build2_loc (loc, TREE_CODE (t), type,
643 TREE_OPERAND (t, 0), negate_expr (tem));
644 tem = TREE_OPERAND (t, 0);
645 if (negate_expr_p (tem))
646 return fold_build2_loc (loc, TREE_CODE (t), type,
647 negate_expr (tem), TREE_OPERAND (t, 1));
648 }
649 break;
650
651 case TRUNC_DIV_EXPR:
652 case ROUND_DIV_EXPR:
653 case EXACT_DIV_EXPR:
654 if (TYPE_UNSIGNED (type))
655 break;
656 if (negate_expr_p (TREE_OPERAND (t, 0)))
657 return fold_build2_loc (loc, TREE_CODE (t), type,
658 negate_expr (TREE_OPERAND (t, 0)),
659 TREE_OPERAND (t, 1));
660 /* In general we can't negate B in A / B, because if A is INT_MIN and
661 B is 1, we may turn this into INT_MIN / -1 which is undefined
662 and actually traps on some architectures. */
663 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
664 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
665 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
666 && ! integer_onep (TREE_OPERAND (t, 1))))
667 && negate_expr_p (TREE_OPERAND (t, 1)))
668 return fold_build2_loc (loc, TREE_CODE (t), type,
669 TREE_OPERAND (t, 0),
670 negate_expr (TREE_OPERAND (t, 1)));
671 break;
672
673 case NOP_EXPR:
674 /* Convert -((double)float) into (double)(-float). */
675 if (TREE_CODE (type) == REAL_TYPE)
676 {
677 tem = strip_float_extensions (t);
678 if (tem != t && negate_expr_p (tem))
679 return fold_convert_loc (loc, type, negate_expr (tem));
680 }
681 break;
682
683 case CALL_EXPR:
684 /* Negate -f(x) as f(-x). */
685 if (negate_mathfn_p (get_call_combined_fn (t))
686 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
687 {
688 tree fndecl, arg;
689
690 fndecl = get_callee_fndecl (t);
691 arg = negate_expr (CALL_EXPR_ARG (t, 0));
692 return build_call_expr_loc (loc, fndecl, 1, arg);
693 }
694 break;
695
696 case RSHIFT_EXPR:
697 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
698 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
699 {
700 tree op1 = TREE_OPERAND (t, 1);
701 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
702 {
703 tree ntype = TYPE_UNSIGNED (type)
704 ? signed_type_for (type)
705 : unsigned_type_for (type);
706 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
707 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
708 return fold_convert_loc (loc, type, temp);
709 }
710 }
711 break;
712
713 default:
714 break;
715 }
716
717 return NULL_TREE;
718 }
719
720 /* A wrapper for fold_negate_expr_1. */
721
722 static tree
723 fold_negate_expr (location_t loc, tree t)
724 {
725 tree type = TREE_TYPE (t);
726 STRIP_SIGN_NOPS (t);
727 tree tem = fold_negate_expr_1 (loc, t);
728 if (tem == NULL_TREE)
729 return NULL_TREE;
730 return fold_convert_loc (loc, type, tem);
731 }
732
733 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
734 negated in a simpler way. Also allow T to be NULL_TREE, in which case
735 return NULL_TREE. */
736
737 static tree
738 negate_expr (tree t)
739 {
740 tree type, tem;
741 location_t loc;
742
743 if (t == NULL_TREE)
744 return NULL_TREE;
745
746 loc = EXPR_LOCATION (t);
747 type = TREE_TYPE (t);
748 STRIP_SIGN_NOPS (t);
749
750 tem = fold_negate_expr (loc, t);
751 if (!tem)
752 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
753 return fold_convert_loc (loc, type, tem);
754 }
755 \f
756 /* Split a tree IN into constant, literal and variable parts that could be
757 combined with CODE to make IN. "constant" means an expression with
758 TREE_CONSTANT but that isn't an actual constant. CODE must be a
759 commutative arithmetic operation. Store the constant part into *CONP,
760 the literal in *LITP and return the variable part. If a part isn't
761 present, set it to null. If the tree does not decompose in this way,
762 return the entire tree as the variable part and the other parts as null.
763
764 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
765 case, we negate an operand that was subtracted, except if it is a
766 literal, for which we use *MINUS_LITP instead.
767
768 If NEGATE_P is true, we are negating all of IN, again except a literal
769 for which we use *MINUS_LITP instead. If a variable part is of pointer
770 type, it is negated after converting to TYPE. This prevents us from
771 generating an illegal MINUS pointer expression. LOC is the location of
772 the converted variable part.
773
774 If IN is itself a literal or constant, return it as appropriate.
775
776 Note that we do not guarantee that any of the three values will be the
777 same type as IN, but they will have the same signedness and mode. */
778
779 static tree
780 split_tree (tree in, tree type, enum tree_code code,
781 tree *minus_varp, tree *conp, tree *minus_conp,
782 tree *litp, tree *minus_litp, int negate_p)
783 {
784 tree var = 0;
785 *minus_varp = 0;
786 *conp = 0;
787 *minus_conp = 0;
788 *litp = 0;
789 *minus_litp = 0;
790
791 /* Strip any conversions that don't change the machine mode or signedness. */
792 STRIP_SIGN_NOPS (in);
793
794 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
795 || TREE_CODE (in) == FIXED_CST)
796 *litp = in;
797 else if (TREE_CODE (in) == code
798 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
799 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
800 /* We can associate addition and subtraction together (even
801 though the C standard doesn't say so) for integers because
802 the value is not affected. For reals, the value might be
803 affected, so we can't. */
804 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
805 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
806 || (code == MINUS_EXPR
807 && (TREE_CODE (in) == PLUS_EXPR
808 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
809 {
810 tree op0 = TREE_OPERAND (in, 0);
811 tree op1 = TREE_OPERAND (in, 1);
812 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
813 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
814
815 /* First see if either of the operands is a literal, then a constant. */
816 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
817 || TREE_CODE (op0) == FIXED_CST)
818 *litp = op0, op0 = 0;
819 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
820 || TREE_CODE (op1) == FIXED_CST)
821 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
822
823 if (op0 != 0 && TREE_CONSTANT (op0))
824 *conp = op0, op0 = 0;
825 else if (op1 != 0 && TREE_CONSTANT (op1))
826 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
827
828 /* If we haven't dealt with either operand, this is not a case we can
829 decompose. Otherwise, VAR is either of the ones remaining, if any. */
830 if (op0 != 0 && op1 != 0)
831 var = in;
832 else if (op0 != 0)
833 var = op0;
834 else
835 var = op1, neg_var_p = neg1_p;
836
837 /* Now do any needed negations. */
838 if (neg_litp_p)
839 *minus_litp = *litp, *litp = 0;
840 if (neg_conp_p && *conp)
841 *minus_conp = *conp, *conp = 0;
842 if (neg_var_p && var)
843 *minus_varp = var, var = 0;
844 }
845 else if (TREE_CONSTANT (in))
846 *conp = in;
847 else if (TREE_CODE (in) == BIT_NOT_EXPR
848 && code == PLUS_EXPR)
849 {
850 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
851 when IN is constant. */
852 *litp = build_minus_one_cst (type);
853 *minus_varp = TREE_OPERAND (in, 0);
854 }
855 else
856 var = in;
857
858 if (negate_p)
859 {
860 if (*litp)
861 *minus_litp = *litp, *litp = 0;
862 else if (*minus_litp)
863 *litp = *minus_litp, *minus_litp = 0;
864 if (*conp)
865 *minus_conp = *conp, *conp = 0;
866 else if (*minus_conp)
867 *conp = *minus_conp, *minus_conp = 0;
868 if (var)
869 *minus_varp = var, var = 0;
870 else if (*minus_varp)
871 var = *minus_varp, *minus_varp = 0;
872 }
873
874 if (*litp
875 && TREE_OVERFLOW_P (*litp))
876 *litp = drop_tree_overflow (*litp);
877 if (*minus_litp
878 && TREE_OVERFLOW_P (*minus_litp))
879 *minus_litp = drop_tree_overflow (*minus_litp);
880
881 return var;
882 }
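/* Editorial worked example: splitting IN = x + 3 with CODE == PLUS_EXPR
   stores 3 in *LITP and returns x; splitting IN = x - 3 stores 3 in
   *MINUS_LITP instead.  With NEGATE_P set, the two literal slots are
   exchanged and the variable part moves to *MINUS_VARP.  */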
883
884 /* Re-associate trees split by the above function. T1 and T2 are
885 either expressions to associate or null. Return the new
886 expression, if any. LOC is the location of the new expression. If
887 we build an operation, do it in TYPE and with CODE. */
888
889 static tree
890 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
891 {
892 if (t1 == 0)
893 {
894 gcc_assert (t2 == 0 || code != MINUS_EXPR);
895 return t2;
896 }
897 else if (t2 == 0)
898 return t1;
899
900 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
901 try to fold this since we will have infinite recursion. But do
902 deal with any NEGATE_EXPRs. */
903 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
904 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
905 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
906 {
907 if (code == PLUS_EXPR)
908 {
909 if (TREE_CODE (t1) == NEGATE_EXPR)
910 return build2_loc (loc, MINUS_EXPR, type,
911 fold_convert_loc (loc, type, t2),
912 fold_convert_loc (loc, type,
913 TREE_OPERAND (t1, 0)));
914 else if (TREE_CODE (t2) == NEGATE_EXPR)
915 return build2_loc (loc, MINUS_EXPR, type,
916 fold_convert_loc (loc, type, t1),
917 fold_convert_loc (loc, type,
918 TREE_OPERAND (t2, 0)));
919 else if (integer_zerop (t2))
920 return fold_convert_loc (loc, type, t1);
921 }
922 else if (code == MINUS_EXPR)
923 {
924 if (integer_zerop (t2))
925 return fold_convert_loc (loc, type, t1);
926 }
927
928 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
929 fold_convert_loc (loc, type, t2));
930 }
931
932 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
933 fold_convert_loc (loc, type, t2));
934 }
935 \f
936 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
937 for use in int_const_binop, size_binop and size_diffop. */
938
939 static bool
940 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
941 {
942 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
943 return false;
944 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
945 return false;
946
947 switch (code)
948 {
949 case LSHIFT_EXPR:
950 case RSHIFT_EXPR:
951 case LROTATE_EXPR:
952 case RROTATE_EXPR:
953 return true;
954
955 default:
956 break;
957 }
958
959 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
960 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
961 && TYPE_MODE (type1) == TYPE_MODE (type2);
962 }
963
964
965 /* Combine two integer constants PARG1 and PARG2 under operation CODE
966 to produce a new constant. Return NULL_TREE if we don't know how
967 to evaluate CODE at compile-time. */
968
969 static tree
970 int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2,
971 int overflowable)
972 {
973 wide_int res;
974 tree t;
975 tree type = TREE_TYPE (parg1);
976 signop sign = TYPE_SIGN (type);
977 bool overflow = false;
978
979 wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
980 wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));
981
982 switch (code)
983 {
984 case BIT_IOR_EXPR:
985 res = wi::bit_or (arg1, arg2);
986 break;
987
988 case BIT_XOR_EXPR:
989 res = wi::bit_xor (arg1, arg2);
990 break;
991
992 case BIT_AND_EXPR:
993 res = wi::bit_and (arg1, arg2);
994 break;
995
996 case RSHIFT_EXPR:
997 case LSHIFT_EXPR:
998 if (wi::neg_p (arg2))
999 {
1000 arg2 = -arg2;
1001 if (code == RSHIFT_EXPR)
1002 code = LSHIFT_EXPR;
1003 else
1004 code = RSHIFT_EXPR;
1005 }
1006
1007 if (code == RSHIFT_EXPR)
1008 /* It's unclear from the C standard whether shifts can overflow.
1009 The following code ignores overflow; perhaps a C standard
1010 interpretation ruling is needed. */
1011 res = wi::rshift (arg1, arg2, sign);
1012 else
1013 res = wi::lshift (arg1, arg2);
1014 break;
1015
1016 case RROTATE_EXPR:
1017 case LROTATE_EXPR:
1018 if (wi::neg_p (arg2))
1019 {
1020 arg2 = -arg2;
1021 if (code == RROTATE_EXPR)
1022 code = LROTATE_EXPR;
1023 else
1024 code = RROTATE_EXPR;
1025 }
1026
1027 if (code == RROTATE_EXPR)
1028 res = wi::rrotate (arg1, arg2);
1029 else
1030 res = wi::lrotate (arg1, arg2);
1031 break;
1032
1033 case PLUS_EXPR:
1034 res = wi::add (arg1, arg2, sign, &overflow);
1035 break;
1036
1037 case MINUS_EXPR:
1038 res = wi::sub (arg1, arg2, sign, &overflow);
1039 break;
1040
1041 case MULT_EXPR:
1042 res = wi::mul (arg1, arg2, sign, &overflow);
1043 break;
1044
1045 case MULT_HIGHPART_EXPR:
1046 res = wi::mul_high (arg1, arg2, sign);
1047 break;
1048
1049 case TRUNC_DIV_EXPR:
1050 case EXACT_DIV_EXPR:
1051 if (arg2 == 0)
1052 return NULL_TREE;
1053 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1054 break;
1055
1056 case FLOOR_DIV_EXPR:
1057 if (arg2 == 0)
1058 return NULL_TREE;
1059 res = wi::div_floor (arg1, arg2, sign, &overflow);
1060 break;
1061
1062 case CEIL_DIV_EXPR:
1063 if (arg2 == 0)
1064 return NULL_TREE;
1065 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1066 break;
1067
1068 case ROUND_DIV_EXPR:
1069 if (arg2 == 0)
1070 return NULL_TREE;
1071 res = wi::div_round (arg1, arg2, sign, &overflow);
1072 break;
1073
1074 case TRUNC_MOD_EXPR:
1075 if (arg2 == 0)
1076 return NULL_TREE;
1077 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1078 break;
1079
1080 case FLOOR_MOD_EXPR:
1081 if (arg2 == 0)
1082 return NULL_TREE;
1083 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1084 break;
1085
1086 case CEIL_MOD_EXPR:
1087 if (arg2 == 0)
1088 return NULL_TREE;
1089 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1090 break;
1091
1092 case ROUND_MOD_EXPR:
1093 if (arg2 == 0)
1094 return NULL_TREE;
1095 res = wi::mod_round (arg1, arg2, sign, &overflow);
1096 break;
1097
1098 case MIN_EXPR:
1099 res = wi::min (arg1, arg2, sign);
1100 break;
1101
1102 case MAX_EXPR:
1103 res = wi::max (arg1, arg2, sign);
1104 break;
1105
1106 default:
1107 return NULL_TREE;
1108 }
1109
1110 t = force_fit_type (type, res, overflowable,
1111 (((sign == SIGNED || overflowable == -1)
1112 && overflow)
1113 | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));
1114
1115 return t;
1116 }
1117
1118 tree
1119 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1120 {
1121 return int_const_binop_1 (code, arg1, arg2, 1);
1122 }
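/* For example (editorial, 32-bit int operands): int_const_binop
   (PLUS_EXPR, 0x7fffffff, 1) wraps to INT_MIN and, because OVERFLOWABLE
   is 1 and the operands are signed, the result carries TREE_OVERFLOW;
   int_const_binop (TRUNC_DIV_EXPR, 1, 0) returns NULL_TREE.  */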
1123
1124 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1125 constant. We assume ARG1 and ARG2 have the same data type, or at least
1126 are the same kind of constant and the same machine mode. Return zero if
1127 combining the constants is not allowed in the current operating mode. */
1128
1129 static tree
1130 const_binop (enum tree_code code, tree arg1, tree arg2)
1131 {
1132 /* Sanity check for the recursive cases. */
1133 if (!arg1 || !arg2)
1134 return NULL_TREE;
1135
1136 STRIP_NOPS (arg1);
1137 STRIP_NOPS (arg2);
1138
1139 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1140 {
1141 if (code == POINTER_PLUS_EXPR)
1142 return int_const_binop (PLUS_EXPR,
1143 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1144
1145 return int_const_binop (code, arg1, arg2);
1146 }
1147
1148 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1149 {
1150 machine_mode mode;
1151 REAL_VALUE_TYPE d1;
1152 REAL_VALUE_TYPE d2;
1153 REAL_VALUE_TYPE value;
1154 REAL_VALUE_TYPE result;
1155 bool inexact;
1156 tree t, type;
1157
1158 /* The following codes are handled by real_arithmetic. */
1159 switch (code)
1160 {
1161 case PLUS_EXPR:
1162 case MINUS_EXPR:
1163 case MULT_EXPR:
1164 case RDIV_EXPR:
1165 case MIN_EXPR:
1166 case MAX_EXPR:
1167 break;
1168
1169 default:
1170 return NULL_TREE;
1171 }
1172
1173 d1 = TREE_REAL_CST (arg1);
1174 d2 = TREE_REAL_CST (arg2);
1175
1176 type = TREE_TYPE (arg1);
1177 mode = TYPE_MODE (type);
1178
1179 /* Don't perform the operation if we honor signaling NaNs and
1180 either operand is a signaling NaN. */
1181 if (HONOR_SNANS (mode)
1182 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1183 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1184 return NULL_TREE;
1185
1186 /* Don't perform the operation if it would raise a division
1187 by zero exception. */
1188 if (code == RDIV_EXPR
1189 && real_equal (&d2, &dconst0)
1190 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1191 return NULL_TREE;
1192
1193 /* If either operand is a NaN, just return it. Otherwise, set up
1194 for floating-point trap; we return an overflow. */
1195 if (REAL_VALUE_ISNAN (d1))
1196 {
1197 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1198 is off. */
1199 d1.signalling = 0;
1200 t = build_real (type, d1);
1201 return t;
1202 }
1203 else if (REAL_VALUE_ISNAN (d2))
1204 {
1205 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1206 is off. */
1207 d2.signalling = 0;
1208 t = build_real (type, d2);
1209 return t;
1210 }
1211
1212 inexact = real_arithmetic (&value, code, &d1, &d2);
1213 real_convert (&result, mode, &value);
1214
1215 /* Don't constant fold this floating point operation if
1216 the result has overflowed and flag_trapping_math is set. */
1217 if (flag_trapping_math
1218 && MODE_HAS_INFINITIES (mode)
1219 && REAL_VALUE_ISINF (result)
1220 && !REAL_VALUE_ISINF (d1)
1221 && !REAL_VALUE_ISINF (d2))
1222 return NULL_TREE;
1223
1224 /* Don't constant fold this floating point operation if the
1225 result may depend upon the run-time rounding mode and
1226 flag_rounding_math is set, or if GCC's software emulation
1227 is unable to accurately represent the result. */
1228 if ((flag_rounding_math
1229 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1230 && (inexact || !real_identical (&result, &value)))
1231 return NULL_TREE;
1232
1233 t = build_real (type, result);
1234
1235 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1236 return t;
1237 }
1238
1239 if (TREE_CODE (arg1) == FIXED_CST)
1240 {
1241 FIXED_VALUE_TYPE f1;
1242 FIXED_VALUE_TYPE f2;
1243 FIXED_VALUE_TYPE result;
1244 tree t, type;
1245 int sat_p;
1246 bool overflow_p;
1247
1248 /* The following codes are handled by fixed_arithmetic. */
1249 switch (code)
1250 {
1251 case PLUS_EXPR:
1252 case MINUS_EXPR:
1253 case MULT_EXPR:
1254 case TRUNC_DIV_EXPR:
1255 if (TREE_CODE (arg2) != FIXED_CST)
1256 return NULL_TREE;
1257 f2 = TREE_FIXED_CST (arg2);
1258 break;
1259
1260 case LSHIFT_EXPR:
1261 case RSHIFT_EXPR:
1262 {
1263 if (TREE_CODE (arg2) != INTEGER_CST)
1264 return NULL_TREE;
1265 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1266 f2.data.high = w2.elt (1);
1267 f2.data.low = w2.ulow ();
1268 f2.mode = SImode;
1269 }
1270 break;
1271
1272 default:
1273 return NULL_TREE;
1274 }
1275
1276 f1 = TREE_FIXED_CST (arg1);
1277 type = TREE_TYPE (arg1);
1278 sat_p = TYPE_SATURATING (type);
1279 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1280 t = build_fixed (type, result);
1281 /* Propagate overflow flags. */
1282 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1283 TREE_OVERFLOW (t) = 1;
1284 return t;
1285 }
1286
1287 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1288 {
1289 tree type = TREE_TYPE (arg1);
1290 tree r1 = TREE_REALPART (arg1);
1291 tree i1 = TREE_IMAGPART (arg1);
1292 tree r2 = TREE_REALPART (arg2);
1293 tree i2 = TREE_IMAGPART (arg2);
1294 tree real, imag;
1295
1296 switch (code)
1297 {
1298 case PLUS_EXPR:
1299 case MINUS_EXPR:
1300 real = const_binop (code, r1, r2);
1301 imag = const_binop (code, i1, i2);
1302 break;
1303
1304 case MULT_EXPR:
1305 if (COMPLEX_FLOAT_TYPE_P (type))
1306 return do_mpc_arg2 (arg1, arg2, type,
1307 /* do_nonfinite= */ folding_initializer,
1308 mpc_mul);
1309
1310 real = const_binop (MINUS_EXPR,
1311 const_binop (MULT_EXPR, r1, r2),
1312 const_binop (MULT_EXPR, i1, i2));
1313 imag = const_binop (PLUS_EXPR,
1314 const_binop (MULT_EXPR, r1, i2),
1315 const_binop (MULT_EXPR, i1, r2));
1316 break;
1317
1318 case RDIV_EXPR:
1319 if (COMPLEX_FLOAT_TYPE_P (type))
1320 return do_mpc_arg2 (arg1, arg2, type,
1321 /* do_nonfinite= */ folding_initializer,
1322 mpc_div);
1323 /* Fallthru. */
1324 case TRUNC_DIV_EXPR:
1325 case CEIL_DIV_EXPR:
1326 case FLOOR_DIV_EXPR:
1327 case ROUND_DIV_EXPR:
1328 if (flag_complex_method == 0)
1329 {
1330 /* Keep this algorithm in sync with
1331 tree-complex.c:expand_complex_div_straight().
1332
1333 Expand complex division to scalars, straightforward algorithm.
1334 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1335 t = br*br + bi*bi
1336 */
1337 tree magsquared
1338 = const_binop (PLUS_EXPR,
1339 const_binop (MULT_EXPR, r2, r2),
1340 const_binop (MULT_EXPR, i2, i2));
1341 tree t1
1342 = const_binop (PLUS_EXPR,
1343 const_binop (MULT_EXPR, r1, r2),
1344 const_binop (MULT_EXPR, i1, i2));
1345 tree t2
1346 = const_binop (MINUS_EXPR,
1347 const_binop (MULT_EXPR, i1, r2),
1348 const_binop (MULT_EXPR, r1, i2));
1349
1350 real = const_binop (code, t1, magsquared);
1351 imag = const_binop (code, t2, magsquared);
1352 }
1353 else
1354 {
1355 /* Keep this algorithm in sync with
1356 tree-complex.c:expand_complex_div_wide().
1357
1358 Expand complex division to scalars, modified algorithm to minimize
1359 overflow with wide input ranges. */
1360 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1361 fold_abs_const (r2, TREE_TYPE (type)),
1362 fold_abs_const (i2, TREE_TYPE (type)));
1363
1364 if (integer_nonzerop (compare))
1365 {
1366 /* In the TRUE branch, we compute
1367 ratio = br/bi;
1368 div = (br * ratio) + bi;
1369 tr = (ar * ratio) + ai;
1370 ti = (ai * ratio) - ar;
1371 tr = tr / div;
1372 ti = ti / div; */
1373 tree ratio = const_binop (code, r2, i2);
1374 tree div = const_binop (PLUS_EXPR, i2,
1375 const_binop (MULT_EXPR, r2, ratio));
1376 real = const_binop (MULT_EXPR, r1, ratio);
1377 real = const_binop (PLUS_EXPR, real, i1);
1378 real = const_binop (code, real, div);
1379
1380 imag = const_binop (MULT_EXPR, i1, ratio);
1381 imag = const_binop (MINUS_EXPR, imag, r1);
1382 imag = const_binop (code, imag, div);
1383 }
1384 else
1385 {
1386 /* In the FALSE branch, we compute
1387 ratio = bi/br;
1388 div = (bi * ratio) + br;
1389 tr = (ai * ratio) + ar;
1390 ti = ai - (ar * ratio);
1391 tr = tr / div;
1392 ti = ti / div; */
1393 tree ratio = const_binop (code, i2, r2);
1394 tree div = const_binop (PLUS_EXPR, r2,
1395 const_binop (MULT_EXPR, i2, ratio));
1396
1397 real = const_binop (MULT_EXPR, i1, ratio);
1398 real = const_binop (PLUS_EXPR, real, r1);
1399 real = const_binop (code, real, div);
1400
1401 imag = const_binop (MULT_EXPR, r1, ratio);
1402 imag = const_binop (MINUS_EXPR, i1, imag);
1403 imag = const_binop (code, imag, div);
1404 }
1405 }
1406 break;
1407
1408 default:
1409 return NULL_TREE;
1410 }
1411
1412 if (real && imag)
1413 return build_complex (type, real, imag);
1414 }
1415
1416 if (TREE_CODE (arg1) == VECTOR_CST
1417 && TREE_CODE (arg2) == VECTOR_CST)
1418 {
1419 tree type = TREE_TYPE (arg1);
1420 int count = VECTOR_CST_NELTS (arg1), i;
1421
1422 auto_vec<tree, 32> elts (count);
1423 for (i = 0; i < count; i++)
1424 {
1425 tree elem1 = VECTOR_CST_ELT (arg1, i);
1426 tree elem2 = VECTOR_CST_ELT (arg2, i);
1427
1428 tree elt = const_binop (code, elem1, elem2);
1429
1430 /* It is possible that const_binop cannot handle the given
1431 code and returns NULL_TREE. */
1432 if (elt == NULL_TREE)
1433 return NULL_TREE;
1434 elts.quick_push (elt);
1435 }
1436
1437 return build_vector (type, elts);
1438 }
1439
1440 /* Shifts allow a scalar shift amount for a vector. */
1441 if (TREE_CODE (arg1) == VECTOR_CST
1442 && TREE_CODE (arg2) == INTEGER_CST)
1443 {
1444 tree type = TREE_TYPE (arg1);
1445 int count = VECTOR_CST_NELTS (arg1), i;
1446
1447 auto_vec<tree, 32> elts (count);
1448 for (i = 0; i < count; i++)
1449 {
1450 tree elem1 = VECTOR_CST_ELT (arg1, i);
1451
1452 tree elt = const_binop (code, elem1, arg2);
1453
1454 /* It is possible that const_binop cannot handle the given
1455 code and returns NULL_TREE. */
1456 if (elt == NULL_TREE)
1457 return NULL_TREE;
1458 elts.quick_push (elt);
1459 }
1460
1461 return build_vector (type, elts);
1462 }
1463 return NULL_TREE;
1464 }
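/* Editorial examples (vector values written informally): const_binop
   (PLUS_EXPR, 2, 3) folds to 5, and const_binop (MULT_EXPR, {1,2,3,4},
   {2,2,2,2}) folds element-wise to {2,4,6,8}; const_binop (RDIV_EXPR,
   1.0, 0.0) is deliberately not folded under the default
   -ftrapping-math and yields NULL_TREE.  */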
1465
1466 /* Overload that adds a TYPE parameter to be able to dispatch
1467 to fold_relational_const. */
1468
1469 tree
1470 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1471 {
1472 if (TREE_CODE_CLASS (code) == tcc_comparison)
1473 return fold_relational_const (code, type, arg1, arg2);
1474
1475 /* ??? Until we make the const_binop worker take the type of the
1476 result as argument, put those cases that need it here. */
1477 switch (code)
1478 {
1479 case COMPLEX_EXPR:
1480 if ((TREE_CODE (arg1) == REAL_CST
1481 && TREE_CODE (arg2) == REAL_CST)
1482 || (TREE_CODE (arg1) == INTEGER_CST
1483 && TREE_CODE (arg2) == INTEGER_CST))
1484 return build_complex (type, arg1, arg2);
1485 return NULL_TREE;
1486
1487 case POINTER_DIFF_EXPR:
1488 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1489 {
1490 offset_int res = wi::sub (wi::to_offset (arg1),
1491 wi::to_offset (arg2));
1492 return force_fit_type (type, res, 1,
1493 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1494 }
1495 return NULL_TREE;
1496
1497 case VEC_PACK_TRUNC_EXPR:
1498 case VEC_PACK_FIX_TRUNC_EXPR:
1499 {
1500 unsigned int out_nelts, in_nelts, i;
1501
1502 if (TREE_CODE (arg1) != VECTOR_CST
1503 || TREE_CODE (arg2) != VECTOR_CST)
1504 return NULL_TREE;
1505
1506 in_nelts = VECTOR_CST_NELTS (arg1);
1507 out_nelts = in_nelts * 2;
1508 gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
1509 && out_nelts == TYPE_VECTOR_SUBPARTS (type));
1510
1511 tree_vector_builder elts (type, out_nelts, 1);
1512 for (i = 0; i < out_nelts; i++)
1513 {
1514 tree elt = (i < in_nelts
1515 ? VECTOR_CST_ELT (arg1, i)
1516 : VECTOR_CST_ELT (arg2, i - in_nelts));
1517 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1518 ? NOP_EXPR : FIX_TRUNC_EXPR,
1519 TREE_TYPE (type), elt);
1520 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1521 return NULL_TREE;
1522 elts.quick_push (elt);
1523 }
1524
1525 return elts.build ();
1526 }
1527
1528 case VEC_WIDEN_MULT_LO_EXPR:
1529 case VEC_WIDEN_MULT_HI_EXPR:
1530 case VEC_WIDEN_MULT_EVEN_EXPR:
1531 case VEC_WIDEN_MULT_ODD_EXPR:
1532 {
1533 unsigned int out_nelts, in_nelts, out, ofs, scale;
1534
1535 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1536 return NULL_TREE;
1537
1538 in_nelts = VECTOR_CST_NELTS (arg1);
1539 out_nelts = in_nelts / 2;
1540 gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
1541 && out_nelts == TYPE_VECTOR_SUBPARTS (type));
1542
1543 if (code == VEC_WIDEN_MULT_LO_EXPR)
1544 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1545 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1546 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1547 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1548 scale = 1, ofs = 0;
1549 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1550 scale = 1, ofs = 1;
1551
1552 tree_vector_builder elts (type, out_nelts, 1);
1553 for (out = 0; out < out_nelts; out++)
1554 {
1555 unsigned int in = (out << scale) + ofs;
1556 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1557 VECTOR_CST_ELT (arg1, in));
1558 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1559 VECTOR_CST_ELT (arg2, in));
1560
1561 if (t1 == NULL_TREE || t2 == NULL_TREE)
1562 return NULL_TREE;
1563 tree elt = const_binop (MULT_EXPR, t1, t2);
1564 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1565 return NULL_TREE;
1566 elts.quick_push (elt);
1567 }
1568
1569 return elts.build ();
1570 }
1571
1572 default:;
1573 }
1574
1575 if (TREE_CODE_CLASS (code) != tcc_binary)
1576 return NULL_TREE;
1577
1578 /* Make sure type and arg0 have the same saturating flag. */
1579 gcc_checking_assert (TYPE_SATURATING (type)
1580 == TYPE_SATURATING (TREE_TYPE (arg1)));
1581
1582 return const_binop (code, arg1, arg2);
1583 }
1584
1585 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1586 Return zero if computing the constant is not possible. */
1587
1588 tree
1589 const_unop (enum tree_code code, tree type, tree arg0)
1590 {
1591 /* Don't perform the operation, other than NEGATE and ABS, if
1592 flag_signaling_nans is on and the operand is a signaling NaN. */
1593 if (TREE_CODE (arg0) == REAL_CST
1594 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1595 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1596 && code != NEGATE_EXPR
1597 && code != ABS_EXPR)
1598 return NULL_TREE;
1599
1600 switch (code)
1601 {
1602 CASE_CONVERT:
1603 case FLOAT_EXPR:
1604 case FIX_TRUNC_EXPR:
1605 case FIXED_CONVERT_EXPR:
1606 return fold_convert_const (code, type, arg0);
1607
1608 case ADDR_SPACE_CONVERT_EXPR:
1609 /* If the source address is 0, and the source address space
1610 cannot have a valid object at 0, fold to dest type null. */
1611 if (integer_zerop (arg0)
1612 && !(targetm.addr_space.zero_address_valid
1613 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1614 return fold_convert_const (code, type, arg0);
1615 break;
1616
1617 case VIEW_CONVERT_EXPR:
1618 return fold_view_convert_expr (type, arg0);
1619
1620 case NEGATE_EXPR:
1621 {
1622 /* Can't call fold_negate_const directly here as that doesn't
1623 handle all cases and we might not be able to negate some
1624 constants. */
1625 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1626 if (tem && CONSTANT_CLASS_P (tem))
1627 return tem;
1628 break;
1629 }
1630
1631 case ABS_EXPR:
1632 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1633 return fold_abs_const (arg0, type);
1634 break;
1635
1636 case CONJ_EXPR:
1637 if (TREE_CODE (arg0) == COMPLEX_CST)
1638 {
1639 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1640 TREE_TYPE (type));
1641 return build_complex (type, TREE_REALPART (arg0), ipart);
1642 }
1643 break;
1644
1645 case BIT_NOT_EXPR:
1646 if (TREE_CODE (arg0) == INTEGER_CST)
1647 return fold_not_const (arg0, type);
1648 /* Perform BIT_NOT_EXPR on each element individually. */
1649 else if (TREE_CODE (arg0) == VECTOR_CST)
1650 {
1651 tree elem;
1652 unsigned count = VECTOR_CST_NELTS (arg0), i;
1653
1654 auto_vec<tree, 32> elements (count);
1655 for (i = 0; i < count; i++)
1656 {
1657 elem = VECTOR_CST_ELT (arg0, i);
1658 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1659 if (elem == NULL_TREE)
1660 break;
1661 elements.quick_push (elem);
1662 }
1663 if (i == count)
1664 return build_vector (type, elements);
1665 }
1666 break;
1667
1668 case TRUTH_NOT_EXPR:
1669 if (TREE_CODE (arg0) == INTEGER_CST)
1670 return constant_boolean_node (integer_zerop (arg0), type);
1671 break;
1672
1673 case REALPART_EXPR:
1674 if (TREE_CODE (arg0) == COMPLEX_CST)
1675 return fold_convert (type, TREE_REALPART (arg0));
1676 break;
1677
1678 case IMAGPART_EXPR:
1679 if (TREE_CODE (arg0) == COMPLEX_CST)
1680 return fold_convert (type, TREE_IMAGPART (arg0));
1681 break;
1682
1683 case VEC_UNPACK_LO_EXPR:
1684 case VEC_UNPACK_HI_EXPR:
1685 case VEC_UNPACK_FLOAT_LO_EXPR:
1686 case VEC_UNPACK_FLOAT_HI_EXPR:
1687 {
1688 unsigned int out_nelts, in_nelts, i;
1689 enum tree_code subcode;
1690
1691 if (TREE_CODE (arg0) != VECTOR_CST)
1692 return NULL_TREE;
1693
1694 in_nelts = VECTOR_CST_NELTS (arg0);
1695 out_nelts = in_nelts / 2;
1696 gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type));
1697
1698 unsigned int offset = 0;
1699 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1700 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1701 offset = out_nelts;
1702
1703 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1704 subcode = NOP_EXPR;
1705 else
1706 subcode = FLOAT_EXPR;
1707
1708 tree_vector_builder elts (type, out_nelts, 1);
1709 for (i = 0; i < out_nelts; i++)
1710 {
1711 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1712 VECTOR_CST_ELT (arg0, i + offset));
1713 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1714 return NULL_TREE;
1715 elts.quick_push (elt);
1716 }
1717
1718 return elts.build ();
1719 }
1720
1721 default:
1722 break;
1723 }
1724
1725 return NULL_TREE;
1726 }
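/* Editorial examples: const_unop (BIT_NOT_EXPR, type, 0) folds to the
   all-ones constant -1; const_unop (REALPART_EXPR, float_type, 1.0+2.0i)
   folds to 1.0; a VECTOR_CST under BIT_NOT_EXPR is handled one element
   at a time, giving up with NULL_TREE if any element cannot be folded.  */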
1727
1728 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1729 indicates which particular sizetype to create. */
1730
1731 tree
1732 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1733 {
1734 return build_int_cst (sizetype_tab[(int) kind], number);
1735 }
1736 \f
1737 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1738 is a tree code. The type of the result is taken from the operands.
1739 Both must be equivalent integer types, a la int_binop_types_match_p.
1740 If the operands are constant, so is the result. */
1741
1742 tree
1743 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1744 {
1745 tree type = TREE_TYPE (arg0);
1746
1747 if (arg0 == error_mark_node || arg1 == error_mark_node)
1748 return error_mark_node;
1749
1750 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1751 TREE_TYPE (arg1)));
1752
1753 /* Handle the special case of two integer constants faster. */
1754 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1755 {
1756 /* And some specific cases even faster than that. */
1757 if (code == PLUS_EXPR)
1758 {
1759 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1760 return arg1;
1761 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1762 return arg0;
1763 }
1764 else if (code == MINUS_EXPR)
1765 {
1766 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1767 return arg0;
1768 }
1769 else if (code == MULT_EXPR)
1770 {
1771 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1772 return arg1;
1773 }
1774
1775 /* Handle general case of two integer constants. For sizetype
1776 constant calculations we always want to know about overflow,
1777 even in the unsigned case. */
1778 return int_const_binop_1 (code, arg0, arg1, -1);
1779 }
1780
1781 return fold_build2_loc (loc, code, type, arg0, arg1);
1782 }
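/* For instance, size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds
   straight to size_int (12).  Note the -1 passed to int_const_binop_1
   above: for sizetype arithmetic overflow is always tracked, even though
   sizetype is unsigned (editorial note).  */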
1783
1784 /* Given two values, either both of sizetype or both of bitsizetype,
1785 compute the difference between the two values. Return the value
1786 in the signed type corresponding to the type of the operands. */
1787
1788 tree
1789 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1790 {
1791 tree type = TREE_TYPE (arg0);
1792 tree ctype;
1793
1794 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1795 TREE_TYPE (arg1)));
1796
1797 /* If the type is already signed, just do the simple thing. */
1798 if (!TYPE_UNSIGNED (type))
1799 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1800
1801 if (type == sizetype)
1802 ctype = ssizetype;
1803 else if (type == bitsizetype)
1804 ctype = sbitsizetype;
1805 else
1806 ctype = signed_type_for (type);
1807
1808 /* If either operand is not a constant, do the conversions to the signed
1809 type and subtract. The hardware will do the right thing with any
1810 overflow in the subtraction. */
1811 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1812 return size_binop_loc (loc, MINUS_EXPR,
1813 fold_convert_loc (loc, ctype, arg0),
1814 fold_convert_loc (loc, ctype, arg1));
1815
1816 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1817 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1818 overflow) and negate (which can't either). Special-case a result
1819 of zero while we're here. */
1820 if (tree_int_cst_equal (arg0, arg1))
1821 return build_int_cst (ctype, 0);
1822 else if (tree_int_cst_lt (arg1, arg0))
1823 return fold_convert_loc (loc, ctype,
1824 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1825 else
1826 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1827 fold_convert_loc (loc, ctype,
1828 size_binop_loc (loc,
1829 MINUS_EXPR,
1830 arg1, arg0)));
1831 }
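/* Editorial example: size_diffop on sizetype constants 4 and 12 yields
   ssizetype -8.  Because 12 > 4, the code subtracts 12 - 4 in the
   unsigned type, converts the 8 to ssizetype (which cannot overflow)
   and then subtracts it from zero there.  */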
1832 \f
1833 /* A subroutine of fold_convert_const handling conversions of an
1834 INTEGER_CST to another integer type. */
1835
1836 static tree
1837 fold_convert_const_int_from_int (tree type, const_tree arg1)
1838 {
1839 /* Given an integer constant, make new constant with new type,
1840 appropriately sign-extended or truncated. Use widest_int
1841 so that any extension is done according to ARG1's type. */
1842 return force_fit_type (type, wi::to_widest (arg1),
1843 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1844 TREE_OVERFLOW (arg1));
1845 }
1846
1847 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1848 to an integer type. */
1849
1850 static tree
1851 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1852 {
1853 bool overflow = false;
1854 tree t;
1855
1856 /* The following code implements the floating point to integer
1857 conversion rules required by the Java Language Specification,
1858 that IEEE NaNs are mapped to zero and values that overflow
1859 the target precision saturate, i.e. values greater than
1860 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1861 are mapped to INT_MIN. These semantics are allowed by the
1862 C and C++ standards that simply state that the behavior of
1863 FP-to-integer conversion is unspecified upon overflow. */
1864
1865 wide_int val;
1866 REAL_VALUE_TYPE r;
1867 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1868
1869 switch (code)
1870 {
1871 case FIX_TRUNC_EXPR:
1872 real_trunc (&r, VOIDmode, &x);
1873 break;
1874
1875 default:
1876 gcc_unreachable ();
1877 }
1878
1879 /* If R is NaN, return zero and show we have an overflow. */
1880 if (REAL_VALUE_ISNAN (r))
1881 {
1882 overflow = true;
1883 val = wi::zero (TYPE_PRECISION (type));
1884 }
1885
1886 /* See if R is less than the lower bound or greater than the
1887 upper bound. */
1888
1889 if (! overflow)
1890 {
1891 tree lt = TYPE_MIN_VALUE (type);
1892 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1893 if (real_less (&r, &l))
1894 {
1895 overflow = true;
1896 val = wi::to_wide (lt);
1897 }
1898 }
1899
1900 if (! overflow)
1901 {
1902 tree ut = TYPE_MAX_VALUE (type);
1903 if (ut)
1904 {
1905 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1906 if (real_less (&u, &r))
1907 {
1908 overflow = true;
1909 val = wi::to_wide (ut);
1910 }
1911 }
1912 }
1913
1914 if (! overflow)
1915 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1916
1917 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1918 return t;
1919 }
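
/* Example of the saturating semantics above (illustrative only):
   folding (int) 1.0e30 yields INT_MAX and folding (int) -1.0e30 yields
   INT_MIN, both with TREE_OVERFLOW set on the result; a NaN operand
   folds to 0, also with TREE_OVERFLOW set.  */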
1920
1921 /* A subroutine of fold_convert_const handling conversions of a
1922 FIXED_CST to an integer type. */
1923
1924 static tree
1925 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1926 {
1927 tree t;
1928 double_int temp, temp_trunc;
1929 scalar_mode mode;
1930
1931 /* Right shift FIXED_CST to temp by fbit. */
1932 temp = TREE_FIXED_CST (arg1).data;
1933 mode = TREE_FIXED_CST (arg1).mode;
1934 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1935 {
1936 temp = temp.rshift (GET_MODE_FBIT (mode),
1937 HOST_BITS_PER_DOUBLE_INT,
1938 SIGNED_FIXED_POINT_MODE_P (mode));
1939
1940 /* Left shift temp to temp_trunc by fbit. */
1941 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1942 HOST_BITS_PER_DOUBLE_INT,
1943 SIGNED_FIXED_POINT_MODE_P (mode));
1944 }
1945 else
1946 {
1947 temp = double_int_zero;
1948 temp_trunc = double_int_zero;
1949 }
1950
1951 /* If FIXED_CST is negative, we need to round the value toward 0.
1952 We do this by adding 1 to TEMP when the fractional bits are nonzero. */
1953 if (SIGNED_FIXED_POINT_MODE_P (mode)
1954 && temp_trunc.is_negative ()
1955 && TREE_FIXED_CST (arg1).data != temp_trunc)
1956 temp += double_int_one;
1957
1958 /* Given a fixed-point constant, make new constant with new type,
1959 appropriately sign-extended or truncated. */
1960 t = force_fit_type (type, temp, -1,
1961 (temp.is_negative ()
1962 && (TYPE_UNSIGNED (type)
1963 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1964 | TREE_OVERFLOW (arg1));
1965
1966 return t;
1967 }
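
/* For instance (illustrative): converting the signed fixed-point
   constant -2.5 to int first shifts out the fractional bits, giving
   -3; because the value is negative and the dropped fraction is
   nonzero, 1 is added back so the result rounds toward zero to -2.  */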
1968
1969 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1970 to another floating point type. */
1971
1972 static tree
1973 fold_convert_const_real_from_real (tree type, const_tree arg1)
1974 {
1975 REAL_VALUE_TYPE value;
1976 tree t;
1977
1978 /* Don't perform the operation if flag_signaling_nans is on
1979 and the operand is a signaling NaN. */
1980 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
1981 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
1982 return NULL_TREE;
1983
1984 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1985 t = build_real (type, value);
1986
1987 /* If converting an infinity or NAN to a representation that doesn't
1988 have one, set the overflow bit so that we can produce some kind of
1989 error message at the appropriate point if necessary. It's not the
1990 most user-friendly message, but it's better than nothing. */
1991 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1992 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1993 TREE_OVERFLOW (t) = 1;
1994 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1995 && !MODE_HAS_NANS (TYPE_MODE (type)))
1996 TREE_OVERFLOW (t) = 1;
1997 /* Regular overflow, conversion produced an infinity in a mode that
1998 can't represent them. */
1999 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2000 && REAL_VALUE_ISINF (value)
2001 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2002 TREE_OVERFLOW (t) = 1;
2003 else
2004 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2005 return t;
2006 }
2007
2008 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2009 to a floating point type. */
2010
2011 static tree
2012 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2013 {
2014 REAL_VALUE_TYPE value;
2015 tree t;
2016
2017 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2018 &TREE_FIXED_CST (arg1));
2019 t = build_real (type, value);
2020
2021 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2022 return t;
2023 }
2024
2025 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2026 to another fixed-point type. */
2027
2028 static tree
2029 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2030 {
2031 FIXED_VALUE_TYPE value;
2032 tree t;
2033 bool overflow_p;
2034
2035 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2036 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2037 t = build_fixed (type, value);
2038
2039 /* Propagate overflow flags. */
2040 if (overflow_p | TREE_OVERFLOW (arg1))
2041 TREE_OVERFLOW (t) = 1;
2042 return t;
2043 }
2044
2045 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2046 to a fixed-point type. */
2047
2048 static tree
2049 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2050 {
2051 FIXED_VALUE_TYPE value;
2052 tree t;
2053 bool overflow_p;
2054 double_int di;
2055
2056 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2057
2058 di.low = TREE_INT_CST_ELT (arg1, 0);
2059 if (TREE_INT_CST_NUNITS (arg1) == 1)
2060 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2061 else
2062 di.high = TREE_INT_CST_ELT (arg1, 1);
2063
2064 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2065 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2066 TYPE_SATURATING (type));
2067 t = build_fixed (type, value);
2068
2069 /* Propagate overflow flags. */
2070 if (overflow_p | TREE_OVERFLOW (arg1))
2071 TREE_OVERFLOW (t) = 1;
2072 return t;
2073 }
2074
2075 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2076 to a fixed-point type. */
2077
2078 static tree
2079 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2080 {
2081 FIXED_VALUE_TYPE value;
2082 tree t;
2083 bool overflow_p;
2084
2085 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2086 &TREE_REAL_CST (arg1),
2087 TYPE_SATURATING (type));
2088 t = build_fixed (type, value);
2089
2090 /* Propagate overflow flags. */
2091 if (overflow_p | TREE_OVERFLOW (arg1))
2092 TREE_OVERFLOW (t) = 1;
2093 return t;
2094 }
2095
2096 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2097 type TYPE. If no simplification can be done return NULL_TREE. */
2098
2099 static tree
2100 fold_convert_const (enum tree_code code, tree type, tree arg1)
2101 {
2102 if (TREE_TYPE (arg1) == type)
2103 return arg1;
2104
2105 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2106 || TREE_CODE (type) == OFFSET_TYPE)
2107 {
2108 if (TREE_CODE (arg1) == INTEGER_CST)
2109 return fold_convert_const_int_from_int (type, arg1);
2110 else if (TREE_CODE (arg1) == REAL_CST)
2111 return fold_convert_const_int_from_real (code, type, arg1);
2112 else if (TREE_CODE (arg1) == FIXED_CST)
2113 return fold_convert_const_int_from_fixed (type, arg1);
2114 }
2115 else if (TREE_CODE (type) == REAL_TYPE)
2116 {
2117 if (TREE_CODE (arg1) == INTEGER_CST)
2118 return build_real_from_int_cst (type, arg1);
2119 else if (TREE_CODE (arg1) == REAL_CST)
2120 return fold_convert_const_real_from_real (type, arg1);
2121 else if (TREE_CODE (arg1) == FIXED_CST)
2122 return fold_convert_const_real_from_fixed (type, arg1);
2123 }
2124 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2125 {
2126 if (TREE_CODE (arg1) == FIXED_CST)
2127 return fold_convert_const_fixed_from_fixed (type, arg1);
2128 else if (TREE_CODE (arg1) == INTEGER_CST)
2129 return fold_convert_const_fixed_from_int (type, arg1);
2130 else if (TREE_CODE (arg1) == REAL_CST)
2131 return fold_convert_const_fixed_from_real (type, arg1);
2132 }
2133 else if (TREE_CODE (type) == VECTOR_TYPE)
2134 {
2135 if (TREE_CODE (arg1) == VECTOR_CST
2136 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2137 {
2138 int len = VECTOR_CST_NELTS (arg1);
2139 tree elttype = TREE_TYPE (type);
2140 auto_vec<tree, 32> v (len);
2141 for (int i = 0; i < len; ++i)
2142 {
2143 tree elt = VECTOR_CST_ELT (arg1, i);
2144 tree cvt = fold_convert_const (code, elttype, elt);
2145 if (cvt == NULL_TREE)
2146 return NULL_TREE;
2147 v.quick_push (cvt);
2148 }
2149 return build_vector (type, v);
2150 }
2151 }
2152 return NULL_TREE;
2153 }
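
/* A minimal usage sketch (an assumed caller, not code from this file):

     tree two = build_real (double_type_node, dconst2);
     tree i = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, two);

   Here I is the INTEGER_CST 2, produced via
   fold_convert_const_int_from_real; a NULL_TREE return would mean the
   conversion could not be folded to a constant.  */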
2154
2155 /* Construct a vector of zero elements of vector type TYPE. */
2156
2157 static tree
2158 build_zero_vector (tree type)
2159 {
2160 tree t;
2161
2162 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2163 return build_vector_from_val (type, t);
2164 }
2165
2166 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2167
2168 bool
2169 fold_convertible_p (const_tree type, const_tree arg)
2170 {
2171 tree orig = TREE_TYPE (arg);
2172
2173 if (type == orig)
2174 return true;
2175
2176 if (TREE_CODE (arg) == ERROR_MARK
2177 || TREE_CODE (type) == ERROR_MARK
2178 || TREE_CODE (orig) == ERROR_MARK)
2179 return false;
2180
2181 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2182 return true;
2183
2184 switch (TREE_CODE (type))
2185 {
2186 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2187 case POINTER_TYPE: case REFERENCE_TYPE:
2188 case OFFSET_TYPE:
2189 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2190 || TREE_CODE (orig) == OFFSET_TYPE);
2191
2192 case REAL_TYPE:
2193 case FIXED_POINT_TYPE:
2194 case VECTOR_TYPE:
2195 case VOID_TYPE:
2196 return TREE_CODE (type) == TREE_CODE (orig);
2197
2198 default:
2199 return false;
2200 }
2201 }
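
/* For example (illustrative): an int expression is convertible to long
   with a NOP_EXPR (both are INTEGRAL), but not to double; the
   REAL_TYPE case requires ORIG to be a REAL_TYPE as well, since
   int-to-double needs a FLOAT_EXPR rather than a NOP_EXPR.  */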
2202
2203 /* Convert expression ARG to type TYPE. Used by the middle-end for
2204 simple conversions in preference to calling the front-end's convert. */
2205
2206 tree
2207 fold_convert_loc (location_t loc, tree type, tree arg)
2208 {
2209 tree orig = TREE_TYPE (arg);
2210 tree tem;
2211
2212 if (type == orig)
2213 return arg;
2214
2215 if (TREE_CODE (arg) == ERROR_MARK
2216 || TREE_CODE (type) == ERROR_MARK
2217 || TREE_CODE (orig) == ERROR_MARK)
2218 return error_mark_node;
2219
2220 switch (TREE_CODE (type))
2221 {
2222 case POINTER_TYPE:
2223 case REFERENCE_TYPE:
2224 /* Handle conversions between pointers to different address spaces. */
2225 if (POINTER_TYPE_P (orig)
2226 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2227 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2228 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2229 /* fall through */
2230
2231 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2232 case OFFSET_TYPE:
2233 if (TREE_CODE (arg) == INTEGER_CST)
2234 {
2235 tem = fold_convert_const (NOP_EXPR, type, arg);
2236 if (tem != NULL_TREE)
2237 return tem;
2238 }
2239 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2240 || TREE_CODE (orig) == OFFSET_TYPE)
2241 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2242 if (TREE_CODE (orig) == COMPLEX_TYPE)
2243 return fold_convert_loc (loc, type,
2244 fold_build1_loc (loc, REALPART_EXPR,
2245 TREE_TYPE (orig), arg));
2246 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2247 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2248 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2249
2250 case REAL_TYPE:
2251 if (TREE_CODE (arg) == INTEGER_CST)
2252 {
2253 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2254 if (tem != NULL_TREE)
2255 return tem;
2256 }
2257 else if (TREE_CODE (arg) == REAL_CST)
2258 {
2259 tem = fold_convert_const (NOP_EXPR, type, arg);
2260 if (tem != NULL_TREE)
2261 return tem;
2262 }
2263 else if (TREE_CODE (arg) == FIXED_CST)
2264 {
2265 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2266 if (tem != NULL_TREE)
2267 return tem;
2268 }
2269
2270 switch (TREE_CODE (orig))
2271 {
2272 case INTEGER_TYPE:
2273 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2274 case POINTER_TYPE: case REFERENCE_TYPE:
2275 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2276
2277 case REAL_TYPE:
2278 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2279
2280 case FIXED_POINT_TYPE:
2281 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2282
2283 case COMPLEX_TYPE:
2284 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2285 return fold_convert_loc (loc, type, tem);
2286
2287 default:
2288 gcc_unreachable ();
2289 }
2290
2291 case FIXED_POINT_TYPE:
2292 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2293 || TREE_CODE (arg) == REAL_CST)
2294 {
2295 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2296 if (tem != NULL_TREE)
2297 goto fold_convert_exit;
2298 }
2299
2300 switch (TREE_CODE (orig))
2301 {
2302 case FIXED_POINT_TYPE:
2303 case INTEGER_TYPE:
2304 case ENUMERAL_TYPE:
2305 case BOOLEAN_TYPE:
2306 case REAL_TYPE:
2307 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2308
2309 case COMPLEX_TYPE:
2310 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2311 return fold_convert_loc (loc, type, tem);
2312
2313 default:
2314 gcc_unreachable ();
2315 }
2316
2317 case COMPLEX_TYPE:
2318 switch (TREE_CODE (orig))
2319 {
2320 case INTEGER_TYPE:
2321 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2322 case POINTER_TYPE: case REFERENCE_TYPE:
2323 case REAL_TYPE:
2324 case FIXED_POINT_TYPE:
2325 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2326 fold_convert_loc (loc, TREE_TYPE (type), arg),
2327 fold_convert_loc (loc, TREE_TYPE (type),
2328 integer_zero_node));
2329 case COMPLEX_TYPE:
2330 {
2331 tree rpart, ipart;
2332
2333 if (TREE_CODE (arg) == COMPLEX_EXPR)
2334 {
2335 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2336 TREE_OPERAND (arg, 0));
2337 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2338 TREE_OPERAND (arg, 1));
2339 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2340 }
2341
2342 arg = save_expr (arg);
2343 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2344 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2345 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2346 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2347 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2348 }
2349
2350 default:
2351 gcc_unreachable ();
2352 }
2353
2354 case VECTOR_TYPE:
2355 if (integer_zerop (arg))
2356 return build_zero_vector (type);
2357 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2358 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2359 || TREE_CODE (orig) == VECTOR_TYPE);
2360 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2361
2362 case VOID_TYPE:
2363 tem = fold_ignored_result (arg);
2364 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2365
2366 default:
2367 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2368 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2369 gcc_unreachable ();
2370 }
2371 fold_convert_exit:
2372 protected_set_expr_location_unshare (tem, loc);
2373 return tem;
2374 }
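
/* Usage sketches (illustrative, assuming an integer expression X and a
   _Complex double expression Z):

     fold_convert_loc (loc, double_type_node, x);  /+ builds FLOAT_EXPR +/
     fold_convert_loc (loc, double_type_node, z);  /+ folds to REALPART_EXPR +/

   Constant operands are folded immediately through fold_convert_const.  */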
2375 \f
2376 /* Return false if expr can be assumed not to be an lvalue, true
2377 otherwise. */
2378
2379 static bool
2380 maybe_lvalue_p (const_tree x)
2381 {
2382 /* We only need to wrap lvalue tree codes. */
2383 switch (TREE_CODE (x))
2384 {
2385 case VAR_DECL:
2386 case PARM_DECL:
2387 case RESULT_DECL:
2388 case LABEL_DECL:
2389 case FUNCTION_DECL:
2390 case SSA_NAME:
2391
2392 case COMPONENT_REF:
2393 case MEM_REF:
2394 case INDIRECT_REF:
2395 case ARRAY_REF:
2396 case ARRAY_RANGE_REF:
2397 case BIT_FIELD_REF:
2398 case OBJ_TYPE_REF:
2399
2400 case REALPART_EXPR:
2401 case IMAGPART_EXPR:
2402 case PREINCREMENT_EXPR:
2403 case PREDECREMENT_EXPR:
2404 case SAVE_EXPR:
2405 case TRY_CATCH_EXPR:
2406 case WITH_CLEANUP_EXPR:
2407 case COMPOUND_EXPR:
2408 case MODIFY_EXPR:
2409 case TARGET_EXPR:
2410 case COND_EXPR:
2411 case BIND_EXPR:
2412 break;
2413
2414 default:
2415 /* Assume the worst for front-end tree codes. */
2416 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2417 break;
2418 return false;
2419 }
2420
2421 return true;
2422 }
2423
2424 /* Return an expr equal to X but certainly not valid as an lvalue. */
2425
2426 tree
2427 non_lvalue_loc (location_t loc, tree x)
2428 {
2429 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2430 us. */
2431 if (in_gimple_form)
2432 return x;
2433
2434 if (! maybe_lvalue_p (x))
2435 return x;
2436 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2437 }
2438
2439 /* When pedantic, return an expr equal to X but certainly not valid as a
2440 pedantic lvalue. Otherwise, return X. */
2441
2442 static tree
2443 pedantic_non_lvalue_loc (location_t loc, tree x)
2444 {
2445 return protected_set_expr_location_unshare (x, loc);
2446 }
2447 \f
2448 /* Given a tree comparison code, return the code that is the logical inverse.
2449 It is generally not safe to do this for floating-point comparisons, except
2450 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2451 ERROR_MARK in this case. */
2452
2453 enum tree_code
2454 invert_tree_comparison (enum tree_code code, bool honor_nans)
2455 {
2456 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2457 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2458 return ERROR_MARK;
2459
2460 switch (code)
2461 {
2462 case EQ_EXPR:
2463 return NE_EXPR;
2464 case NE_EXPR:
2465 return EQ_EXPR;
2466 case GT_EXPR:
2467 return honor_nans ? UNLE_EXPR : LE_EXPR;
2468 case GE_EXPR:
2469 return honor_nans ? UNLT_EXPR : LT_EXPR;
2470 case LT_EXPR:
2471 return honor_nans ? UNGE_EXPR : GE_EXPR;
2472 case LE_EXPR:
2473 return honor_nans ? UNGT_EXPR : GT_EXPR;
2474 case LTGT_EXPR:
2475 return UNEQ_EXPR;
2476 case UNEQ_EXPR:
2477 return LTGT_EXPR;
2478 case UNGT_EXPR:
2479 return LE_EXPR;
2480 case UNGE_EXPR:
2481 return LT_EXPR;
2482 case UNLT_EXPR:
2483 return GE_EXPR;
2484 case UNLE_EXPR:
2485 return GT_EXPR;
2486 case ORDERED_EXPR:
2487 return UNORDERED_EXPR;
2488 case UNORDERED_EXPR:
2489 return ORDERED_EXPR;
2490 default:
2491 gcc_unreachable ();
2492 }
2493 }
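
/* Examples (illustrative): invert_tree_comparison (LT_EXPR, false) is
   GE_EXPR, while invert_tree_comparison (LT_EXPR, true) is UNGE_EXPR,
   because !(a < b) must also hold when an operand is a NaN.  With both
   honor_nans and flag_trapping_math set, the LT_EXPR case instead
   returns ERROR_MARK: the unordered replacement would not raise the
   invalid exception on NaN operands that the original comparison
   does.  */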
2494
2495 /* Similar, but return the comparison that results if the operands are
2496 swapped. This is safe for floating-point. */
2497
2498 enum tree_code
2499 swap_tree_comparison (enum tree_code code)
2500 {
2501 switch (code)
2502 {
2503 case EQ_EXPR:
2504 case NE_EXPR:
2505 case ORDERED_EXPR:
2506 case UNORDERED_EXPR:
2507 case LTGT_EXPR:
2508 case UNEQ_EXPR:
2509 return code;
2510 case GT_EXPR:
2511 return LT_EXPR;
2512 case GE_EXPR:
2513 return LE_EXPR;
2514 case LT_EXPR:
2515 return GT_EXPR;
2516 case LE_EXPR:
2517 return GE_EXPR;
2518 case UNGT_EXPR:
2519 return UNLT_EXPR;
2520 case UNGE_EXPR:
2521 return UNLE_EXPR;
2522 case UNLT_EXPR:
2523 return UNGT_EXPR;
2524 case UNLE_EXPR:
2525 return UNGE_EXPR;
2526 default:
2527 gcc_unreachable ();
2528 }
2529 }
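
/* For example (illustrative): swap_tree_comparison (GT_EXPR) is
   LT_EXPR, since a > b is equivalent to b < a; symmetric codes such as
   EQ_EXPR and UNEQ_EXPR map to themselves.  */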
2530
2531
2532 /* Convert a comparison tree code from an enum tree_code representation
2533 into a compcode bit-based encoding. This function is the inverse of
2534 compcode_to_comparison. */
2535
2536 static enum comparison_code
2537 comparison_to_compcode (enum tree_code code)
2538 {
2539 switch (code)
2540 {
2541 case LT_EXPR:
2542 return COMPCODE_LT;
2543 case EQ_EXPR:
2544 return COMPCODE_EQ;
2545 case LE_EXPR:
2546 return COMPCODE_LE;
2547 case GT_EXPR:
2548 return COMPCODE_GT;
2549 case NE_EXPR:
2550 return COMPCODE_NE;
2551 case GE_EXPR:
2552 return COMPCODE_GE;
2553 case ORDERED_EXPR:
2554 return COMPCODE_ORD;
2555 case UNORDERED_EXPR:
2556 return COMPCODE_UNORD;
2557 case UNLT_EXPR:
2558 return COMPCODE_UNLT;
2559 case UNEQ_EXPR:
2560 return COMPCODE_UNEQ;
2561 case UNLE_EXPR:
2562 return COMPCODE_UNLE;
2563 case UNGT_EXPR:
2564 return COMPCODE_UNGT;
2565 case LTGT_EXPR:
2566 return COMPCODE_LTGT;
2567 case UNGE_EXPR:
2568 return COMPCODE_UNGE;
2569 default:
2570 gcc_unreachable ();
2571 }
2572 }
2573
2574 /* Convert a compcode bit-based encoding of a comparison operator back
2575 to GCC's enum tree_code representation. This function is the
2576 inverse of comparison_to_compcode. */
2577
2578 static enum tree_code
2579 compcode_to_comparison (enum comparison_code code)
2580 {
2581 switch (code)
2582 {
2583 case COMPCODE_LT:
2584 return LT_EXPR;
2585 case COMPCODE_EQ:
2586 return EQ_EXPR;
2587 case COMPCODE_LE:
2588 return LE_EXPR;
2589 case COMPCODE_GT:
2590 return GT_EXPR;
2591 case COMPCODE_NE:
2592 return NE_EXPR;
2593 case COMPCODE_GE:
2594 return GE_EXPR;
2595 case COMPCODE_ORD:
2596 return ORDERED_EXPR;
2597 case COMPCODE_UNORD:
2598 return UNORDERED_EXPR;
2599 case COMPCODE_UNLT:
2600 return UNLT_EXPR;
2601 case COMPCODE_UNEQ:
2602 return UNEQ_EXPR;
2603 case COMPCODE_UNLE:
2604 return UNLE_EXPR;
2605 case COMPCODE_UNGT:
2606 return UNGT_EXPR;
2607 case COMPCODE_LTGT:
2608 return LTGT_EXPR;
2609 case COMPCODE_UNGE:
2610 return UNGE_EXPR;
2611 default:
2612 gcc_unreachable ();
2613 }
2614 }
2615
2616 /* Return a tree for the comparison which is the combination of
2617 doing the AND or OR (depending on CODE) of the two operations LCODE
2618 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2619 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2620 if this makes the transformation invalid. */
2621
2622 tree
2623 combine_comparisons (location_t loc,
2624 enum tree_code code, enum tree_code lcode,
2625 enum tree_code rcode, tree truth_type,
2626 tree ll_arg, tree lr_arg)
2627 {
2628 bool honor_nans = HONOR_NANS (ll_arg);
2629 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2630 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2631 int compcode;
2632
2633 switch (code)
2634 {
2635 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2636 compcode = lcompcode & rcompcode;
2637 break;
2638
2639 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2640 compcode = lcompcode | rcompcode;
2641 break;
2642
2643 default:
2644 return NULL_TREE;
2645 }
2646
2647 if (!honor_nans)
2648 {
2649 /* Eliminate unordered comparisons, as well as LTGT and ORD
2650 which are not used unless the mode has NaNs. */
2651 compcode &= ~COMPCODE_UNORD;
2652 if (compcode == COMPCODE_LTGT)
2653 compcode = COMPCODE_NE;
2654 else if (compcode == COMPCODE_ORD)
2655 compcode = COMPCODE_TRUE;
2656 }
2657 else if (flag_trapping_math)
2658 {
2659 /* Check that the original operation and the optimized ones will trap
2660 under the same condition. */
2661 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2662 && (lcompcode != COMPCODE_EQ)
2663 && (lcompcode != COMPCODE_ORD);
2664 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2665 && (rcompcode != COMPCODE_EQ)
2666 && (rcompcode != COMPCODE_ORD);
2667 bool trap = (compcode & COMPCODE_UNORD) == 0
2668 && (compcode != COMPCODE_EQ)
2669 && (compcode != COMPCODE_ORD);
2670
2671 /* In a short-circuited boolean expression the LHS might be
2672 such that the RHS, if evaluated, will never trap. For
2673 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2674 if neither x nor y is NaN. (This is a mixed blessing: for
2675 example, the expression above will never trap, hence
2676 optimizing it to x < y would be invalid). */
2677 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2678 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2679 rtrap = false;
2680
2681 /* If the comparison was short-circuited, and only the RHS
2682 trapped, we may now generate a spurious trap. */
2683 if (rtrap && !ltrap
2684 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2685 return NULL_TREE;
2686
2687 /* If we changed the conditions that cause a trap, we lose. */
2688 if ((ltrap || rtrap) != trap)
2689 return NULL_TREE;
2690 }
2691
2692 if (compcode == COMPCODE_TRUE)
2693 return constant_boolean_node (true, truth_type);
2694 else if (compcode == COMPCODE_FALSE)
2695 return constant_boolean_node (false, truth_type);
2696 else
2697 {
2698 enum tree_code tcode;
2699
2700 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2701 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2702 }
2703 }
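
/* Worked example (illustrative): for (x < y) || (x == y) on integers,
   ORing the bit codes for LT and EQ yields the code for LE, so the
   call folds the pair to x <= y; for (x < y) && (x == y) the AND of
   the two codes is COMPCODE_FALSE and the result is a constant false
   node.  */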
2704 \f
2705 /* Return nonzero if two operands (typically of the same tree node)
2706 are necessarily equal. FLAGS modifies behavior as follows:
2707
2708 If OEP_ONLY_CONST is set, only return nonzero for constants.
2709 This function tests whether the operands are indistinguishable;
2710 it does not test whether they are equal using C's == operation.
2711 The distinction is important for IEEE floating point, because
2712 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2713 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2714
2715 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2716 even though it may hold multiple values during a function.
2717 This is because a GCC tree node guarantees that nothing else is
2718 executed between the evaluation of its "operands" (which may often
2719 be evaluated in arbitrary order). Hence if the operands themselves
2720 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2721 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2722 unset means assuming isochronic (or instantaneous) tree equivalence.
2723 Unless comparing arbitrary expression trees, such as from different
2724 statements, this flag can usually be left unset.
2725
2726 If OEP_PURE_SAME is set, then pure functions with identical arguments
2727 are considered the same. It is used when the caller has other ways
2728 to ensure that global memory is unchanged in between.
2729
2730 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2731 not values of expressions.
2732
2733 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2734 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2735
2736 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2737 any operand with side effects. This is unnecessarily conservative in the
2738 case we know that arg0 and arg1 are in disjoint code paths (such as in
2739 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2740 addresses with TREE_CONSTANT flag set so we know that &var == &var
2741 even if var is volatile. */
2742
2743 int
2744 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2745 {
2746 /* When checking, verify at the outermost operand_equal_p call that
2747 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2748 hash value. */
2749 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2750 {
2751 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2752 {
2753 if (arg0 != arg1)
2754 {
2755 inchash::hash hstate0 (0), hstate1 (0);
2756 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2757 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2758 hashval_t h0 = hstate0.end ();
2759 hashval_t h1 = hstate1.end ();
2760 gcc_assert (h0 == h1);
2761 }
2762 return 1;
2763 }
2764 else
2765 return 0;
2766 }
2767
2768 /* If either is ERROR_MARK, they aren't equal. */
2769 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2770 || TREE_TYPE (arg0) == error_mark_node
2771 || TREE_TYPE (arg1) == error_mark_node)
2772 return 0;
2773
2774 /* Similar, if either does not have a type (like a released SSA name),
2775 they aren't equal. */
2776 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2777 return 0;
2778
2779 /* We cannot consider pointers to different address space equal. */
2780 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2781 && POINTER_TYPE_P (TREE_TYPE (arg1))
2782 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2783 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2784 return 0;
2785
2786 /* Check equality of integer constants before bailing out due to
2787 precision differences. */
2788 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2789 {
2790 /* Address of INTEGER_CST is not defined; check that we did not forget
2791 to drop the OEP_ADDRESS_OF flags. */
2792 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2793 return tree_int_cst_equal (arg0, arg1);
2794 }
2795
2796 if (!(flags & OEP_ADDRESS_OF))
2797 {
2798 /* If both types don't have the same signedness, then we can't consider
2799 them equal. We must check this before the STRIP_NOPS calls
2800 because they may change the signedness of the arguments. As pointers
2801 strictly don't have a signedness, require either two pointers or
2802 two non-pointers as well. */
2803 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2804 || POINTER_TYPE_P (TREE_TYPE (arg0))
2805 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2806 return 0;
2807
2808 /* If both types don't have the same precision, then it is not safe
2809 to strip NOPs. */
2810 if (element_precision (TREE_TYPE (arg0))
2811 != element_precision (TREE_TYPE (arg1)))
2812 return 0;
2813
2814 STRIP_NOPS (arg0);
2815 STRIP_NOPS (arg1);
2816 }
2817 #if 0
2818 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2819 sanity check once the issue is solved. */
2820 else
2821 /* Addresses of conversions and SSA_NAMEs (and many other things)
2822 are not defined. Check that we did not forget to drop the
2823 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2824 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2825 && TREE_CODE (arg0) != SSA_NAME);
2826 #endif
2827
2828 /* In case both args are comparisons but with different comparison
2829 code, try to swap the comparison operands of one arg to produce
2830 a match and compare that variant. */
2831 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2832 && COMPARISON_CLASS_P (arg0)
2833 && COMPARISON_CLASS_P (arg1))
2834 {
2835 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2836
2837 if (TREE_CODE (arg0) == swap_code)
2838 return operand_equal_p (TREE_OPERAND (arg0, 0),
2839 TREE_OPERAND (arg1, 1), flags)
2840 && operand_equal_p (TREE_OPERAND (arg0, 1),
2841 TREE_OPERAND (arg1, 0), flags);
2842 }
2843
2844 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2845 {
2846 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2847 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2848 ;
2849 else if (flags & OEP_ADDRESS_OF)
2850 {
2851 /* If we are interested in comparing addresses ignore
2852 MEM_REF wrappings of the base that can appear just for
2853 TBAA reasons. */
2854 if (TREE_CODE (arg0) == MEM_REF
2855 && DECL_P (arg1)
2856 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2857 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2858 && integer_zerop (TREE_OPERAND (arg0, 1)))
2859 return 1;
2860 else if (TREE_CODE (arg1) == MEM_REF
2861 && DECL_P (arg0)
2862 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2863 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2864 && integer_zerop (TREE_OPERAND (arg1, 1)))
2865 return 1;
2866 return 0;
2867 }
2868 else
2869 return 0;
2870 }
2871
2872 /* When not checking addresses, this is needed for conversions and for
2873 COMPONENT_REF. Might as well play it safe and always test this. */
2874 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2875 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2876 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2877 && !(flags & OEP_ADDRESS_OF)))
2878 return 0;
2879
2880 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2881 We don't care about side effects in that case because the SAVE_EXPR
2882 takes care of that for us. In all other cases, two expressions are
2883 equal if they have no side effects. If we have two identical
2884 expressions with side effects that should be treated the same due
2885 to the only side effects being identical SAVE_EXPR's, that will
2886 be detected in the recursive calls below.
2887 If we are taking an invariant address of two identical objects
2888 they are necessarily equal as well. */
2889 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2890 && (TREE_CODE (arg0) == SAVE_EXPR
2891 || (flags & OEP_MATCH_SIDE_EFFECTS)
2892 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2893 return 1;
2894
2895 /* Next handle constant cases, those for which we can return 1 even
2896 if ONLY_CONST is set. */
2897 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2898 switch (TREE_CODE (arg0))
2899 {
2900 case INTEGER_CST:
2901 return tree_int_cst_equal (arg0, arg1);
2902
2903 case FIXED_CST:
2904 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2905 TREE_FIXED_CST (arg1));
2906
2907 case REAL_CST:
2908 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2909 return 1;
2910
2911
2912 if (!HONOR_SIGNED_ZEROS (arg0))
2913 {
2914 /* If we do not distinguish between signed and unsigned zero,
2915 consider them equal. */
2916 if (real_zerop (arg0) && real_zerop (arg1))
2917 return 1;
2918 }
2919 return 0;
2920
2921 case VECTOR_CST:
2922 {
2923 unsigned i;
2924
2925 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2926 return 0;
2927
2928 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2929 {
2930 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2931 VECTOR_CST_ELT (arg1, i), flags))
2932 return 0;
2933 }
2934 return 1;
2935 }
2936
2937 case COMPLEX_CST:
2938 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2939 flags)
2940 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2941 flags));
2942
2943 case STRING_CST:
2944 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2945 && ! memcmp (TREE_STRING_POINTER (arg0),
2946 TREE_STRING_POINTER (arg1),
2947 TREE_STRING_LENGTH (arg0)));
2948
2949 case ADDR_EXPR:
2950 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2951 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2952 flags | OEP_ADDRESS_OF
2953 | OEP_MATCH_SIDE_EFFECTS);
2954 case CONSTRUCTOR:
2955 /* In GIMPLE empty constructors are allowed in initializers of
2956 aggregates. */
2957 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
2958 default:
2959 break;
2960 }
2961
2962 if (flags & OEP_ONLY_CONST)
2963 return 0;
2964
2965 /* Define macros to test an operand from arg0 and arg1 for equality and a
2966 variant that allows null and views null as being different from any
2967 non-null value. In the latter case, if either is null, then both
2968 must be; otherwise, do the normal comparison. */
2969 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2970 TREE_OPERAND (arg1, N), flags)
2971
2972 #define OP_SAME_WITH_NULL(N) \
2973 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2974 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2975
2976 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2977 {
2978 case tcc_unary:
2979 /* Two conversions are equal only if signedness and modes match. */
2980 switch (TREE_CODE (arg0))
2981 {
2982 CASE_CONVERT:
2983 case FIX_TRUNC_EXPR:
2984 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2985 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2986 return 0;
2987 break;
2988 default:
2989 break;
2990 }
2991
2992 return OP_SAME (0);
2993
2994
2995 case tcc_comparison:
2996 case tcc_binary:
2997 if (OP_SAME (0) && OP_SAME (1))
2998 return 1;
2999
3000 /* For commutative ops, allow the other order. */
3001 return (commutative_tree_code (TREE_CODE (arg0))
3002 && operand_equal_p (TREE_OPERAND (arg0, 0),
3003 TREE_OPERAND (arg1, 1), flags)
3004 && operand_equal_p (TREE_OPERAND (arg0, 1),
3005 TREE_OPERAND (arg1, 0), flags));
3006
3007 case tcc_reference:
3008 /* If either of the pointer (or reference) expressions we are
3009 dereferencing contains a side effect, these cannot be equal,
3010 but their addresses can be. */
3011 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3012 && (TREE_SIDE_EFFECTS (arg0)
3013 || TREE_SIDE_EFFECTS (arg1)))
3014 return 0;
3015
3016 switch (TREE_CODE (arg0))
3017 {
3018 case INDIRECT_REF:
3019 if (!(flags & OEP_ADDRESS_OF)
3020 && (TYPE_ALIGN (TREE_TYPE (arg0))
3021 != TYPE_ALIGN (TREE_TYPE (arg1))))
3022 return 0;
3023 flags &= ~OEP_ADDRESS_OF;
3024 return OP_SAME (0);
3025
3026 case IMAGPART_EXPR:
3027 /* Require the same offset. */
3028 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3029 TYPE_SIZE (TREE_TYPE (arg1)),
3030 flags & ~OEP_ADDRESS_OF))
3031 return 0;
3032
3033 /* Fallthru. */
3034 case REALPART_EXPR:
3035 case VIEW_CONVERT_EXPR:
3036 return OP_SAME (0);
3037
3038 case TARGET_MEM_REF:
3039 case MEM_REF:
3040 if (!(flags & OEP_ADDRESS_OF))
3041 {
3042 /* Require equal access sizes.  */
3043 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3044 && (!TYPE_SIZE (TREE_TYPE (arg0))
3045 || !TYPE_SIZE (TREE_TYPE (arg1))
3046 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3047 TYPE_SIZE (TREE_TYPE (arg1)),
3048 flags)))
3049 return 0;
3050 /* Verify that access happens in similar types. */
3051 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3052 return 0;
3053 /* Verify that accesses are TBAA compatible. */
3054 if (!alias_ptr_types_compatible_p
3055 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3056 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3057 || (MR_DEPENDENCE_CLIQUE (arg0)
3058 != MR_DEPENDENCE_CLIQUE (arg1))
3059 || (MR_DEPENDENCE_BASE (arg0)
3060 != MR_DEPENDENCE_BASE (arg1)))
3061 return 0;
3062 /* Verify that alignment is compatible. */
3063 if (TYPE_ALIGN (TREE_TYPE (arg0))
3064 != TYPE_ALIGN (TREE_TYPE (arg1)))
3065 return 0;
3066 }
3067 flags &= ~OEP_ADDRESS_OF;
3068 return (OP_SAME (0) && OP_SAME (1)
3069 /* TARGET_MEM_REFs require equal extra operands. */
3070 && (TREE_CODE (arg0) != TARGET_MEM_REF
3071 || (OP_SAME_WITH_NULL (2)
3072 && OP_SAME_WITH_NULL (3)
3073 && OP_SAME_WITH_NULL (4))));
3074
3075 case ARRAY_REF:
3076 case ARRAY_RANGE_REF:
3077 if (!OP_SAME (0))
3078 return 0;
3079 flags &= ~OEP_ADDRESS_OF;
3080 /* First compare the array index by value when it is constant, as the
3081 indexes may have different types but the same value here. */
3082 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3083 TREE_OPERAND (arg1, 1))
3084 || OP_SAME (1))
3085 && OP_SAME_WITH_NULL (2)
3086 && OP_SAME_WITH_NULL (3)
3087 /* Compare low bound and element size as with OEP_ADDRESS_OF
3088 we have to account for the offset of the ref. */
3089 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3090 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3091 || (operand_equal_p (array_ref_low_bound
3092 (CONST_CAST_TREE (arg0)),
3093 array_ref_low_bound
3094 (CONST_CAST_TREE (arg1)), flags)
3095 && operand_equal_p (array_ref_element_size
3096 (CONST_CAST_TREE (arg0)),
3097 array_ref_element_size
3098 (CONST_CAST_TREE (arg1)),
3099 flags))));
3100
3101 case COMPONENT_REF:
3102 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3103 may be NULL when we're called to compare MEM_EXPRs. */
3104 if (!OP_SAME_WITH_NULL (0)
3105 || !OP_SAME (1))
3106 return 0;
3107 flags &= ~OEP_ADDRESS_OF;
3108 return OP_SAME_WITH_NULL (2);
3109
3110 case BIT_FIELD_REF:
3111 if (!OP_SAME (0))
3112 return 0;
3113 flags &= ~OEP_ADDRESS_OF;
3114 return OP_SAME (1) && OP_SAME (2);
3115
3116 default:
3117 return 0;
3118 }
3119
3120 case tcc_expression:
3121 switch (TREE_CODE (arg0))
3122 {
3123 case ADDR_EXPR:
3124 /* Be sure we pass right ADDRESS_OF flag. */
3125 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3126 return operand_equal_p (TREE_OPERAND (arg0, 0),
3127 TREE_OPERAND (arg1, 0),
3128 flags | OEP_ADDRESS_OF);
3129
3130 case TRUTH_NOT_EXPR:
3131 return OP_SAME (0);
3132
3133 case TRUTH_ANDIF_EXPR:
3134 case TRUTH_ORIF_EXPR:
3135 return OP_SAME (0) && OP_SAME (1);
3136
3137 case FMA_EXPR:
3138 case WIDEN_MULT_PLUS_EXPR:
3139 case WIDEN_MULT_MINUS_EXPR:
3140 if (!OP_SAME (2))
3141 return 0;
3142 /* The multiplication operands are commutative. */
3143 /* FALLTHRU */
3144
3145 case TRUTH_AND_EXPR:
3146 case TRUTH_OR_EXPR:
3147 case TRUTH_XOR_EXPR:
3148 if (OP_SAME (0) && OP_SAME (1))
3149 return 1;
3150
3151 /* Otherwise take into account this is a commutative operation. */
3152 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3153 TREE_OPERAND (arg1, 1), flags)
3154 && operand_equal_p (TREE_OPERAND (arg0, 1),
3155 TREE_OPERAND (arg1, 0), flags));
3156
3157 case COND_EXPR:
3158 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3159 return 0;
3160 flags &= ~OEP_ADDRESS_OF;
3161 return OP_SAME (0);
3162
3163 case BIT_INSERT_EXPR:
3164 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3165 of op1.  We need to check that the two precisions are the same. */
3166 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3167 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3168 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3169 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3170 return false;
3171 /* FALLTHRU */
3172
3173 case VEC_COND_EXPR:
3174 case DOT_PROD_EXPR:
3175 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3176
3177 case MODIFY_EXPR:
3178 case INIT_EXPR:
3179 case COMPOUND_EXPR:
3180 case PREDECREMENT_EXPR:
3181 case PREINCREMENT_EXPR:
3182 case POSTDECREMENT_EXPR:
3183 case POSTINCREMENT_EXPR:
3184 if (flags & OEP_LEXICOGRAPHIC)
3185 return OP_SAME (0) && OP_SAME (1);
3186 return 0;
3187
3188 case CLEANUP_POINT_EXPR:
3189 case EXPR_STMT:
3190 if (flags & OEP_LEXICOGRAPHIC)
3191 return OP_SAME (0);
3192 return 0;
3193
3194 default:
3195 return 0;
3196 }
3197
3198 case tcc_vl_exp:
3199 switch (TREE_CODE (arg0))
3200 {
3201 case CALL_EXPR:
3202 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3203 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3204 /* If the two CALL_EXPRs are not both internal calls or both
3205 normal function calls, then they are not equal. */
3206 return 0;
3207 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3208 {
3209 /* If the CALL_EXPRs call different internal functions, then they
3210 are not equal. */
3211 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3212 return 0;
3213 }
3214 else
3215 {
3216 /* If the CALL_EXPRs call different functions, then they are not
3217 equal. */
3218 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3219 flags))
3220 return 0;
3221 }
3222
3223 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3224 {
3225 unsigned int cef = call_expr_flags (arg0);
3226 if (flags & OEP_PURE_SAME)
3227 cef &= ECF_CONST | ECF_PURE;
3228 else
3229 cef &= ECF_CONST;
3230 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3231 return 0;
3232 }
3233
3234 /* Now see if all the arguments are the same. */
3235 {
3236 const_call_expr_arg_iterator iter0, iter1;
3237 const_tree a0, a1;
3238 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3239 a1 = first_const_call_expr_arg (arg1, &iter1);
3240 a0 && a1;
3241 a0 = next_const_call_expr_arg (&iter0),
3242 a1 = next_const_call_expr_arg (&iter1))
3243 if (! operand_equal_p (a0, a1, flags))
3244 return 0;
3245
3246 /* If we get here and both argument lists are exhausted
3247 then the CALL_EXPRs are equal. */
3248 return ! (a0 || a1);
3249 }
3250 default:
3251 return 0;
3252 }
3253
3254 case tcc_declaration:
3255 /* Consider __builtin_sqrt equal to sqrt. */
3256 return (TREE_CODE (arg0) == FUNCTION_DECL
3257 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3258 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3259 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3260
3261 case tcc_exceptional:
3262 if (TREE_CODE (arg0) == CONSTRUCTOR)
3263 {
3264 /* In GIMPLE constructors are used only to build vectors from
3265 elements. Individual elements in the constructor must be
3266 indexed in increasing order and form an initial sequence.
3267
3268 We make no effort to compare constructors in GENERIC.
3269 (see sem_variable::equals in ipa-icf which can do so for
3270 constants). */
3271 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3272 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3273 return 0;
3274
3275 /* Be sure that the vectors constructed have the same representation.
3276 So far we have only tested that element precision and modes match.
3277 Vectors may be BLKmode, so also check that the number of
3278 parts match. */
3279 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3280 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3281 return 0;
3282
3283 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3284 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3285 unsigned int len = vec_safe_length (v0);
3286
3287 if (len != vec_safe_length (v1))
3288 return 0;
3289
3290 for (unsigned int i = 0; i < len; i++)
3291 {
3292 constructor_elt *c0 = &(*v0)[i];
3293 constructor_elt *c1 = &(*v1)[i];
3294
3295 if (!operand_equal_p (c0->value, c1->value, flags)
3296 /* In GIMPLE the indexes can be either NULL or matching i.
3297 Double check this so we won't get false
3298 positives for GENERIC. */
3299 || (c0->index
3300 && (TREE_CODE (c0->index) != INTEGER_CST
3301 || !compare_tree_int (c0->index, i)))
3302 || (c1->index
3303 && (TREE_CODE (c1->index) != INTEGER_CST
3304 || !compare_tree_int (c1->index, i))))
3305 return 0;
3306 }
3307 return 1;
3308 }
3309 else if (TREE_CODE (arg0) == STATEMENT_LIST
3310 && (flags & OEP_LEXICOGRAPHIC))
3311 {
3312 /* Compare the STATEMENT_LISTs. */
3313 tree_stmt_iterator tsi1, tsi2;
3314 tree body1 = CONST_CAST_TREE (arg0);
3315 tree body2 = CONST_CAST_TREE (arg1);
3316 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3317 tsi_next (&tsi1), tsi_next (&tsi2))
3318 {
3319 /* The lists don't have the same number of statements. */
3320 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3321 return 0;
3322 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3323 return 1;
3324 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3325 OEP_LEXICOGRAPHIC))
3326 return 0;
3327 }
3328 }
3329 return 0;
3330
3331 case tcc_statement:
3332 switch (TREE_CODE (arg0))
3333 {
3334 case RETURN_EXPR:
3335 if (flags & OEP_LEXICOGRAPHIC)
3336 return OP_SAME_WITH_NULL (0);
3337 return 0;
3338 default:
3339 return 0;
3340 }
3341
3342 default:
3343 return 0;
3344 }
3345
3346 #undef OP_SAME
3347 #undef OP_SAME_WITH_NULL
3348 }
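
/* Usage sketches (illustrative): operand_equal_p (x, x, 0) returns
   nonzero for a side-effect-free X, while two syntactically identical
   volatile loads only compare equal under OEP_MATCH_SIDE_EFFECTS;
   operand_equal_p (ref0, ref1, OEP_ADDRESS_OF) compares the two
   references as addresses, ignoring, e.g., the alignment of the
   accesses.  */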
3349 \f
3350 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3351 with a different signedness or a narrower precision. */
3352
3353 static bool
3354 operand_equal_for_comparison_p (tree arg0, tree arg1)
3355 {
3356 if (operand_equal_p (arg0, arg1, 0))
3357 return true;
3358
3359 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3360 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3361 return false;
3362
3363 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3364 and see if the inner values are the same. This removes any
3365 signedness comparison, which doesn't matter here. */
3366 tree op0 = arg0;
3367 tree op1 = arg1;
3368 STRIP_NOPS (op0);
3369 STRIP_NOPS (op1);
3370 if (operand_equal_p (op0, op1, 0))
3371 return true;
3372
3373 /* Discard a single widening conversion from ARG1 and see if the inner
3374 value is the same as ARG0. */
3375 if (CONVERT_EXPR_P (arg1)
3376 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3377 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3378 < TYPE_PRECISION (TREE_TYPE (arg1))
3379 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3380 return true;
3381
3382 return false;
3383 }
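
/* For example (illustrative): with a 32-bit int I, the pair
   (I, (long) I) satisfies this predicate; the widening conversion on
   the second operand is stripped and the inner values match.  */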
3384 \f
3385 /* See if ARG is an expression that is either a comparison or is performing
3386 arithmetic on comparisons. The comparisons must only be comparing
3387 two different values, which will be stored in *CVAL1 and *CVAL2; if
3388 they are nonzero it means that some operands have already been found.
3389 No variables may be used anywhere else in the expression except in the
3390 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3391 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3392
3393 If this is true, return 1. Otherwise, return zero. */
3394
3395 static int
3396 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3397 {
3398 enum tree_code code = TREE_CODE (arg);
3399 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3400
3401 /* We can handle some of the tcc_expression cases here. */
3402 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3403 tclass = tcc_unary;
3404 else if (tclass == tcc_expression
3405 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3406 || code == COMPOUND_EXPR))
3407 tclass = tcc_binary;
3408
3409 else if (tclass == tcc_expression && code == SAVE_EXPR
3410 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3411 {
3412 /* If we've already found a CVAL1 or CVAL2, this expression is
3413 too complex to handle. */
3414 if (*cval1 || *cval2)
3415 return 0;
3416
3417 tclass = tcc_unary;
3418 *save_p = 1;
3419 }
3420
3421 switch (tclass)
3422 {
3423 case tcc_unary:
3424 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3425
3426 case tcc_binary:
3427 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3428 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3429 cval1, cval2, save_p));
3430
3431 case tcc_constant:
3432 return 1;
3433
3434 case tcc_expression:
3435 if (code == COND_EXPR)
3436 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3437 cval1, cval2, save_p)
3438 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3439 cval1, cval2, save_p)
3440 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3441 cval1, cval2, save_p));
3442 return 0;
3443
3444 case tcc_comparison:
3445 /* First see if we can handle the first operand, then the second. For
3446 the second operand, we know *CVAL1 can't be zero. It must be that
3447 one side of the comparison is each of the values; test for the
3448 case where this isn't true by failing if the two operands
3449 are the same. */
3450
3451 if (operand_equal_p (TREE_OPERAND (arg, 0),
3452 TREE_OPERAND (arg, 1), 0))
3453 return 0;
3454
3455 if (*cval1 == 0)
3456 *cval1 = TREE_OPERAND (arg, 0);
3457 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3458 ;
3459 else if (*cval2 == 0)
3460 *cval2 = TREE_OPERAND (arg, 0);
3461 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3462 ;
3463 else
3464 return 0;
3465
3466 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3467 ;
3468 else if (*cval2 == 0)
3469 *cval2 = TREE_OPERAND (arg, 1);
3470 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3471 ;
3472 else
3473 return 0;
3474
3475 return 1;
3476
3477 default:
3478 return 0;
3479 }
3480 }
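
/* For example (illustrative): for ARG = (a < b) | (a == b) the walk
   records *CVAL1 = a and *CVAL2 = b and returns 1, whereas
   (a < b) | (c == d) fails because it mentions more than two distinct
   values.  */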
3481 \f
3482 /* ARG is a tree that is known to contain just arithmetic operations and
3483 comparisons. Evaluate the operations in the tree substituting NEW0 for
3484 any occurrence of OLD0 as an operand of a comparison and likewise for
3485 NEW1 and OLD1. */
3486
3487 static tree
3488 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3489 tree old1, tree new1)
3490 {
3491 tree type = TREE_TYPE (arg);
3492 enum tree_code code = TREE_CODE (arg);
3493 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3494
3495 /* We can handle some of the tcc_expression cases here. */
3496 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3497 tclass = tcc_unary;
3498 else if (tclass == tcc_expression
3499 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3500 tclass = tcc_binary;
3501
3502 switch (tclass)
3503 {
3504 case tcc_unary:
3505 return fold_build1_loc (loc, code, type,
3506 eval_subst (loc, TREE_OPERAND (arg, 0),
3507 old0, new0, old1, new1));
3508
3509 case tcc_binary:
3510 return fold_build2_loc (loc, code, type,
3511 eval_subst (loc, TREE_OPERAND (arg, 0),
3512 old0, new0, old1, new1),
3513 eval_subst (loc, TREE_OPERAND (arg, 1),
3514 old0, new0, old1, new1));
3515
3516 case tcc_expression:
3517 switch (code)
3518 {
3519 case SAVE_EXPR:
3520 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3521 old1, new1);
3522
3523 case COMPOUND_EXPR:
3524 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3525 old1, new1);
3526
3527 case COND_EXPR:
3528 return fold_build3_loc (loc, code, type,
3529 eval_subst (loc, TREE_OPERAND (arg, 0),
3530 old0, new0, old1, new1),
3531 eval_subst (loc, TREE_OPERAND (arg, 1),
3532 old0, new0, old1, new1),
3533 eval_subst (loc, TREE_OPERAND (arg, 2),
3534 old0, new0, old1, new1));
3535 default:
3536 break;
3537 }
3538 /* Fall through - ??? */
3539
3540 case tcc_comparison:
3541 {
3542 tree arg0 = TREE_OPERAND (arg, 0);
3543 tree arg1 = TREE_OPERAND (arg, 1);
3544
3545 /* We need to check both for exact equality and tree equality. The
3546 former will be true if the operand has a side-effect. In that
3547 case, we know the operand occurred exactly once. */
3548
3549 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3550 arg0 = new0;
3551 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3552 arg0 = new1;
3553
3554 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3555 arg1 = new0;
3556 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3557 arg1 = new1;
3558
3559 return fold_build2_loc (loc, code, type, arg0, arg1);
3560 }
3561
3562 default:
3563 return arg;
3564 }
3565 }
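
/* For example (illustrative): calling eval_subst on (a < b) | (a == b)
   with OLD0 = a, NEW0 = x, OLD1 = b, NEW1 = y rebuilds the tree as
   (x < y) | (x == y), re-folding each comparison as it goes.  */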
3566 \f
3567 /* Return a tree for the case when the result of an expression is RESULT
3568 converted to TYPE and OMITTED was previously an operand of the expression
3569 but is now not needed (e.g., we folded OMITTED * 0).
3570
3571 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3572 the conversion of RESULT to TYPE. */
3573
3574 tree
3575 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3576 {
3577 tree t = fold_convert_loc (loc, type, result);
3578
3579 /* If the resulting operand is an empty statement, just return the omitted
3580 statement cast to void. */
3581 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3582 return build1_loc (loc, NOP_EXPR, void_type_node,
3583 fold_ignored_result (omitted));
3584
3585 if (TREE_SIDE_EFFECTS (omitted))
3586 return build2_loc (loc, COMPOUND_EXPR, type,
3587 fold_ignored_result (omitted), t);
3588
3589 return non_lvalue_loc (loc, t);
3590 }
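
/* For example (illustrative): when folding f () * 0, the
   multiplication is replaced by omit_one_operand_loc (loc, type,
   integer_zero_node, call), which yields the COMPOUND_EXPR (f (), 0)
   so the call's side effects are still evaluated.  */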
3591
3592 /* Return a tree for the case when the result of an expression is RESULT
3593 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3594 of the expression but are now not needed.
3595
3596 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3597 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3598 evaluated before OMITTED2. Otherwise, if neither has side effects,
3599 just do the conversion of RESULT to TYPE. */
3600
3601 tree
3602 omit_two_operands_loc (location_t loc, tree type, tree result,
3603 tree omitted1, tree omitted2)
3604 {
3605 tree t = fold_convert_loc (loc, type, result);
3606
3607 if (TREE_SIDE_EFFECTS (omitted2))
3608 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3609 if (TREE_SIDE_EFFECTS (omitted1))
3610 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3611
3612 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3613 }
3614
3615 \f
3616 /* Return a simplified tree node for the truth-negation of ARG. This
3617 never alters ARG itself. We assume that ARG is an operation that
3618 returns a truth value (0 or 1).
3619
3620 FIXME: one would think we would fold the result, but it causes
3621 problems with the dominator optimizer. */
3622
3623 static tree
3624 fold_truth_not_expr (location_t loc, tree arg)
3625 {
3626 tree type = TREE_TYPE (arg);
3627 enum tree_code code = TREE_CODE (arg);
3628 location_t loc1, loc2;
3629
3630 /* If this is a comparison, we can simply invert it, except for
3631 floating-point non-equality comparisons, in which case we just
3632 enclose a TRUTH_NOT_EXPR around what we have. */
3633
3634 if (TREE_CODE_CLASS (code) == tcc_comparison)
3635 {
3636 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3637 if (FLOAT_TYPE_P (op_type)
3638 && flag_trapping_math
3639 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3640 && code != NE_EXPR && code != EQ_EXPR)
3641 return NULL_TREE;
3642
3643 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3644 if (code == ERROR_MARK)
3645 return NULL_TREE;
3646
3647 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3648 TREE_OPERAND (arg, 1));
3649 if (TREE_NO_WARNING (arg))
3650 TREE_NO_WARNING (ret) = 1;
3651 return ret;
3652 }
3653
3654 switch (code)
3655 {
3656 case INTEGER_CST:
3657 return constant_boolean_node (integer_zerop (arg), type);
3658
3659 case TRUTH_AND_EXPR:
3660 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3661 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3662 return build2_loc (loc, TRUTH_OR_EXPR, type,
3663 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3664 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3665
3666 case TRUTH_OR_EXPR:
3667 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3668 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3669 return build2_loc (loc, TRUTH_AND_EXPR, type,
3670 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3671 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3672
3673 case TRUTH_XOR_EXPR:
3674 /* Here we can invert either operand. We invert the first operand
3675 unless the second operand is a TRUTH_NOT_EXPR in which case our
3676 result is the XOR of the first operand with the inside of the
3677 negation of the second operand. */
3678
3679 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3680 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3681 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3682 else
3683 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3684 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3685 TREE_OPERAND (arg, 1));
3686
3687 case TRUTH_ANDIF_EXPR:
3688 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3689 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3690 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3691 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3692 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3693
3694 case TRUTH_ORIF_EXPR:
3695 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3696 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3697 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3698 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3699 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3700
3701 case TRUTH_NOT_EXPR:
3702 return TREE_OPERAND (arg, 0);
3703
3704 case COND_EXPR:
3705 {
3706 tree arg1 = TREE_OPERAND (arg, 1);
3707 tree arg2 = TREE_OPERAND (arg, 2);
3708
3709 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3710 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3711
3712 /* A COND_EXPR may have a throw as one operand, which
3713 then has void type. Just leave void operands
3714 as they are. */
3715 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3716 VOID_TYPE_P (TREE_TYPE (arg1))
3717 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3718 VOID_TYPE_P (TREE_TYPE (arg2))
3719 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3720 }
3721
3722 case COMPOUND_EXPR:
3723 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3724 return build2_loc (loc, COMPOUND_EXPR, type,
3725 TREE_OPERAND (arg, 0),
3726 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3727
3728 case NON_LVALUE_EXPR:
3729 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3730 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3731
3732 CASE_CONVERT:
3733 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3734 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3735
3736 /* fall through */
3737
3738 case FLOAT_EXPR:
3739 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3740 return build1_loc (loc, TREE_CODE (arg), type,
3741 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3742
3743 case BIT_AND_EXPR:
3744 if (!integer_onep (TREE_OPERAND (arg, 1)))
3745 return NULL_TREE;
3746 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3747
3748 case SAVE_EXPR:
3749 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3750
3751 case CLEANUP_POINT_EXPR:
3752 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3753 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3754 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3755
3756 default:
3757 return NULL_TREE;
3758 }
3759 }
3760
3761 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3762 assume that ARG is an operation that returns a truth value (0 or 1
3763 for scalars, 0 or -1 for vectors). Return the folded expression if
3764 folding is successful. Otherwise, return NULL_TREE. */
3765
3766 static tree
3767 fold_invert_truthvalue (location_t loc, tree arg)
3768 {
3769 tree type = TREE_TYPE (arg);
3770 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3771 ? BIT_NOT_EXPR
3772 : TRUTH_NOT_EXPR,
3773 type, arg);
3774 }
3775
3776 /* Return a simplified tree node for the truth-negation of ARG. This
3777 never alters ARG itself. We assume that ARG is an operation that
3778 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
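
/* A negated vector truth value keeps the 0 / -1 encoding because
   BIT_NOT_EXPR maps ~0 == -1 and ~(-1) == 0; scalars go through
   TRUTH_NOT_EXPR. */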
3779
3780 tree
3781 invert_truthvalue_loc (location_t loc, tree arg)
3782 {
3783 if (TREE_CODE (arg) == ERROR_MARK)
3784 return arg;
3785
3786 tree type = TREE_TYPE (arg);
3787 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3788 ? BIT_NOT_EXPR
3789 : TRUTH_NOT_EXPR,
3790 type, arg);
3791 }
3792 \f
3793 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3794 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3795 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3796 is the original memory reference used to preserve the alias set of
3797 the access. */
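
/* For instance, with BITSIZE == 8 and BITPOS == 16 the reference
   built below is BIT_FIELD_REF <INNER, 8, 16>, an 8-bit view of
   INNER starting at bit 16, converted to TYPE afterwards if the
   precision or signedness differs. */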
3798
3799 static tree
3800 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3801 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3802 int unsignedp, int reversep)
3803 {
3804 tree result, bftype;
3805
3806 /* Attempt not to lose the access path if possible. */
3807 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3808 {
3809 tree ninner = TREE_OPERAND (orig_inner, 0);
3810 machine_mode nmode;
3811 HOST_WIDE_INT nbitsize, nbitpos;
3812 tree noffset;
3813 int nunsignedp, nreversep, nvolatilep = 0;
3814 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3815 &noffset, &nmode, &nunsignedp,
3816 &nreversep, &nvolatilep);
3817 if (base == inner
3818 && noffset == NULL_TREE
3819 && nbitsize >= bitsize
3820 && nbitpos <= bitpos
3821 && bitpos + bitsize <= nbitpos + nbitsize
3822 && !reversep
3823 && !nreversep
3824 && !nvolatilep)
3825 {
3826 inner = ninner;
3827 bitpos -= nbitpos;
3828 }
3829 }
3830
3831 alias_set_type iset = get_alias_set (orig_inner);
3832 if (iset == 0 && get_alias_set (inner) != iset)
3833 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3834 build_fold_addr_expr (inner),
3835 build_int_cst (ptr_type_node, 0));
3836
3837 if (bitpos == 0 && !reversep)
3838 {
3839 tree size = TYPE_SIZE (TREE_TYPE (inner));
3840 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3841 || POINTER_TYPE_P (TREE_TYPE (inner)))
3842 && tree_fits_shwi_p (size)
3843 && tree_to_shwi (size) == bitsize)
3844 return fold_convert_loc (loc, type, inner);
3845 }
3846
3847 bftype = type;
3848 if (TYPE_PRECISION (bftype) != bitsize
3849 || TYPE_UNSIGNED (bftype) == !unsignedp)
3850 bftype = build_nonstandard_integer_type (bitsize, 0);
3851
3852 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3853 bitsize_int (bitsize), bitsize_int (bitpos));
3854 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3855
3856 if (bftype != type)
3857 result = fold_convert_loc (loc, type, result);
3858
3859 return result;
3860 }
3861
3862 /* Optimize a bit-field compare.
3863
3864 There are two cases: First is a compare against a constant and the
3865 second is a comparison of two items where the fields are at the same
3866 bit position relative to the start of a chunk (byte, halfword, word)
3867 large enough to contain it. In these cases we can avoid the shift
3868 implicit in bitfield extractions.
3869
3870 For constants, we emit a compare of the shifted constant with the
3871 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3872 compared. For two fields at the same position, we do the ANDs with the
3873 similar mask and compare the result of the ANDs.
3874
3875 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3876 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3877 are the left and right operands of the comparison, respectively.
3878
3879 If the optimization described above can be done, we return the resulting
3880 tree. Otherwise we return zero. */
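
/* As an illustration, for

     struct S { unsigned f : 4; unsigned g : 28; } s;

   the test s.f == 3 can become, on a 32-bit little-endian target
   where the field sits at bit 0 of its word, something like

     (BIT_FIELD_REF <s, 32, 0> & 15) == 3

   i.e. a full-word load masked and compared with the (shifted)
   constant, avoiding the shift needed to extract the field. */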
3881
3882 static tree
3883 optimize_bit_field_compare (location_t loc, enum tree_code code,
3884 tree compare_type, tree lhs, tree rhs)
3885 {
3886 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3887 tree type = TREE_TYPE (lhs);
3888 tree unsigned_type;
3889 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3890 machine_mode lmode, rmode;
3891 scalar_int_mode nmode;
3892 int lunsignedp, runsignedp;
3893 int lreversep, rreversep;
3894 int lvolatilep = 0, rvolatilep = 0;
3895 tree linner, rinner = NULL_TREE;
3896 tree mask;
3897 tree offset;
3898
3899 /* Get all the information about the extractions being done. If the bit size
3900 is the same as the size of the underlying object, we aren't doing an
3901 extraction at all and so can do nothing. We also don't want to
3902 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3903 then will no longer be able to replace it. */
3904 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3905 &lunsignedp, &lreversep, &lvolatilep);
3906 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3907 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3908 return 0;
3909
3910 if (const_p)
3911 rreversep = lreversep;
3912 else
3913 {
3914 /* If this is not a constant, we can only do something if bit positions,
3915 sizes, signedness and storage order are the same. */
3916 rinner
3917 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3918 &runsignedp, &rreversep, &rvolatilep);
3919
3920 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3921 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3922 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3923 return 0;
3924 }
3925
3926 /* Honor the C++ memory model and mimic what RTL expansion does. */
3927 unsigned HOST_WIDE_INT bitstart = 0;
3928 unsigned HOST_WIDE_INT bitend = 0;
3929 if (TREE_CODE (lhs) == COMPONENT_REF)
3930 {
3931 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
3932 if (offset != NULL_TREE)
3933 return 0;
3934 }
3935
3936 /* See if we can find a mode to refer to this field. We should be able to,
3937 but fail if we can't. */
3938 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
3939 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3940 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3941 TYPE_ALIGN (TREE_TYPE (rinner))),
3942 BITS_PER_WORD, false, &nmode))
3943 return 0;
3944
3945 /* Set an unsigned type of the precision of this mode for the
3946 shifts below. */
3947 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3948
3949 /* Compute the bit position and size for the new reference and our offset
3950 within it. If the new reference is the same size as the original, we
3951 won't optimize anything, so return zero. */
3952 nbitsize = GET_MODE_BITSIZE (nmode);
3953 nbitpos = lbitpos & ~ (nbitsize - 1);
3954 lbitpos -= nbitpos;
3955 if (nbitsize == lbitsize)
3956 return 0;
3957
3958 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3959 lbitpos = nbitsize - lbitsize - lbitpos;
3960
3961 /* Make the mask to be used against the extracted field. */
3962 mask = build_int_cst_type (unsigned_type, -1);
3963 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3964 mask = const_binop (RSHIFT_EXPR, mask,
3965 size_int (nbitsize - lbitsize - lbitpos));
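/* E.g. with NBITSIZE == 32, LBITSIZE == 4 and LBITPOS == 8 the
   shifts above leave MASK == 0x00000f00: four one bits starting
   at bit 8. */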
3966
3967 if (! const_p)
3968 {
3969 if (nbitpos < 0)
3970 return 0;
3971
3972 /* If not comparing with constant, just rework the comparison
3973 and return. */
3974 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
3975 nbitsize, nbitpos, 1, lreversep);
3976 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
3977 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
3978 nbitsize, nbitpos, 1, rreversep);
3979 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
3980 return fold_build2_loc (loc, code, compare_type, t1, t2);
3981 }
3982
3983 /* Otherwise, we are handling the constant case. See if the constant is too
3984 big for the field. Warn and return a constant true or false if so. We do
3985 this not only for its own sake, but to avoid having to test for this
3986 error case below. If we didn't, we might generate wrong code.
3987
3988 For unsigned fields, the constant shifted right by the field length should
3989 be all zero. For signed fields, the high-order bits should agree with
3990 the sign bit. */
3991
3992 if (lunsignedp)
3993 {
3994 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
3995 {
3996 warning (0, "comparison is always %d due to width of bit-field",
3997 code == NE_EXPR);
3998 return constant_boolean_node (code == NE_EXPR, compare_type);
3999 }
4000 }
4001 else
4002 {
4003 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4004 if (tem != 0 && tem != -1)
4005 {
4006 warning (0, "comparison is always %d due to width of bit-field",
4007 code == NE_EXPR);
4008 return constant_boolean_node (code == NE_EXPR, compare_type);
4009 }
4010 }
4011
4012 if (nbitpos < 0)
4013 return 0;
4014
4015 /* Single-bit compares should always be against zero. */
4016 if (lbitsize == 1 && ! integer_zerop (rhs))
4017 {
4018 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4019 rhs = build_int_cst (type, 0);
4020 }
4021
4022 /* Make a new bitfield reference, shift the constant over the
4023 appropriate number of bits and mask it with the computed mask
4024 (in case this was a signed field). If we changed it, make a new one. */
4025 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4026 nbitsize, nbitpos, 1, lreversep);
4027
4028 rhs = const_binop (BIT_AND_EXPR,
4029 const_binop (LSHIFT_EXPR,
4030 fold_convert_loc (loc, unsigned_type, rhs),
4031 size_int (lbitpos)),
4032 mask);
4033
4034 lhs = build2_loc (loc, code, compare_type,
4035 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4036 return lhs;
4037 }
4038 \f
4039 /* Subroutine for fold_truth_andor_1: decode a field reference.
4040
4041 If EXP is a comparison reference, we return the innermost reference.
4042
4043 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4044 set to the starting bit number.
4045
4046 If the innermost field can be completely contained in a mode-sized
4047 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4048
4049 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4050 otherwise it is not changed.
4051
4052 *PUNSIGNEDP is set to the signedness of the field.
4053
4054 *PREVERSEP is set to the storage order of the field.
4055
4056 *PMASK is set to the mask used. This is either contained in a
4057 BIT_AND_EXPR or derived from the width of the field.
4058
4059 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4060
4061 Return 0 if this is not a component reference or is one that we can't
4062 do anything with. */
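
/* For example, if EXP is s.f & 3 for an unsigned 4-bit bitfield s.f,
   this returns the containing object, sets *PBITSIZE to 4, *PAND_MASK
   to 3, and *PMASK to the field mask 15 ANDed with 3, i.e. 3. */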
4063
4064 static tree
4065 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4066 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4067 int *punsignedp, int *preversep, int *pvolatilep,
4068 tree *pmask, tree *pand_mask)
4069 {
4070 tree exp = *exp_;
4071 tree outer_type = 0;
4072 tree and_mask = 0;
4073 tree mask, inner, offset;
4074 tree unsigned_type;
4075 unsigned int precision;
4076
4077 /* All the optimizations using this function assume integer fields.
4078 There are problems with FP fields since the type_for_size call
4079 below can fail for, e.g., XFmode. */
4080 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4081 return 0;
4082
4083 /* We are interested in the bare arrangement of bits, so strip everything
4084 that doesn't affect the machine mode. However, record the type of the
4085 outermost expression if it may matter below. */
4086 if (CONVERT_EXPR_P (exp)
4087 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4088 outer_type = TREE_TYPE (exp);
4089 STRIP_NOPS (exp);
4090
4091 if (TREE_CODE (exp) == BIT_AND_EXPR)
4092 {
4093 and_mask = TREE_OPERAND (exp, 1);
4094 exp = TREE_OPERAND (exp, 0);
4095 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4096 if (TREE_CODE (and_mask) != INTEGER_CST)
4097 return 0;
4098 }
4099
4100 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4101 punsignedp, preversep, pvolatilep);
4102 if ((inner == exp && and_mask == 0)
4103 || *pbitsize < 0 || offset != 0
4104 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4105 /* Reject out-of-bound accesses (PR79731). */
4106 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4107 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4108 *pbitpos + *pbitsize) < 0))
4109 return 0;
4110
4111 *exp_ = exp;
4112
4113 /* If the number of bits in the reference is the same as the bitsize of
4114 the outer type, then the outer type gives the signedness. Otherwise
4115 (in case of a small bitfield) the signedness is unchanged. */
4116 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4117 *punsignedp = TYPE_UNSIGNED (outer_type);
4118
4119 /* Compute the mask to access the bitfield. */
4120 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4121 precision = TYPE_PRECISION (unsigned_type);
4122
4123 mask = build_int_cst_type (unsigned_type, -1);
4124
4125 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4126 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4127
4128 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4129 if (and_mask != 0)
4130 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4131 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4132
4133 *pmask = mask;
4134 *pand_mask = and_mask;
4135 return inner;
4136 }
4137
4138 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4139 bit positions and the type of MASK is signed. */
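
/* E.g. for a signed 8-bit MASK, the value 7 (binary 111) matches
   SIZE == 3, whereas 5 (binary 101) does not, and any MASK of
   unsigned type is rejected outright. */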
4140
4141 static int
4142 all_ones_mask_p (const_tree mask, unsigned int size)
4143 {
4144 tree type = TREE_TYPE (mask);
4145 unsigned int precision = TYPE_PRECISION (type);
4146
4147 /* If this function returns true when the type of the mask is
4148 UNSIGNED, then there will be errors. In particular see
4149 gcc.c-torture/execute/990326-1.c. There does not appear to be
4150 any documentation paper trail as to why this is so. But the
4151 pre-wide-int code worked with that restriction and it has been preserved
4152 here. */
4153 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4154 return false;
4155
4156 return wi::mask (size, false, precision) == wi::to_wide (mask);
4157 }
4158
4159 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4160 represents the sign bit of EXP's type. If EXP represents a sign
4161 or zero extension, also test VAL against the unextended type.
4162 The return value is the (sub)expression whose sign bit is VAL,
4163 or NULL_TREE otherwise. */
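
/* For a 32-bit int EXP, only VAL == INT_MIN (just bit 31 set)
   matches. If EXP is (int) s for a 16-bit short s, VAL == 0x8000
   is the sign bit of the unextended type and s itself is returned. */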
4164
4165 tree
4166 sign_bit_p (tree exp, const_tree val)
4167 {
4168 int width;
4169 tree t;
4170
4171 /* Tree EXP must have an integral type. */
4172 t = TREE_TYPE (exp);
4173 if (! INTEGRAL_TYPE_P (t))
4174 return NULL_TREE;
4175
4176 /* Tree VAL must be an integer constant. */
4177 if (TREE_CODE (val) != INTEGER_CST
4178 || TREE_OVERFLOW (val))
4179 return NULL_TREE;
4180
4181 width = TYPE_PRECISION (t);
4182 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4183 return exp;
4184
4185 /* Handle extension from a narrower type. */
4186 if (TREE_CODE (exp) == NOP_EXPR
4187 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4188 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4189
4190 return NULL_TREE;
4191 }
4192
4193 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4194 to be evaluated unconditionally. */
4195
4196 static int
4197 simple_operand_p (const_tree exp)
4198 {
4199 /* Strip any conversions that don't change the machine mode. */
4200 STRIP_NOPS (exp);
4201
4202 return (CONSTANT_CLASS_P (exp)
4203 || TREE_CODE (exp) == SSA_NAME
4204 || (DECL_P (exp)
4205 && ! TREE_ADDRESSABLE (exp)
4206 && ! TREE_THIS_VOLATILE (exp)
4207 && ! DECL_NONLOCAL (exp)
4208 /* Don't regard global variables as simple. They may be
4209 allocated in ways unknown to the compiler (shared memory,
4210 #pragma weak, etc). */
4211 && ! TREE_PUBLIC (exp)
4212 && ! DECL_EXTERNAL (exp)
4213 /* Weakrefs are not safe to be read, since they can be NULL.
4214 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4215 have DECL_WEAK flag set. */
4216 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4217 /* Loading a static variable is unduly expensive, but global
4218 registers aren't expensive. */
4219 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4220 }
4221
4222 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4223 to be evaluated unconditionally.
4224 In addition to simple_operand_p, we assume that comparisons, conversions,
4225 and logic-not operations are simple, if their operands are simple, too. */
4226
4227 static bool
4228 simple_operand_p_2 (tree exp)
4229 {
4230 enum tree_code code;
4231
4232 if (TREE_SIDE_EFFECTS (exp)
4233 || tree_could_trap_p (exp))
4234 return false;
4235
4236 while (CONVERT_EXPR_P (exp))
4237 exp = TREE_OPERAND (exp, 0);
4238
4239 code = TREE_CODE (exp);
4240
4241 if (TREE_CODE_CLASS (code) == tcc_comparison)
4242 return (simple_operand_p (TREE_OPERAND (exp, 0))
4243 && simple_operand_p (TREE_OPERAND (exp, 1)));
4244
4245 if (code == TRUTH_NOT_EXPR)
4246 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4247
4248 return simple_operand_p (exp);
4249 }
4250
4251 \f
4252 /* The following functions are subroutines to fold_range_test and allow it to
4253 try to change a logical combination of comparisons into a range test.
4254
4255 For example, both
4256 X == 2 || X == 3 || X == 4 || X == 5
4257 and
4258 X >= 2 && X <= 5
4259 are converted to
4260 (unsigned) (X - 2) <= 3
4261
4262 We describe each set of comparisons as being either inside or outside
4263 a range, using a variable named like IN_P, and then describe the
4264 range with a lower and upper bound. If one of the bounds is omitted,
4265 it represents either the highest or lowest value of the type.
4266
4267 In the comments below, we represent a range by two numbers in brackets
4268 preceded by a "+" to designate being inside that range, or a "-" to
4269 designate being outside that range, so the condition can be inverted by
4270 flipping the prefix. An omitted bound is represented by a "-". For
4271 example, "- [-, 10]" means being outside the range starting at the lowest
4272 possible value and ending at 10, in other words, being greater than 10.
4273 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4274 always false.
4275
4276 We set up things so that the missing bounds are handled in a consistent
4277 manner so neither a missing bound nor "true" and "false" need to be
4278 handled using a special case. */
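
/* The example above works because the subtraction is performed in an
   unsigned type: for X in [2, 5], (unsigned) (X - 2) lies in [0, 3],
   while any X < 2 wraps around to a very large value, so the single
   unsigned comparison <= 3 checks both bounds at once. */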
4279
4280 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4281 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4282 and UPPER1_P are nonzero if the respective argument is an upper bound
4283 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4284 must be specified for a comparison. ARG1 will be converted to ARG0's
4285 type if both are specified. */
4286
4287 static tree
4288 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4289 tree arg1, int upper1_p)
4290 {
4291 tree tem;
4292 int result;
4293 int sgn0, sgn1;
4294
4295 /* If neither arg represents infinity, do the normal operation.
4296 Else, if not a comparison, return infinity. Else handle the special
4297 comparison rules. Note that most of the cases below won't occur, but
4298 are handled for consistency. */
4299
4300 if (arg0 != 0 && arg1 != 0)
4301 {
4302 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4303 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4304 STRIP_NOPS (tem);
4305 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4306 }
4307
4308 if (TREE_CODE_CLASS (code) != tcc_comparison)
4309 return 0;
4310
4311 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4312 for neither. In real maths, we cannot assume open-ended ranges are
4313 the same. But, this is computer arithmetic, where numbers are finite.
4314 We can therefore substitute for any missing bound a value Z greater
4315 than any representable number, which permits us to treat unbounded
4316 ranges as equal. */
4317 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4318 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4319 switch (code)
4320 {
4321 case EQ_EXPR:
4322 result = sgn0 == sgn1;
4323 break;
4324 case NE_EXPR:
4325 result = sgn0 != sgn1;
4326 break;
4327 case LT_EXPR:
4328 result = sgn0 < sgn1;
4329 break;
4330 case LE_EXPR:
4331 result = sgn0 <= sgn1;
4332 break;
4333 case GT_EXPR:
4334 result = sgn0 > sgn1;
4335 break;
4336 case GE_EXPR:
4337 result = sgn0 >= sgn1;
4338 break;
4339 default:
4340 gcc_unreachable ();
4341 }
4342
4343 return constant_boolean_node (result, type);
4344 }
4345 \f
4346 /* Helper routine for make_range. Perform one step for it, return
4347 new expression if the loop should continue or NULL_TREE if it should
4348 stop. */
4349
4350 tree
4351 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4352 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4353 bool *strict_overflow_p)
4354 {
4355 tree arg0_type = TREE_TYPE (arg0);
4356 tree n_low, n_high, low = *p_low, high = *p_high;
4357 int in_p = *p_in_p, n_in_p;
4358
4359 switch (code)
4360 {
4361 case TRUTH_NOT_EXPR:
4362 /* We can only do something if the range is testing for zero. */
4363 if (low == NULL_TREE || high == NULL_TREE
4364 || ! integer_zerop (low) || ! integer_zerop (high))
4365 return NULL_TREE;
4366 *p_in_p = ! in_p;
4367 return arg0;
4368
4369 case EQ_EXPR: case NE_EXPR:
4370 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4371 /* We can only do something if the range is testing for zero
4372 and if the second operand is an integer constant. Note that
4373 saying something is "in" the range we make is done by
4374 complementing IN_P since it will set in the initial case of
4375 being not equal to zero; "out" is leaving it alone. */
4376 if (low == NULL_TREE || high == NULL_TREE
4377 || ! integer_zerop (low) || ! integer_zerop (high)
4378 || TREE_CODE (arg1) != INTEGER_CST)
4379 return NULL_TREE;
4380
4381 switch (code)
4382 {
4383 case NE_EXPR: /* - [c, c] */
4384 low = high = arg1;
4385 break;
4386 case EQ_EXPR: /* + [c, c] */
4387 in_p = ! in_p, low = high = arg1;
4388 break;
4389 case GT_EXPR: /* - [-, c] */
4390 low = 0, high = arg1;
4391 break;
4392 case GE_EXPR: /* + [c, -] */
4393 in_p = ! in_p, low = arg1, high = 0;
4394 break;
4395 case LT_EXPR: /* - [c, -] */
4396 low = arg1, high = 0;
4397 break;
4398 case LE_EXPR: /* + [-, c] */
4399 in_p = ! in_p, low = 0, high = arg1;
4400 break;
4401 default:
4402 gcc_unreachable ();
4403 }
4404
4405 /* If this is an unsigned comparison, we also know that EXP is
4406 greater than or equal to zero. We base the range tests we make
4407 on that fact, so we record it here so we can parse existing
4408 range tests. We test arg0_type since often the return type
4409 of, e.g. EQ_EXPR, is boolean. */
4410 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4411 {
4412 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4413 in_p, low, high, 1,
4414 build_int_cst (arg0_type, 0),
4415 NULL_TREE))
4416 return NULL_TREE;
4417
4418 in_p = n_in_p, low = n_low, high = n_high;
4419
4420 /* If the high bound is missing, but we have a nonzero low
4421 bound, reverse the range so it goes from zero to the low bound
4422 minus 1. */
4423 if (high == 0 && low && ! integer_zerop (low))
4424 {
4425 in_p = ! in_p;
4426 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4427 build_int_cst (TREE_TYPE (low), 1), 0);
4428 low = build_int_cst (arg0_type, 0);
4429 }
4430 }
4431
4432 *p_low = low;
4433 *p_high = high;
4434 *p_in_p = in_p;
4435 return arg0;
4436
4437 case NEGATE_EXPR:
4438 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4439 low and high are non-NULL, then normalize will DTRT. */
4440 if (!TYPE_UNSIGNED (arg0_type)
4441 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4442 {
4443 if (low == NULL_TREE)
4444 low = TYPE_MIN_VALUE (arg0_type);
4445 if (high == NULL_TREE)
4446 high = TYPE_MAX_VALUE (arg0_type);
4447 }
4448
4449 /* (-x) IN [a,b] -> x in [-b, -a] */
4450 n_low = range_binop (MINUS_EXPR, exp_type,
4451 build_int_cst (exp_type, 0),
4452 0, high, 1);
4453 n_high = range_binop (MINUS_EXPR, exp_type,
4454 build_int_cst (exp_type, 0),
4455 0, low, 0);
4456 if (n_high != 0 && TREE_OVERFLOW (n_high))
4457 return NULL_TREE;
4458 goto normalize;
4459
4460 case BIT_NOT_EXPR:
4461 /* ~ X -> -X - 1 */
4462 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4463 build_int_cst (exp_type, 1));
4464
4465 case PLUS_EXPR:
4466 case MINUS_EXPR:
4467 if (TREE_CODE (arg1) != INTEGER_CST)
4468 return NULL_TREE;
4469
4470 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4471 move a constant to the other side. */
4472 if (!TYPE_UNSIGNED (arg0_type)
4473 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4474 return NULL_TREE;
4475
4476 /* If EXP is signed, any overflow in the computation is undefined,
4477 so we don't worry about it so long as our computations on
4478 the bounds don't overflow. For unsigned, overflow is defined
4479 and this is exactly the right thing. */
4480 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4481 arg0_type, low, 0, arg1, 0);
4482 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4483 arg0_type, high, 1, arg1, 0);
4484 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4485 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4486 return NULL_TREE;
4487
4488 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4489 *strict_overflow_p = true;
4490
4491 normalize:
4492 /* Check for an unsigned range which has wrapped around the maximum
4493 value thus making n_high < n_low, and normalize it. */
4494 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4495 {
4496 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4497 build_int_cst (TREE_TYPE (n_high), 1), 0);
4498 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4499 build_int_cst (TREE_TYPE (n_low), 1), 0);
4500
4501 /* If the range is of the form +/- [ x+1, x ], we won't
4502 be able to normalize it. But then, it represents the
4503 whole range or the empty set, so make it
4504 +/- [ -, - ]. */
4505 if (tree_int_cst_equal (n_low, low)
4506 && tree_int_cst_equal (n_high, high))
4507 low = high = 0;
4508 else
4509 in_p = ! in_p;
4510 }
4511 else
4512 low = n_low, high = n_high;
4513
4514 *p_low = low;
4515 *p_high = high;
4516 *p_in_p = in_p;
4517 return arg0;
4518
4519 CASE_CONVERT:
4520 case NON_LVALUE_EXPR:
4521 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4522 return NULL_TREE;
4523
4524 if (! INTEGRAL_TYPE_P (arg0_type)
4525 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4526 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4527 return NULL_TREE;
4528
4529 n_low = low, n_high = high;
4530
4531 if (n_low != 0)
4532 n_low = fold_convert_loc (loc, arg0_type, n_low);
4533
4534 if (n_high != 0)
4535 n_high = fold_convert_loc (loc, arg0_type, n_high);
4536
4537 /* If we're converting arg0 from an unsigned type to the signed
4538 type of exp, we will be doing the comparison as unsigned.
4539 The tests above have already verified that LOW and HIGH
4540 are both positive.
4541
4542 So we have to ensure that we will handle large unsigned
4543 values the same way that the current signed bounds treat
4544 negative values. */
4545
4546 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4547 {
4548 tree high_positive;
4549 tree equiv_type;
4550 /* For fixed-point modes, we need to pass the saturating flag
4551 as the 2nd parameter. */
4552 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4553 equiv_type
4554 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4555 TYPE_SATURATING (arg0_type));
4556 else
4557 equiv_type
4558 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4559
4560 /* A range without an upper bound is, naturally, unbounded.
4561 Since convert would have cropped a very large value, use
4562 the max value for the destination type. */
4563 high_positive
4564 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4565 : TYPE_MAX_VALUE (arg0_type);
4566
4567 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4568 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4569 fold_convert_loc (loc, arg0_type,
4570 high_positive),
4571 build_int_cst (arg0_type, 1));
4572
4573 /* If the low bound is specified, "and" the range with the
4574 range for which the original unsigned value will be
4575 positive. */
4576 if (low != 0)
4577 {
4578 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4579 1, fold_convert_loc (loc, arg0_type,
4580 integer_zero_node),
4581 high_positive))
4582 return NULL_TREE;
4583
4584 in_p = (n_in_p == in_p);
4585 }
4586 else
4587 {
4588 /* Otherwise, "or" the range with the range of the input
4589 that will be interpreted as negative. */
4590 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4591 1, fold_convert_loc (loc, arg0_type,
4592 integer_zero_node),
4593 high_positive))
4594 return NULL_TREE;
4595
4596 in_p = (in_p != n_in_p);
4597 }
4598 }
4599
4600 *p_low = n_low;
4601 *p_high = n_high;
4602 *p_in_p = in_p;
4603 return arg0;
4604
4605 default:
4606 return NULL_TREE;
4607 }
4608 }
4609
4610 /* Given EXP, a logical expression, set the range it is testing into
4611 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4612 actually being tested. *PLOW and *PHIGH will be made of the same
4613 type as the returned expression. If EXP is not a comparison, we
4614 will most likely not be returning a useful value and range. Set
4615 *STRICT_OVERFLOW_P to true if the return value is only valid
4616 because signed overflow is undefined; otherwise, do not change
4617 *STRICT_OVERFLOW_P. */
4618
4619 tree
4620 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4621 bool *strict_overflow_p)
4622 {
4623 enum tree_code code;
4624 tree arg0, arg1 = NULL_TREE;
4625 tree exp_type, nexp;
4626 int in_p;
4627 tree low, high;
4628 location_t loc = EXPR_LOCATION (exp);
4629
4630 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4631 and see if we can refine the range. Some of the cases handled by
4632 make_range_step may not happen, but it doesn't seem worth worrying
4633 about this. We keep iterating while make_range_step refines the
4634 range; once it returns NULL_TREE we leave the loop. */
4635
4636 in_p = 0;
4637 low = high = build_int_cst (TREE_TYPE (exp), 0);
4638
4639 while (1)
4640 {
4641 code = TREE_CODE (exp);
4642 exp_type = TREE_TYPE (exp);
4643 arg0 = NULL_TREE;
4644
4645 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4646 {
4647 if (TREE_OPERAND_LENGTH (exp) > 0)
4648 arg0 = TREE_OPERAND (exp, 0);
4649 if (TREE_CODE_CLASS (code) == tcc_binary
4650 || TREE_CODE_CLASS (code) == tcc_comparison
4651 || (TREE_CODE_CLASS (code) == tcc_expression
4652 && TREE_OPERAND_LENGTH (exp) > 1))
4653 arg1 = TREE_OPERAND (exp, 1);
4654 }
4655 if (arg0 == NULL_TREE)
4656 break;
4657
4658 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4659 &high, &in_p, strict_overflow_p);
4660 if (nexp == NULL_TREE)
4661 break;
4662 exp = nexp;
4663 }
4664
4665 /* If EXP is a constant, we can evaluate whether this is true or false. */
4666 if (TREE_CODE (exp) == INTEGER_CST)
4667 {
4668 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4669 exp, 0, low, 0))
4670 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4671 exp, 1, high, 1)));
4672 low = high = 0;
4673 exp = 0;
4674 }
4675
4676 *pin_p = in_p, *plow = low, *phigh = high;
4677 return exp;
4678 }
4679
4680 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
4681 a bitwise check i.e. when
4682 LOW == 0xXX...X00...0
4683 HIGH == 0xXX...X11...1
4684 Return the corresponding mask in MASK and the stem in VALUE. */
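
/* E.g. LOW == 0x50 and HIGH == 0x5F qualify: LOW ^ HIGH == 0x0f is a
   mask of trailing ones and LOW has those bits clear, so the range
   check X in [0x50, 0x5F] becomes (X & 0xf0) == 0x50, with
   *MASK == 0xf0 and *VALUE == 0x50. */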
4685
4686 static bool
4687 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4688 tree *value)
4689 {
4690 if (TREE_CODE (low) != INTEGER_CST
4691 || TREE_CODE (high) != INTEGER_CST)
4692 return false;
4693
4694 unsigned prec = TYPE_PRECISION (type);
4695 wide_int lo = wi::to_wide (low, prec);
4696 wide_int hi = wi::to_wide (high, prec);
4697
4698 wide_int end_mask = lo ^ hi;
4699 if ((end_mask & (end_mask + 1)) != 0
4700 || (lo & end_mask) != 0)
4701 return false;
4702
4703 wide_int stem_mask = ~end_mask;
4704 wide_int stem = lo & stem_mask;
4705 if (stem != (hi & stem_mask))
4706 return false;
4707
4708 *mask = wide_int_to_tree (type, stem_mask);
4709 *value = wide_int_to_tree (type, stem);
4710
4711 return true;
4712 }
4713 \f
4714 /* Helper routine for build_range_check and match.pd. Return the type to
4715 perform the check or NULL if it shouldn't be optimized. */
4716
4717 tree
4718 range_check_type (tree etype)
4719 {
4720 /* First make sure that arithmetic in this type is valid, then make sure
4721 that it wraps around. */
4722 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4723 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4724 TYPE_UNSIGNED (etype));
4725
4726 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4727 {
4728 tree utype, minv, maxv;
4729
4730 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4731 for the type in question, as we rely on this here. */
4732 utype = unsigned_type_for (etype);
4733 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4734 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4735 build_int_cst (TREE_TYPE (maxv), 1), 1);
4736 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4737
4738 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4739 minv, 1, maxv, 1)))
4740 etype = utype;
4741 else
4742 return NULL_TREE;
4743 }
4744 return etype;
4745 }
4746
4747 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4748 type, TYPE, return an expression to test if EXP is in (or out of, depending
4749 on IN_P) the range. Return 0 if the test couldn't be created. */
4750
4751 tree
4752 build_range_check (location_t loc, tree type, tree exp, int in_p,
4753 tree low, tree high)
4754 {
4755 tree etype = TREE_TYPE (exp), mask, value;
4756
4757 /* Disable this optimization for function pointer expressions
4758 on targets that require function pointer canonicalization. */
4759 if (targetm.have_canonicalize_funcptr_for_compare ()
4760 && TREE_CODE (etype) == POINTER_TYPE
4761 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4762 return NULL_TREE;
4763
4764 if (! in_p)
4765 {
4766 value = build_range_check (loc, type, exp, 1, low, high);
4767 if (value != 0)
4768 return invert_truthvalue_loc (loc, value);
4769
4770 return 0;
4771 }
4772
4773 if (low == 0 && high == 0)
4774 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4775
4776 if (low == 0)
4777 return fold_build2_loc (loc, LE_EXPR, type, exp,
4778 fold_convert_loc (loc, etype, high));
4779
4780 if (high == 0)
4781 return fold_build2_loc (loc, GE_EXPR, type, exp,
4782 fold_convert_loc (loc, etype, low));
4783
4784 if (operand_equal_p (low, high, 0))
4785 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4786 fold_convert_loc (loc, etype, low));
4787
4788 if (TREE_CODE (exp) == BIT_AND_EXPR
4789 && maskable_range_p (low, high, etype, &mask, &value))
4790 return fold_build2_loc (loc, EQ_EXPR, type,
4791 fold_build2_loc (loc, BIT_AND_EXPR, etype,
4792 exp, mask),
4793 value);
4794
4795 if (integer_zerop (low))
4796 {
4797 if (! TYPE_UNSIGNED (etype))
4798 {
4799 etype = unsigned_type_for (etype);
4800 high = fold_convert_loc (loc, etype, high);
4801 exp = fold_convert_loc (loc, etype, exp);
4802 }
4803 return build_range_check (loc, type, exp, 1, 0, high);
4804 }
4805
4806 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4807 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4808 {
4809 int prec = TYPE_PRECISION (etype);
4810
4811 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
4812 {
4813 if (TYPE_UNSIGNED (etype))
4814 {
4815 tree signed_etype = signed_type_for (etype);
4816 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4817 etype
4818 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4819 else
4820 etype = signed_etype;
4821 exp = fold_convert_loc (loc, etype, exp);
4822 }
4823 return fold_build2_loc (loc, GT_EXPR, type, exp,
4824 build_int_cst (etype, 0));
4825 }
4826 }
4827
4828 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4829 This requires wrap-around arithmetic for the type of the expression. */
4830 etype = range_check_type (etype);
4831 if (etype == NULL_TREE)
4832 return NULL_TREE;
4833
4834 if (POINTER_TYPE_P (etype))
4835 etype = unsigned_type_for (etype);
4836
4837 high = fold_convert_loc (loc, etype, high);
4838 low = fold_convert_loc (loc, etype, low);
4839 exp = fold_convert_loc (loc, etype, exp);
4840
4841 value = const_binop (MINUS_EXPR, high, low);
4842
4843 if (value != 0 && !TREE_OVERFLOW (value))
4844 return build_range_check (loc, type,
4845 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4846 1, build_int_cst (etype, 0), value);
4847
4848 return 0;
4849 }
4850 \f
4851 /* Return the predecessor of VAL in its type, handling the infinite case. */
4852
4853 static tree
4854 range_predecessor (tree val)
4855 {
4856 tree type = TREE_TYPE (val);
4857
4858 if (INTEGRAL_TYPE_P (type)
4859 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4860 return 0;
4861 else
4862 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4863 build_int_cst (TREE_TYPE (val), 1), 0);
4864 }
4865
4866 /* Return the successor of VAL in its type, handling the infinite case. */
4867
4868 static tree
4869 range_successor (tree val)
4870 {
4871 tree type = TREE_TYPE (val);
4872
4873 if (INTEGRAL_TYPE_P (type)
4874 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4875 return 0;
4876 else
4877 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4878 build_int_cst (TREE_TYPE (val), 1), 0);
4879 }
4880
4881 /* Given two ranges, see if we can merge them into one. Return 1 if we
4882 can, 0 if we can't. Set the output range into the specified parameters. */
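
/* For example, merging + [2, 5] with + [4, 9] (an AND of the two
   tests) gives the intersection + [4, 5], while merging + [2, 5]
   with - [4, 9] gives + [2, 3]. */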
4883
4884 bool
4885 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4886 tree high0, int in1_p, tree low1, tree high1)
4887 {
4888 int no_overlap;
4889 int subset;
4890 int temp;
4891 tree tem;
4892 int in_p;
4893 tree low, high;
4894 int lowequal = ((low0 == 0 && low1 == 0)
4895 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4896 low0, 0, low1, 0)));
4897 int highequal = ((high0 == 0 && high1 == 0)
4898 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4899 high0, 1, high1, 1)));
4900
4901 /* Make range 0 be the range that starts first, or ends last if they
4902 start at the same value. Swap them if that isn't the case. */
4903 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4904 low0, 0, low1, 0))
4905 || (lowequal
4906 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4907 high1, 1, high0, 1))))
4908 {
4909 temp = in0_p, in0_p = in1_p, in1_p = temp;
4910 tem = low0, low0 = low1, low1 = tem;
4911 tem = high0, high0 = high1, high1 = tem;
4912 }
4913
4914 /* Now flag two cases, whether the ranges are disjoint or whether the
4915 second range is totally subsumed in the first. Note that the tests
4916 below are simplified by the ones above. */
4917 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4918 high0, 1, low1, 0));
4919 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4920 high1, 1, high0, 1));
4921
4922 /* We now have four cases, depending on whether we are including or
4923 excluding the two ranges. */
4924 if (in0_p && in1_p)
4925 {
4926 /* If they don't overlap, the result is false. If the second range
4927 is a subset it is the result. Otherwise, the range is from the start
4928 of the second to the end of the first. */
4929 if (no_overlap)
4930 in_p = 0, low = high = 0;
4931 else if (subset)
4932 in_p = 1, low = low1, high = high1;
4933 else
4934 in_p = 1, low = low1, high = high0;
4935 }
4936
4937 else if (in0_p && ! in1_p)
4938 {
4939 /* If they don't overlap, the result is the first range. If they are
4940 equal, the result is false. If the second range is a subset of the
4941 first, and the ranges begin at the same place, we go from just after
4942 the end of the second range to the end of the first. If the second
4943 range is not a subset of the first, or if it is a subset and both
4944 ranges end at the same place, the range starts at the start of the
4945 first range and ends just before the second range.
4946 Otherwise, we can't describe this as a single range. */
4947 if (no_overlap)
4948 in_p = 1, low = low0, high = high0;
4949 else if (lowequal && highequal)
4950 in_p = 0, low = high = 0;
4951 else if (subset && lowequal)
4952 {
4953 low = range_successor (high1);
4954 high = high0;
4955 in_p = 1;
4956 if (low == 0)
4957 {
4958 /* We are in the weird situation where high0 > high1 but
4959 high1 has no successor. Punt. */
4960 return 0;
4961 }
4962 }
4963 else if (! subset || highequal)
4964 {
4965 low = low0;
4966 high = range_predecessor (low1);
4967 in_p = 1;
4968 if (high == 0)
4969 {
4970 /* low0 < low1 but low1 has no predecessor. Punt. */
4971 return 0;
4972 }
4973 }
4974 else
4975 return 0;
4976 }
4977
4978 else if (! in0_p && in1_p)
4979 {
4980 /* If they don't overlap, the result is the second range. If the second
4981 is a subset of the first, the result is false. Otherwise,
4982 the range starts just after the first range and ends at the
4983 end of the second. */
4984 if (no_overlap)
4985 in_p = 1, low = low1, high = high1;
4986 else if (subset || highequal)
4987 in_p = 0, low = high = 0;
4988 else
4989 {
4990 low = range_successor (high0);
4991 high = high1;
4992 in_p = 1;
4993 if (low == 0)
4994 {
4995 /* high1 > high0 but high0 has no successor. Punt. */
4996 return 0;
4997 }
4998 }
4999 }
5000
5001 else
5002 {
5003 /* The case where we are excluding both ranges. Here the complex case
5004 is if they don't overlap. In that case, the only time we have a
5005 range is if they are adjacent. If the second is a subset of the
5006 first, the result is the first. Otherwise, the range to exclude
5007 starts at the beginning of the first range and ends at the end of the
5008 second. */
5009 if (no_overlap)
5010 {
5011 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5012 range_successor (high0),
5013 1, low1, 0)))
5014 in_p = 0, low = low0, high = high1;
5015 else
5016 {
5017 /* Canonicalize - [min, x] into - [-, x]. */
5018 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5019 switch (TREE_CODE (TREE_TYPE (low0)))
5020 {
5021 case ENUMERAL_TYPE:
5022 if (TYPE_PRECISION (TREE_TYPE (low0))
5023 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5024 break;
5025 /* FALLTHROUGH */
5026 case INTEGER_TYPE:
5027 if (tree_int_cst_equal (low0,
5028 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5029 low0 = 0;
5030 break;
5031 case POINTER_TYPE:
5032 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5033 && integer_zerop (low0))
5034 low0 = 0;
5035 break;
5036 default:
5037 break;
5038 }
5039
5040 /* Canonicalize - [x, max] into - [x, -]. */
5041 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5042 switch (TREE_CODE (TREE_TYPE (high1)))
5043 {
5044 case ENUMERAL_TYPE:
5045 if (TYPE_PRECISION (TREE_TYPE (high1))
5046 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5047 break;
5048 /* FALLTHROUGH */
5049 case INTEGER_TYPE:
5050 if (tree_int_cst_equal (high1,
5051 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5052 high1 = 0;
5053 break;
5054 case POINTER_TYPE:
5055 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5056 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5057 high1, 1,
5058 build_int_cst (TREE_TYPE (high1), 1),
5059 1)))
5060 high1 = 0;
5061 break;
5062 default:
5063 break;
5064 }
5065
5066 /* The ranges might also be adjacent between the maximum and
5067 minimum values of the given type. For
5068 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5069 return + [x + 1, y - 1]. */
5070 if (low0 == 0 && high1 == 0)
5071 {
5072 low = range_successor (high0);
5073 high = range_predecessor (low1);
5074 if (low == 0 || high == 0)
5075 return 0;
5076
5077 in_p = 1;
5078 }
5079 else
5080 return 0;
5081 }
5082 }
5083 else if (subset)
5084 in_p = 0, low = low0, high = high0;
5085 else
5086 in_p = 0, low = low0, high = high1;
5087 }
5088
5089 *pin_p = in_p, *plow = low, *phigh = high;
5090 return 1;
5091 }
5092 \f
5093
5094 /* Subroutine of fold, looking inside expressions of the form
5095 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5096 of the COND_EXPR. This function is being used also to optimize
5097 A op B ? C : A, by reversing the comparison first.
5098
5099 Return a folded expression whose code is not a COND_EXPR
5100 anymore, or NULL_TREE if no folding opportunity is found. */
5101
5102 static tree
5103 fold_cond_expr_with_comparison (location_t loc, tree type,
5104 tree arg0, tree arg1, tree arg2)
5105 {
5106 enum tree_code comp_code = TREE_CODE (arg0);
5107 tree arg00 = TREE_OPERAND (arg0, 0);
5108 tree arg01 = TREE_OPERAND (arg0, 1);
5109 tree arg1_type = TREE_TYPE (arg1);
5110 tree tem;
5111
5112 STRIP_NOPS (arg1);
5113 STRIP_NOPS (arg2);
5114
5115 /* If we have A op 0 ? A : -A, consider applying the following
5116 transformations:
5117
5118 A == 0? A : -A same as -A
5119 A != 0? A : -A same as A
5120 A >= 0? A : -A same as abs (A)
5121 A > 0? A : -A same as abs (A)
5122 A <= 0? A : -A same as -abs (A)
5123 A < 0? A : -A same as -abs (A)
5124
5125 None of these transformations work for modes with signed
5126 zeros. If A is +/-0, the first two transformations will
5127 change the sign of the result (from +0 to -0, or vice
5128 versa). The last four will fix the sign of the result,
5129 even though the original expressions could be positive or
5130 negative, depending on the sign of A.
5131
5132 Note that all these transformations are correct if A is
5133 NaN, since the two alternatives (A and -A) are also NaNs. */
5134 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5135 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5136 ? real_zerop (arg01)
5137 : integer_zerop (arg01))
5138 && ((TREE_CODE (arg2) == NEGATE_EXPR
5139 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5140 /* In the case that A is of the form X-Y, '-A' (arg2) may
5141 have already been folded to Y-X, check for that. */
5142 || (TREE_CODE (arg1) == MINUS_EXPR
5143 && TREE_CODE (arg2) == MINUS_EXPR
5144 && operand_equal_p (TREE_OPERAND (arg1, 0),
5145 TREE_OPERAND (arg2, 1), 0)
5146 && operand_equal_p (TREE_OPERAND (arg1, 1),
5147 TREE_OPERAND (arg2, 0), 0))))
5148 switch (comp_code)
5149 {
5150 case EQ_EXPR:
5151 case UNEQ_EXPR:
5152 tem = fold_convert_loc (loc, arg1_type, arg1);
5153 return fold_convert_loc (loc, type, negate_expr (tem));
5154 case NE_EXPR:
5155 case LTGT_EXPR:
5156 return fold_convert_loc (loc, type, arg1);
5157 case UNGE_EXPR:
5158 case UNGT_EXPR:
5159 if (flag_trapping_math)
5160 break;
5161 /* Fall through. */
5162 case GE_EXPR:
5163 case GT_EXPR:
5164 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5165 break;
5166 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5167 return fold_convert_loc (loc, type, tem);
5168 case UNLE_EXPR:
5169 case UNLT_EXPR:
5170 if (flag_trapping_math)
5171 break;
5172 /* FALLTHRU */
5173 case LE_EXPR:
5174 case LT_EXPR:
5175 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5176 break;
5177 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5178 return negate_expr (fold_convert_loc (loc, type, tem));
5179 default:
5180 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5181 break;
5182 }
5183
5184 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5185 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5186 both transformations are correct when A is NaN: A != 0
5187 is then true, and A == 0 is false. */
5188
5189 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5190 && integer_zerop (arg01) && integer_zerop (arg2))
5191 {
5192 if (comp_code == NE_EXPR)
5193 return fold_convert_loc (loc, type, arg1);
5194 else if (comp_code == EQ_EXPR)
5195 return build_zero_cst (type);
5196 }
5197
5198 /* Try some transformations of A op B ? A : B.
5199
5200 A == B? A : B same as B
5201 A != B? A : B same as A
5202 A >= B? A : B same as max (A, B)
5203 A > B? A : B same as max (B, A)
5204 A <= B? A : B same as min (A, B)
5205 A < B? A : B same as min (B, A)
5206
5207 As above, these transformations don't work in the presence
5208 of signed zeros. For example, if A and B are zeros of
5209 opposite sign, the first two transformations will change
5210 the sign of the result. In the last four, the original
5211 expressions give different results for (A=+0, B=-0) and
5212 (A=-0, B=+0), but the transformed expressions do not.
5213
5214 The first two transformations are correct if either A or B
5215 is a NaN. In the first transformation, the condition will
5216 be false, and B will indeed be chosen. In the case of the
5217 second transformation, the condition A != B will be true,
5218 and A will be chosen.
5219
5220 The conversions to max() and min() are not correct if B is
5221 a number and A is not. The conditions in the original
5222 expressions will be false, so all four give B. The min()
5223 and max() versions would give a NaN instead. */
5224 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5225 && operand_equal_for_comparison_p (arg01, arg2)
5226 /* Avoid these transformations if the COND_EXPR may be used
5227 as an lvalue in the C++ front-end. PR c++/19199. */
5228 && (in_gimple_form
5229 || VECTOR_TYPE_P (type)
5230 || (! lang_GNU_CXX ()
5231 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5232 || ! maybe_lvalue_p (arg1)
5233 || ! maybe_lvalue_p (arg2)))
5234 {
5235 tree comp_op0 = arg00;
5236 tree comp_op1 = arg01;
5237 tree comp_type = TREE_TYPE (comp_op0);
5238
5239 switch (comp_code)
5240 {
5241 case EQ_EXPR:
5242 return fold_convert_loc (loc, type, arg2);
5243 case NE_EXPR:
5244 return fold_convert_loc (loc, type, arg1);
5245 case LE_EXPR:
5246 case LT_EXPR:
5247 case UNLE_EXPR:
5248 case UNLT_EXPR:
5249 /* In C++ a ?: expression can be an lvalue, so put the
5250 operand which will be used if they are equal first
5251 so that we can convert this back to the
5252 corresponding COND_EXPR. */
5253 if (!HONOR_NANS (arg1))
5254 {
5255 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5256 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5257 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5258 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5259 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5260 comp_op1, comp_op0);
5261 return fold_convert_loc (loc, type, tem);
5262 }
5263 break;
5264 case GE_EXPR:
5265 case GT_EXPR:
5266 case UNGE_EXPR:
5267 case UNGT_EXPR:
5268 if (!HONOR_NANS (arg1))
5269 {
5270 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5271 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5272 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5273 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5274 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5275 comp_op1, comp_op0);
5276 return fold_convert_loc (loc, type, tem);
5277 }
5278 break;
5279 case UNEQ_EXPR:
5280 if (!HONOR_NANS (arg1))
5281 return fold_convert_loc (loc, type, arg2);
5282 break;
5283 case LTGT_EXPR:
5284 if (!HONOR_NANS (arg1))
5285 return fold_convert_loc (loc, type, arg1);
5286 break;
5287 default:
5288 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5289 break;
5290 }
5291 }
5292
5293 return NULL_TREE;
5294 }
5295
5296
5297 \f
5298 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5299 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5300 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5301 false) >= 2)
5302 #endif
5303
5304 /* EXP is some logical combination of boolean tests. See if we can
5305 merge it into some range test. Return the new tree if so. */
5306
5307 static tree
5308 fold_range_test (location_t loc, enum tree_code code, tree type,
5309 tree op0, tree op1)
5310 {
5311 int or_op = (code == TRUTH_ORIF_EXPR
5312 || code == TRUTH_OR_EXPR);
5313 int in0_p, in1_p, in_p;
5314 tree low0, low1, low, high0, high1, high;
5315 bool strict_overflow_p = false;
5316 tree tem, lhs, rhs;
5317 const char * const warnmsg = G_("assuming signed overflow does not occur "
5318 "when simplifying range test");
5319
5320 if (!INTEGRAL_TYPE_P (type))
5321 return 0;
5322
5323 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5324 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5325
5326 /* If this is an OR operation, invert both sides; we will invert
5327 again at the end. */
5328 if (or_op)
5329 in0_p = ! in0_p, in1_p = ! in1_p;
5330
5331 /* If both expressions are the same, if we can merge the ranges, and we
5332 can build the range test, return it or it inverted. If one of the
5333 ranges is always true or always false, consider it to be the same
5334 expression as the other. */
5335 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5336 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5337 in1_p, low1, high1)
5338 && 0 != (tem = (build_range_check (loc, type,
5339 lhs != 0 ? lhs
5340 : rhs != 0 ? rhs : integer_zero_node,
5341 in_p, low, high))))
5342 {
5343 if (strict_overflow_p)
5344 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5345 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5346 }
5347
5348 /* On machines where the branch cost is expensive, if this is a
5349 short-circuited branch and the underlying object on both sides
5350 is the same, make a non-short-circuit operation. */
5351 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5352 && !flag_sanitize_coverage
5353 && lhs != 0 && rhs != 0
5354 && (code == TRUTH_ANDIF_EXPR
5355 || code == TRUTH_ORIF_EXPR)
5356 && operand_equal_p (lhs, rhs, 0))
5357 {
5358 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5359 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5360 which case we can't do this. */
5361 if (simple_operand_p (lhs))
5362 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5363 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5364 type, op0, op1);
5365
5366 else if (!lang_hooks.decls.global_bindings_p ()
5367 && !CONTAINS_PLACEHOLDER_P (lhs))
5368 {
5369 tree common = save_expr (lhs);
5370
5371 if (0 != (lhs = build_range_check (loc, type, common,
5372 or_op ? ! in0_p : in0_p,
5373 low0, high0))
5374 && (0 != (rhs = build_range_check (loc, type, common,
5375 or_op ? ! in1_p : in1_p,
5376 low1, high1))))
5377 {
5378 if (strict_overflow_p)
5379 fold_overflow_warning (warnmsg,
5380 WARN_STRICT_OVERFLOW_COMPARISON);
5381 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5382 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5383 type, lhs, rhs);
5384 }
5385 }
5386 }
5387
5388 return 0;
5389 }
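
/* Illustrative sketch (hypothetical example): given

       int is_digit (int ch) { return ch >= '0' && ch <= '9'; }

   make_range computes the range ['0', '9'] for CH and build_range_check
   emits the single unsigned comparison

       (unsigned) (ch - '0') <= 9

   so the short-circuit AND collapses into one test.  */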
5390 \f
5391 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5392 bit value. Arrange things so the extra bits will be set to zero if and
5393 only if C is sign-extended to its full width. If MASK is nonzero,
5394 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5395
5396 static tree
5397 unextend (tree c, int p, int unsignedp, tree mask)
5398 {
5399 tree type = TREE_TYPE (c);
5400 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5401 tree temp;
5402
5403 if (p == modesize || unsignedp)
5404 return c;
5405
5406 /* We work by getting just the sign bit into the low-order bit, then
5407 into the high-order bit, then sign-extend. We then XOR that value
5408 with C. */
5409 temp = build_int_cst (TREE_TYPE (c),
5410 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5411
5412 /* We must use a signed type in order to get an arithmetic right shift.
5413 However, we must also avoid introducing accidental overflows, so that
5414 a subsequent call to integer_zerop will work. Hence we must
5415 do the type conversion here. At this point, the constant is either
5416 zero or one, and the conversion to a signed type can never overflow.
5417 We could get an overflow if this conversion is done anywhere else. */
5418 if (TYPE_UNSIGNED (type))
5419 temp = fold_convert (signed_type_for (type), temp);
5420
5421 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5422 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5423 if (mask != 0)
5424 temp = const_binop (BIT_AND_EXPR, temp,
5425 fold_convert (TREE_TYPE (c), mask));
5426 /* If necessary, convert the type back to match the type of C. */
5427 if (TYPE_UNSIGNED (type))
5428 temp = fold_convert (type, temp);
5429
5430 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5431 }
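
/* Worked example (illustrative): for p == 8 in a 32-bit mode, take
   c == 0x80, an 8-bit field value whose sign bit is set.  Then

       temp = (c >> 7) & 1;      -- 1, the field's sign bit
       temp <<= 31;              -- 0x80000000, done in a signed type
       temp >>= 31 - 8;          -- arithmetic shift: 0xffffff00
       result = c ^ temp;        -- 0xffffff80

   i.e. 0x80 sign-extended from 8 to 32 bits; had C already been the
   sign-extended 0xffffff80, the XOR would instead clear the extra bits
   and yield 0x00000080.  */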
5432 \f
5433 /* For an expression that has the form
5434 (A && B) || ~B
5435 or
5436 (A || B) && ~B,
5437 we can drop one of the inner expressions and simplify to
5438 A || ~B
5439 or
5440 A && ~B
5441 LOC is the location of the resulting expression. OP is the inner
5442 logical operation; the left-hand side in the examples above, while CMPOP
5443 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5444 removing a condition that guards another, as in
5445 (A != NULL && A->...) || A == NULL
5446 which we must not transform. If RHS_ONLY is true, only eliminate the
5447 right-most operand of the inner logical operation. */
5448
5449 static tree
5450 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5451 bool rhs_only)
5452 {
5453 tree type = TREE_TYPE (cmpop);
5454 enum tree_code code = TREE_CODE (cmpop);
5455 enum tree_code truthop_code = TREE_CODE (op);
5456 tree lhs = TREE_OPERAND (op, 0);
5457 tree rhs = TREE_OPERAND (op, 1);
5458 tree orig_lhs = lhs, orig_rhs = rhs;
5459 enum tree_code rhs_code = TREE_CODE (rhs);
5460 enum tree_code lhs_code = TREE_CODE (lhs);
5461 enum tree_code inv_code;
5462
5463 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5464 return NULL_TREE;
5465
5466 if (TREE_CODE_CLASS (code) != tcc_comparison)
5467 return NULL_TREE;
5468
5469 if (rhs_code == truthop_code)
5470 {
5471 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5472 if (newrhs != NULL_TREE)
5473 {
5474 rhs = newrhs;
5475 rhs_code = TREE_CODE (rhs);
5476 }
5477 }
5478 if (lhs_code == truthop_code && !rhs_only)
5479 {
5480 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5481 if (newlhs != NULL_TREE)
5482 {
5483 lhs = newlhs;
5484 lhs_code = TREE_CODE (lhs);
5485 }
5486 }
5487
5488 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5489 if (inv_code == rhs_code
5490 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5491 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5492 return lhs;
5493 if (!rhs_only && inv_code == lhs_code
5494 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5495 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5496 return rhs;
5497 if (rhs != orig_rhs || lhs != orig_lhs)
5498 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5499 lhs, rhs);
5500 return NULL_TREE;
5501 }
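
/* Illustrative sketch: with OP == `x < 10 && y > 0' and CMPOP == `y <= 0',
   the inverse of `y > 0' matches CMPOP, so

       (x < 10 && y > 0) || y <= 0   ==>   x < 10 || y <= 0

   RHS_ONLY exists for guard patterns such as
   `(a != NULL && a->b > 0) || a == NULL', where only the right-most arm
   may be dropped so the dereference stays protected.  */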
5502
5503 /* Find ways of folding logical expressions of LHS and RHS:
5504 Try to merge two comparisons to the same innermost item.
5505 Look for range tests like "ch >= '0' && ch <= '9'".
5506 Look for combinations of simple terms on machines with expensive branches
5507 and evaluate the RHS unconditionally.
5508
5509 For example, if we have p->a == 2 && p->b == 4 and we can make an
5510 object large enough to span both A and B, we can do this with a comparison
5511 against the object ANDed with the a mask.
5512
5513 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5514 operations to do this with one comparison.
5515
5516 We check for both normal comparisons and the BIT_AND_EXPRs made by
5517 this function and the one above.
5518
5519 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5520 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5521
5522 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5523 two operands.
5524
5525 We return the simplified tree or 0 if no optimization is possible. */
5526
5527 static tree
5528 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5529 tree lhs, tree rhs)
5530 {
5531 /* If this is the "or" of two comparisons, we can do something if
5532 the comparisons are NE_EXPR. If this is the "and", we can do something
5533 if the comparisons are EQ_EXPR. I.e.,
5534 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5535
5536 WANTED_CODE is this operation code. For single bit fields, we can
5537 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5538 comparison for one-bit fields. */
5539
5540 enum tree_code wanted_code;
5541 enum tree_code lcode, rcode;
5542 tree ll_arg, lr_arg, rl_arg, rr_arg;
5543 tree ll_inner, lr_inner, rl_inner, rr_inner;
5544 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5545 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5546 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5547 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5548 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5549 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5550 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5551 scalar_int_mode lnmode, rnmode;
5552 tree ll_mask, lr_mask, rl_mask, rr_mask;
5553 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5554 tree l_const, r_const;
5555 tree lntype, rntype, result;
5556 HOST_WIDE_INT first_bit, end_bit;
5557 int volatilep;
5558
5559 /* Start by getting the comparison codes. Fail if anything is volatile.
5560 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5561 it were surrounded with a NE_EXPR. */
5562
5563 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5564 return 0;
5565
5566 lcode = TREE_CODE (lhs);
5567 rcode = TREE_CODE (rhs);
5568
5569 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5570 {
5571 lhs = build2 (NE_EXPR, truth_type, lhs,
5572 build_int_cst (TREE_TYPE (lhs), 0));
5573 lcode = NE_EXPR;
5574 }
5575
5576 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5577 {
5578 rhs = build2 (NE_EXPR, truth_type, rhs,
5579 build_int_cst (TREE_TYPE (rhs), 0));
5580 rcode = NE_EXPR;
5581 }
5582
5583 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5584 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5585 return 0;
5586
5587 ll_arg = TREE_OPERAND (lhs, 0);
5588 lr_arg = TREE_OPERAND (lhs, 1);
5589 rl_arg = TREE_OPERAND (rhs, 0);
5590 rr_arg = TREE_OPERAND (rhs, 1);
5591
5592 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5593 if (simple_operand_p (ll_arg)
5594 && simple_operand_p (lr_arg))
5595 {
5596 if (operand_equal_p (ll_arg, rl_arg, 0)
5597 && operand_equal_p (lr_arg, rr_arg, 0))
5598 {
5599 result = combine_comparisons (loc, code, lcode, rcode,
5600 truth_type, ll_arg, lr_arg);
5601 if (result)
5602 return result;
5603 }
5604 else if (operand_equal_p (ll_arg, rr_arg, 0)
5605 && operand_equal_p (lr_arg, rl_arg, 0))
5606 {
5607 result = combine_comparisons (loc, code, lcode,
5608 swap_tree_comparison (rcode),
5609 truth_type, ll_arg, lr_arg);
5610 if (result)
5611 return result;
5612 }
5613 }
5614
5615 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5616 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5617
5618 /* If the RHS can be evaluated unconditionally and its operands are
5619 simple, it wins to evaluate the RHS unconditionally on machines
5620 with expensive branches. In this case, this isn't a comparison
5621 that can be merged. */
5622
5623 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5624 false) >= 2
5625 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5626 && simple_operand_p (rl_arg)
5627 && simple_operand_p (rr_arg))
5628 {
5629 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5630 if (code == TRUTH_OR_EXPR
5631 && lcode == NE_EXPR && integer_zerop (lr_arg)
5632 && rcode == NE_EXPR && integer_zerop (rr_arg)
5633 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5634 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5635 return build2_loc (loc, NE_EXPR, truth_type,
5636 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5637 ll_arg, rl_arg),
5638 build_int_cst (TREE_TYPE (ll_arg), 0));
5639
5640 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5641 if (code == TRUTH_AND_EXPR
5642 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5643 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5644 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5645 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5646 return build2_loc (loc, EQ_EXPR, truth_type,
5647 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5648 ll_arg, rl_arg),
5649 build_int_cst (TREE_TYPE (ll_arg), 0));
5650 }
5651
5652 /* See if the comparisons can be merged. Then get all the parameters for
5653 each side. */
5654
5655 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5656 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5657 return 0;
5658
5659 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5660 volatilep = 0;
5661 ll_inner = decode_field_reference (loc, &ll_arg,
5662 &ll_bitsize, &ll_bitpos, &ll_mode,
5663 &ll_unsignedp, &ll_reversep, &volatilep,
5664 &ll_mask, &ll_and_mask);
5665 lr_inner = decode_field_reference (loc, &lr_arg,
5666 &lr_bitsize, &lr_bitpos, &lr_mode,
5667 &lr_unsignedp, &lr_reversep, &volatilep,
5668 &lr_mask, &lr_and_mask);
5669 rl_inner = decode_field_reference (loc, &rl_arg,
5670 &rl_bitsize, &rl_bitpos, &rl_mode,
5671 &rl_unsignedp, &rl_reversep, &volatilep,
5672 &rl_mask, &rl_and_mask);
5673 rr_inner = decode_field_reference (loc, &rr_arg,
5674 &rr_bitsize, &rr_bitpos, &rr_mode,
5675 &rr_unsignedp, &rr_reversep, &volatilep,
5676 &rr_mask, &rr_and_mask);
5677
5678 /* The inner operation on the lhs of each comparison must be the
5679 same if we are to be able to do anything.
5680 Then see if we have constants. If not, the same must be true for
5681 the rhs's. */
5682 if (volatilep
5683 || ll_reversep != rl_reversep
5684 || ll_inner == 0 || rl_inner == 0
5685 || ! operand_equal_p (ll_inner, rl_inner, 0))
5686 return 0;
5687
5688 if (TREE_CODE (lr_arg) == INTEGER_CST
5689 && TREE_CODE (rr_arg) == INTEGER_CST)
5690 {
5691 l_const = lr_arg, r_const = rr_arg;
5692 lr_reversep = ll_reversep;
5693 }
5694 else if (lr_reversep != rr_reversep
5695 || lr_inner == 0 || rr_inner == 0
5696 || ! operand_equal_p (lr_inner, rr_inner, 0))
5697 return 0;
5698 else
5699 l_const = r_const = 0;
5700
5701 /* If either comparison code is not correct for our logical operation,
5702 fail. However, we can convert a one-bit comparison against zero into
5703 the opposite comparison against that bit being set in the field. */
5704
5705 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5706 if (lcode != wanted_code)
5707 {
5708 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5709 {
5710 /* Make the left operand unsigned, since we are only interested
5711 in the value of one bit. Otherwise we are doing the wrong
5712 thing below. */
5713 ll_unsignedp = 1;
5714 l_const = ll_mask;
5715 }
5716 else
5717 return 0;
5718 }
5719
5720 /* This is analogous to the code for l_const above. */
5721 if (rcode != wanted_code)
5722 {
5723 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5724 {
5725 rl_unsignedp = 1;
5726 r_const = rl_mask;
5727 }
5728 else
5729 return 0;
5730 }
5731
5732 /* See if we can find a mode that contains both fields being compared on
5733 the left. If we can't, fail. Otherwise, update all constants and masks
5734 to be relative to a field of that size. */
5735 first_bit = MIN (ll_bitpos, rl_bitpos);
5736 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5737 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5738 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
5739 volatilep, &lnmode))
5740 return 0;
5741
5742 lnbitsize = GET_MODE_BITSIZE (lnmode);
5743 lnbitpos = first_bit & ~ (lnbitsize - 1);
5744 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5745 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5746
5747 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5748 {
5749 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5750 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5751 }
5752
5753 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5754 size_int (xll_bitpos));
5755 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5756 size_int (xrl_bitpos));
5757
5758 if (l_const)
5759 {
5760 l_const = fold_convert_loc (loc, lntype, l_const);
5761 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5762 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5763 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5764 fold_build1_loc (loc, BIT_NOT_EXPR,
5765 lntype, ll_mask))))
5766 {
5767 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5768
5769 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5770 }
5771 }
5772 if (r_const)
5773 {
5774 r_const = fold_convert_loc (loc, lntype, r_const);
5775 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5776 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5777 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5778 fold_build1_loc (loc, BIT_NOT_EXPR,
5779 lntype, rl_mask))))
5780 {
5781 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5782
5783 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5784 }
5785 }
5786
5787 /* If the right sides are not constant, do the same for them. Also,
5788 disallow this optimization if a size or signedness mismatch occurs
5789 between the left and right sides. */
5790 if (l_const == 0)
5791 {
5792 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5793 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5794 /* Make sure the two fields on the right
5795 correspond to the left without being swapped. */
5796 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5797 return 0;
5798
5799 first_bit = MIN (lr_bitpos, rr_bitpos);
5800 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5801 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5802 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
5803 volatilep, &rnmode))
5804 return 0;
5805
5806 rnbitsize = GET_MODE_BITSIZE (rnmode);
5807 rnbitpos = first_bit & ~ (rnbitsize - 1);
5808 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5809 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5810
5811 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5812 {
5813 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5814 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5815 }
5816
5817 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5818 rntype, lr_mask),
5819 size_int (xlr_bitpos));
5820 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5821 rntype, rr_mask),
5822 size_int (xrr_bitpos));
5823
5824 /* Make a mask that corresponds to both fields being compared.
5825 Do this for both items being compared. If the operands are the
5826 same size and the bits being compared are in the same position
5827 then we can do this by masking both and comparing the masked
5828 results. */
5829 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5830 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5831 if (lnbitsize == rnbitsize
5832 && xll_bitpos == xlr_bitpos
5833 && lnbitpos >= 0
5834 && rnbitpos >= 0)
5835 {
5836 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5837 lntype, lnbitsize, lnbitpos,
5838 ll_unsignedp || rl_unsignedp, ll_reversep);
5839 if (! all_ones_mask_p (ll_mask, lnbitsize))
5840 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5841
5842 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5843 rntype, rnbitsize, rnbitpos,
5844 lr_unsignedp || rr_unsignedp, lr_reversep);
5845 if (! all_ones_mask_p (lr_mask, rnbitsize))
5846 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5847
5848 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5849 }
5850
5851 /* There is still another way we can do something: If both pairs of
5852 fields being compared are adjacent, we may be able to make a wider
5853 field containing them both.
5854
5855 Note that we still must mask the lhs/rhs expressions. Furthermore,
5856 the mask must be shifted to account for the shift done by
5857 make_bit_field_ref. */
5858 if (((ll_bitsize + ll_bitpos == rl_bitpos
5859 && lr_bitsize + lr_bitpos == rr_bitpos)
5860 || (ll_bitpos == rl_bitpos + rl_bitsize
5861 && lr_bitpos == rr_bitpos + rr_bitsize))
5862 && ll_bitpos >= 0
5863 && rl_bitpos >= 0
5864 && lr_bitpos >= 0
5865 && rr_bitpos >= 0)
5866 {
5867 tree type;
5868
5869 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5870 ll_bitsize + rl_bitsize,
5871 MIN (ll_bitpos, rl_bitpos),
5872 ll_unsignedp, ll_reversep);
5873 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5874 lr_bitsize + rr_bitsize,
5875 MIN (lr_bitpos, rr_bitpos),
5876 lr_unsignedp, lr_reversep);
5877
5878 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5879 size_int (MIN (xll_bitpos, xrl_bitpos)));
5880 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5881 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5882
5883 /* Convert to the smaller type before masking out unwanted bits. */
5884 type = lntype;
5885 if (lntype != rntype)
5886 {
5887 if (lnbitsize > rnbitsize)
5888 {
5889 lhs = fold_convert_loc (loc, rntype, lhs);
5890 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5891 type = rntype;
5892 }
5893 else if (lnbitsize < rnbitsize)
5894 {
5895 rhs = fold_convert_loc (loc, lntype, rhs);
5896 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5897 type = lntype;
5898 }
5899 }
5900
5901 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5902 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5903
5904 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5905 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5906
5907 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5908 }
5909
5910 return 0;
5911 }
5912
5913 /* Handle the case of comparisons with constants. If there is something in
5914 common between the masks, those bits of the constants must be the same.
5915 If not, the condition is always false. Test for this to avoid generating
5916 incorrect code below. */
5917 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5918 if (! integer_zerop (result)
5919 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5920 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5921 {
5922 if (wanted_code == NE_EXPR)
5923 {
5924 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5925 return constant_boolean_node (true, truth_type);
5926 }
5927 else
5928 {
5929 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5930 return constant_boolean_node (false, truth_type);
5931 }
5932 }
5933
5934 if (lnbitpos < 0)
5935 return 0;
5936
5937 /* Construct the expression we will return. First get the component
5938 reference we will make. Unless the mask is all ones the width of
5939 that field, perform the mask operation. Then compare with the
5940 merged constant. */
5941 result = make_bit_field_ref (loc, ll_inner, ll_arg,
5942 lntype, lnbitsize, lnbitpos,
5943 ll_unsignedp || rl_unsignedp, ll_reversep);
5944
5945 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5946 if (! all_ones_mask_p (ll_mask, lnbitsize))
5947 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5948
5949 return build2_loc (loc, wanted_code, truth_type, result,
5950 const_binop (BIT_IOR_EXPR, l_const, r_const));
5951 }
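
/* Illustrative sketch (hypothetical struct): for

       struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test `p->a == 2 && p->b == 4' can be merged by the code above into
   a single load, mask and compare of the byte holding both fields,
   conceptually

       *(unsigned char *) p == (2 | 4 << 4)

   with the exact masks and shifts depending on bit order and endianness.  */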
5952 \f
5953 /* T is an integer expression that is being multiplied or divided by, or
5954 taken modulo, a constant C (CODE says which operation, and what kind of
5955 divide or modulus). See if we can eliminate that operation by folding it with
5956 other operations already in T. WIDE_TYPE, if non-null, is a type that
5957 should be used for the computation if wider than our type.
5958
5959 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5960 (X * 2) + (Y * 4). We must, however, be assured that either the original
5961 expression would not overflow or that overflow is undefined for the type
5962 in the language in question.
5963
5964 If we return a non-null expression, it is an equivalent form of the
5965 original computation, but need not be in the original type.
5966
5967 We set *STRICT_OVERFLOW_P to true if the return value depends on
5968 signed overflow being undefined. Otherwise we do not change
5969 *STRICT_OVERFLOW_P. */
5970
5971 static tree
5972 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5973 bool *strict_overflow_p)
5974 {
5975 /* To avoid exponential search depth, refuse to allow recursion past
5976 three levels. Beyond that (1) it's highly unlikely that we'll find
5977 something interesting and (2) we've probably processed it before
5978 when we built the inner expression. */
5979
5980 static int depth;
5981 tree ret;
5982
5983 if (depth > 3)
5984 return NULL;
5985
5986 depth++;
5987 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5988 depth--;
5989
5990 return ret;
5991 }
5992
5993 static tree
5994 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5995 bool *strict_overflow_p)
5996 {
5997 tree type = TREE_TYPE (t);
5998 enum tree_code tcode = TREE_CODE (t);
5999 tree ctype = (wide_type != 0
6000 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6001 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6002 ? wide_type : type);
6003 tree t1, t2;
6004 int same_p = tcode == code;
6005 tree op0 = NULL_TREE, op1 = NULL_TREE;
6006 bool sub_strict_overflow_p;
6007
6008 /* Don't deal with constants of zero here; they confuse the code below. */
6009 if (integer_zerop (c))
6010 return NULL_TREE;
6011
6012 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6013 op0 = TREE_OPERAND (t, 0);
6014
6015 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6016 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6017
6018 /* Note that we need not handle conditional operations here since fold
6019 already handles those cases. So just do arithmetic here. */
6020 switch (tcode)
6021 {
6022 case INTEGER_CST:
6023 /* For a constant, we can always simplify if we are a multiply
6024 or (for divide and modulus) if it is a multiple of our constant. */
6025 if (code == MULT_EXPR
6026 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6027 TYPE_SIGN (type)))
6028 {
6029 tree tem = const_binop (code, fold_convert (ctype, t),
6030 fold_convert (ctype, c));
6031 /* If the multiplication overflowed, we lost information on it.
6032 See PR68142 and PR69845. */
6033 if (TREE_OVERFLOW (tem))
6034 return NULL_TREE;
6035 return tem;
6036 }
6037 break;
6038
6039 CASE_CONVERT: case NON_LVALUE_EXPR:
6040 /* If op0 is an expression ... */
6041 if ((COMPARISON_CLASS_P (op0)
6042 || UNARY_CLASS_P (op0)
6043 || BINARY_CLASS_P (op0)
6044 || VL_EXP_CLASS_P (op0)
6045 || EXPRESSION_CLASS_P (op0))
6046 /* ... and has wrapping overflow, and its type is smaller
6047 than ctype, then we cannot pass through as widening. */
6048 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6049 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6050 && (TYPE_PRECISION (ctype)
6051 > TYPE_PRECISION (TREE_TYPE (op0))))
6052 /* ... or this is a truncation (t is narrower than op0),
6053 then we cannot pass through this narrowing. */
6054 || (TYPE_PRECISION (type)
6055 < TYPE_PRECISION (TREE_TYPE (op0)))
6056 /* ... or signedness changes for division or modulus,
6057 then we cannot pass through this conversion. */
6058 || (code != MULT_EXPR
6059 && (TYPE_UNSIGNED (ctype)
6060 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6061 /* ... or has undefined overflow while the converted to
6062 type has not, we cannot do the operation in the inner type
6063 as that would introduce undefined overflow. */
6064 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6065 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6066 && !TYPE_OVERFLOW_UNDEFINED (type))))
6067 break;
6068
6069 /* Pass the constant down and see if we can make a simplification. If
6070 we can, replace this expression with the inner simplification for
6071 possible later conversion to our or some other type. */
6072 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6073 && TREE_CODE (t2) == INTEGER_CST
6074 && !TREE_OVERFLOW (t2)
6075 && (0 != (t1 = extract_muldiv (op0, t2, code,
6076 code == MULT_EXPR
6077 ? ctype : NULL_TREE,
6078 strict_overflow_p))))
6079 return t1;
6080 break;
6081
6082 case ABS_EXPR:
6083 /* If widening the type changes it from signed to unsigned, then we
6084 must avoid building ABS_EXPR itself as unsigned. */
6085 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6086 {
6087 tree cstype = (*signed_type_for) (ctype);
6088 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6089 != 0)
6090 {
6091 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6092 return fold_convert (ctype, t1);
6093 }
6094 break;
6095 }
6096 /* If the constant is negative, we cannot simplify this. */
6097 if (tree_int_cst_sgn (c) == -1)
6098 break;
6099 /* FALLTHROUGH */
6100 case NEGATE_EXPR:
6101 /* For division and modulus, type can't be unsigned, as e.g.
6102 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6103 For signed types, even with wrapping overflow, this is fine. */
6104 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6105 break;
6106 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6107 != 0)
6108 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6109 break;
6110
6111 case MIN_EXPR: case MAX_EXPR:
6112 /* If widening the type changes the signedness, then we can't perform
6113 this optimization as that changes the result. */
6114 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6115 break;
6116
6117 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6118 sub_strict_overflow_p = false;
6119 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6120 &sub_strict_overflow_p)) != 0
6121 && (t2 = extract_muldiv (op1, c, code, wide_type,
6122 &sub_strict_overflow_p)) != 0)
6123 {
6124 if (tree_int_cst_sgn (c) < 0)
6125 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6126 if (sub_strict_overflow_p)
6127 *strict_overflow_p = true;
6128 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6129 fold_convert (ctype, t2));
6130 }
6131 break;
6132
6133 case LSHIFT_EXPR: case RSHIFT_EXPR:
6134 /* If the second operand is constant, this is a multiplication
6135 or floor division, by a power of two, so we can treat it that
6136 way unless the multiplier or divisor overflows. Signed
6137 left-shift overflow is implementation-defined rather than
6138 undefined in C90, so do not convert signed left shift into
6139 multiplication. */
6140 if (TREE_CODE (op1) == INTEGER_CST
6141 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6142 /* const_binop may not detect overflow correctly,
6143 so check for it explicitly here. */
6144 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6145 wi::to_wide (op1))
6146 && 0 != (t1 = fold_convert (ctype,
6147 const_binop (LSHIFT_EXPR,
6148 size_one_node,
6149 op1)))
6150 && !TREE_OVERFLOW (t1))
6151 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6152 ? MULT_EXPR : FLOOR_DIV_EXPR,
6153 ctype,
6154 fold_convert (ctype, op0),
6155 t1),
6156 c, code, wide_type, strict_overflow_p);
6157 break;
6158
6159 case PLUS_EXPR: case MINUS_EXPR:
6160 /* See if we can eliminate the operation on both sides. If we can, we
6161 can return a new PLUS or MINUS. If we can't, the only remaining
6162 cases where we can do anything are if the second operand is a
6163 constant. */
6164 sub_strict_overflow_p = false;
6165 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6166 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6167 if (t1 != 0 && t2 != 0
6168 && TYPE_OVERFLOW_WRAPS (ctype)
6169 && (code == MULT_EXPR
6170 /* If not multiplication, we can only do this if both operands
6171 are divisible by c. */
6172 || (multiple_of_p (ctype, op0, c)
6173 && multiple_of_p (ctype, op1, c))))
6174 {
6175 if (sub_strict_overflow_p)
6176 *strict_overflow_p = true;
6177 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6178 fold_convert (ctype, t2));
6179 }
6180
6181 /* If this was a subtraction, negate OP1 and set it to be an addition.
6182 This simplifies the logic below. */
6183 if (tcode == MINUS_EXPR)
6184 {
6185 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6186 /* If OP1 was not easily negatable, the constant may be OP0. */
6187 if (TREE_CODE (op0) == INTEGER_CST)
6188 {
6189 std::swap (op0, op1);
6190 std::swap (t1, t2);
6191 }
6192 }
6193
6194 if (TREE_CODE (op1) != INTEGER_CST)
6195 break;
6196
6197 /* If either OP1 or C are negative, this optimization is not safe for
6198 some of the division and remainder types while for others we need
6199 to change the code. */
6200 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6201 {
6202 if (code == CEIL_DIV_EXPR)
6203 code = FLOOR_DIV_EXPR;
6204 else if (code == FLOOR_DIV_EXPR)
6205 code = CEIL_DIV_EXPR;
6206 else if (code != MULT_EXPR
6207 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6208 break;
6209 }
6210
6211 /* If it's a multiply or a division/modulus operation of a multiple
6212 of our constant, do the operation and verify it doesn't overflow. */
6213 if (code == MULT_EXPR
6214 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6215 TYPE_SIGN (type)))
6216 {
6217 op1 = const_binop (code, fold_convert (ctype, op1),
6218 fold_convert (ctype, c));
6219 /* We allow the constant to overflow with wrapping semantics. */
6220 if (op1 == 0
6221 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6222 break;
6223 }
6224 else
6225 break;
6226
6227 /* If we have an unsigned type, we cannot widen the operation since it
6228 will change the result if the original computation overflowed. */
6229 if (TYPE_UNSIGNED (ctype) && ctype != type)
6230 break;
6231
6232 /* The last case is if we are a multiply. In that case, we can
6233 apply the distributive law to commute the multiply and addition
6234 if the multiplication of the constants doesn't overflow
6235 and overflow is defined. With undefined overflow
6236 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6237 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6238 return fold_build2 (tcode, ctype,
6239 fold_build2 (code, ctype,
6240 fold_convert (ctype, op0),
6241 fold_convert (ctype, c)),
6242 op1);
6243
6244 break;
6245
6246 case MULT_EXPR:
6247 /* We have a special case here if we are doing something like
6248 (C * 8) % 4 since we know that's zero. */
6249 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6250 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6251 /* If the multiplication can overflow we cannot optimize this. */
6252 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6253 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6254 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6255 TYPE_SIGN (type)))
6256 {
6257 *strict_overflow_p = true;
6258 return omit_one_operand (type, integer_zero_node, op0);
6259 }
6260
6261 /* ... fall through ... */
6262
6263 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6264 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6265 /* If we can extract our operation from the LHS, do so and return a
6266 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6267 do something only if the second operand is a constant. */
6268 if (same_p
6269 && TYPE_OVERFLOW_WRAPS (ctype)
6270 && (t1 = extract_muldiv (op0, c, code, wide_type,
6271 strict_overflow_p)) != 0)
6272 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6273 fold_convert (ctype, op1));
6274 else if (tcode == MULT_EXPR && code == MULT_EXPR
6275 && TYPE_OVERFLOW_WRAPS (ctype)
6276 && (t1 = extract_muldiv (op1, c, code, wide_type,
6277 strict_overflow_p)) != 0)
6278 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6279 fold_convert (ctype, t1));
6280 else if (TREE_CODE (op1) != INTEGER_CST)
6281 return 0;
6282
6283 /* If these are the same operation types, we can associate them
6284 assuming no overflow. */
6285 if (tcode == code)
6286 {
6287 bool overflow_p = false;
6288 bool overflow_mul_p;
6289 signop sign = TYPE_SIGN (ctype);
6290 unsigned prec = TYPE_PRECISION (ctype);
6291 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6292 wi::to_wide (c, prec),
6293 sign, &overflow_mul_p);
6294 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6295 if (overflow_mul_p
6296 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6297 overflow_p = true;
6298 if (!overflow_p)
6299 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6300 wide_int_to_tree (ctype, mul));
6301 }
6302
6303 /* If these operations "cancel" each other, we have the main
6304 optimizations of this pass, which occur when either constant is a
6305 multiple of the other, in which case we replace this with an
6306 operation of either CODE or TCODE.
6307
6308 If we have an unsigned type, we cannot do this since it will change
6309 the result if the original computation overflowed. */
6310 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6311 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6312 || (tcode == MULT_EXPR
6313 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6314 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6315 && code != MULT_EXPR)))
6316 {
6317 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6318 TYPE_SIGN (type)))
6319 {
6320 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6321 *strict_overflow_p = true;
6322 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6323 fold_convert (ctype,
6324 const_binop (TRUNC_DIV_EXPR,
6325 op1, c)));
6326 }
6327 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6328 TYPE_SIGN (type)))
6329 {
6330 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6331 *strict_overflow_p = true;
6332 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6333 fold_convert (ctype,
6334 const_binop (TRUNC_DIV_EXPR,
6335 c, op1)));
6336 }
6337 }
6338 break;
6339
6340 default:
6341 break;
6342 }
6343
6344 return 0;
6345 }
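
/* Illustrative sketch: the entry-point example `((X * 8) + (Y * 16)) / 4'
   goes through the PLUS_EXPR case above; both addends are multiples of
   4, so the division distributes:

       ((X * 8) + (Y * 16)) / 4   ==>   (X * 2) + (Y * 4)

   (in this version additionally guarded by TYPE_OVERFLOW_WRAPS on the
   computation type).  */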
6346 \f
6347 /* Return a node which has the indicated constant VALUE (either 0 or
6348 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6349 and is of the indicated TYPE. */
6350
6351 tree
6352 constant_boolean_node (bool value, tree type)
6353 {
6354 if (type == integer_type_node)
6355 return value ? integer_one_node : integer_zero_node;
6356 else if (type == boolean_type_node)
6357 return value ? boolean_true_node : boolean_false_node;
6358 else if (TREE_CODE (type) == VECTOR_TYPE)
6359 return build_vector_from_val (type,
6360 build_int_cst (TREE_TYPE (type),
6361 value ? -1 : 0));
6362 else
6363 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6364 }
6365
6366
6367 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6368 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6369 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6370 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6371 COND is the first argument to CODE; otherwise (as in the example
6372 given here), it is the second argument. TYPE is the type of the
6373 original expression. Return NULL_TREE if no simplification is
6374 possible. */
6375
6376 static tree
6377 fold_binary_op_with_conditional_arg (location_t loc,
6378 enum tree_code code,
6379 tree type, tree op0, tree op1,
6380 tree cond, tree arg, int cond_first_p)
6381 {
6382 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6383 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6384 tree test, true_value, false_value;
6385 tree lhs = NULL_TREE;
6386 tree rhs = NULL_TREE;
6387 enum tree_code cond_code = COND_EXPR;
6388
6389 if (TREE_CODE (cond) == COND_EXPR
6390 || TREE_CODE (cond) == VEC_COND_EXPR)
6391 {
6392 test = TREE_OPERAND (cond, 0);
6393 true_value = TREE_OPERAND (cond, 1);
6394 false_value = TREE_OPERAND (cond, 2);
6395 /* If this operand throws an exception, then it does not make
6396 sense to try to perform a logical or arithmetic operation
6397 involving it. */
6398 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6399 lhs = true_value;
6400 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6401 rhs = false_value;
6402 }
6403 else if (!(TREE_CODE (type) != VECTOR_TYPE
6404 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6405 {
6406 tree testtype = TREE_TYPE (cond);
6407 test = cond;
6408 true_value = constant_boolean_node (true, testtype);
6409 false_value = constant_boolean_node (false, testtype);
6410 }
6411 else
6412 /* Detect the case of mixing vector and scalar types - bail out. */
6413 return NULL_TREE;
6414
6415 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6416 cond_code = VEC_COND_EXPR;
6417
6418 /* This transformation is only worthwhile if we don't have to wrap ARG
6419 in a SAVE_EXPR and the operation can be simplified without recursing
6420 on at least one of the branches once it's pushed inside the COND_EXPR. */
6421 if (!TREE_CONSTANT (arg)
6422 && (TREE_SIDE_EFFECTS (arg)
6423 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6424 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6425 return NULL_TREE;
6426
6427 arg = fold_convert_loc (loc, arg_type, arg);
6428 if (lhs == 0)
6429 {
6430 true_value = fold_convert_loc (loc, cond_type, true_value);
6431 if (cond_first_p)
6432 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6433 else
6434 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6435 }
6436 if (rhs == 0)
6437 {
6438 false_value = fold_convert_loc (loc, cond_type, false_value);
6439 if (cond_first_p)
6440 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6441 else
6442 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6443 }
6444
6445 /* Check that we have simplified at least one of the branches. */
6446 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6447 return NULL_TREE;
6448
6449 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6450 }
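
/* Illustrative sketch (hypothetical function): with CODE == PLUS_EXPR,
   ARG == `a' and COND == `b ? 1 : 0',

       int f (int a, int b) { return a + (b ? 1 : 0); }

   is rewritten as `b ? a + 1 : a + 0' and the false arm folds to `a';
   the checks above guarantee that at least one arm simplifies, so the
   duplication of ARG pays for itself.  */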
6451
6452 \f
6453 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6454
6455 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6456 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6457 ADDEND is the same as X.
6458
6459 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6460 and finite. The problematic cases are when X is zero, and its mode
6461 has signed zeros. In the case of rounding towards -infinity,
6462 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6463 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6464
6465 bool
6466 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6467 {
6468 if (!real_zerop (addend))
6469 return false;
6470
6471 /* Don't allow the fold with -fsignaling-nans. */
6472 if (HONOR_SNANS (element_mode (type)))
6473 return false;
6474
6475 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6476 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6477 return true;
6478
6479 /* In a vector or complex, we would need to check the sign of all zeros. */
6480 if (TREE_CODE (addend) != REAL_CST)
6481 return false;
6482
6483 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6484 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6485 negate = !negate;
6486
6487 /* The mode has signed zeros, and we have to honor their sign.
6488 In this situation, there is only one case we can return true for.
6489 X - 0 is the same as X unless rounding towards -infinity is
6490 supported. */
6491 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6492 }
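
/* Host-side illustration (assuming IEEE semantics and the default
   rounding mode):

       #include <stdio.h>

       int main (void)
       {
         double x = -0.0;
         printf ("%g\n", x + 0.0);   // prints 0: -0 + 0 is +0
         printf ("%g\n", x - 0.0);   // prints -0: X - 0 preserves X
         return 0;
       }

   which is why X + 0.0 folds to X only when signed zeros need not be
   honored, while X - 0.0 additionally requires that sign-dependent
   rounding not be honored.  */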
6493
6494 /* Subroutine of match.pd that optimizes comparisons of a division by
6495 a nonzero integer constant against an integer constant, i.e.
6496 X/C1 op C2.
6497
6498 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6499 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
6500
6501 enum tree_code
6502 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6503 tree *hi, bool *neg_overflow)
6504 {
6505 tree prod, tmp, type = TREE_TYPE (c1);
6506 signop sign = TYPE_SIGN (type);
6507 bool overflow;
6508
6509 /* We have to do this the hard way to detect unsigned overflow.
6510 prod = int_const_binop (MULT_EXPR, c1, c2); */
6511 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
6512 prod = force_fit_type (type, val, -1, overflow);
6513 *neg_overflow = false;
6514
6515 if (sign == UNSIGNED)
6516 {
6517 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6518 *lo = prod;
6519
6520 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6521 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
6522 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6523 }
6524 else if (tree_int_cst_sgn (c1) >= 0)
6525 {
6526 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6527 switch (tree_int_cst_sgn (c2))
6528 {
6529 case -1:
6530 *neg_overflow = true;
6531 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6532 *hi = prod;
6533 break;
6534
6535 case 0:
6536 *lo = fold_negate_const (tmp, type);
6537 *hi = tmp;
6538 break;
6539
6540 case 1:
6541 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6542 *lo = prod;
6543 break;
6544
6545 default:
6546 gcc_unreachable ();
6547 }
6548 }
6549 else
6550 {
6551 /* A negative divisor reverses the relational operators. */
6552 code = swap_tree_comparison (code);
6553
6554 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6555 switch (tree_int_cst_sgn (c2))
6556 {
6557 case -1:
6558 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6559 *lo = prod;
6560 break;
6561
6562 case 0:
6563 *hi = fold_negate_const (tmp, type);
6564 *lo = tmp;
6565 break;
6566
6567 case 1:
6568 *neg_overflow = true;
6569 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6570 *hi = prod;
6571 break;
6572
6573 default:
6574 gcc_unreachable ();
6575 }
6576 }
6577
6578 if (code != EQ_EXPR && code != NE_EXPR)
6579 return code;
6580
6581 if (TREE_OVERFLOW (*lo)
6582 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6583 *lo = NULL_TREE;
6584 if (TREE_OVERFLOW (*hi)
6585 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6586 *hi = NULL_TREE;
6587
6588 return code;
6589 }
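
/* Worked example (illustrative): for signed X and `X / 3 == 2' we get
   prod == 6 and tmp == 2, so

       X / 3 == 2   ==>   X >= 6 && X <= 8      (*lo == 6, *hi == 8)

   and a negative divisor first swaps the comparison direction.  */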
6590
6591
6592 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6593 equality/inequality test, then return a simplified form of the test
6594 using a sign test. Otherwise return NULL. TYPE is the desired
6595 result type. */
6596
6597 static tree
6598 fold_single_bit_test_into_sign_test (location_t loc,
6599 enum tree_code code, tree arg0, tree arg1,
6600 tree result_type)
6601 {
6602 /* If this is testing a single bit, we can optimize the test. */
6603 if ((code == NE_EXPR || code == EQ_EXPR)
6604 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6605 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6606 {
6607 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6608 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6609 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6610
6611 if (arg00 != NULL_TREE
6612 /* This is only a win if casting to a signed type is cheap,
6613 i.e. when arg00's type is not a partial mode. */
6614 && type_has_mode_precision_p (TREE_TYPE (arg00)))
6615 {
6616 tree stype = signed_type_for (TREE_TYPE (arg00));
6617 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6618 result_type,
6619 fold_convert_loc (loc, stype, arg00),
6620 build_int_cst (stype, 0));
6621 }
6622 }
6623
6624 return NULL_TREE;
6625 }
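
/* Illustrative sketch: when C is the sign bit of a 32-bit type,

       (x & 0x80000000) != 0   ==>   x < 0
       (x & 0x80000000) == 0   ==>   x >= 0

   after converting X to the corresponding signed type.  */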
6626
6627 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6628 equality/inequality test, then return a simplified form of
6629 the test using shifts and logical operations. Otherwise return
6630 NULL. TYPE is the desired result type. */
6631
6632 tree
6633 fold_single_bit_test (location_t loc, enum tree_code code,
6634 tree arg0, tree arg1, tree result_type)
6635 {
6636 /* If this is testing a single bit, we can optimize the test. */
6637 if ((code == NE_EXPR || code == EQ_EXPR)
6638 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6639 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6640 {
6641 tree inner = TREE_OPERAND (arg0, 0);
6642 tree type = TREE_TYPE (arg0);
6643 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6644 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
6645 int ops_unsigned;
6646 tree signed_type, unsigned_type, intermediate_type;
6647 tree tem, one;
6648
6649 /* First, see if we can fold the single bit test into a sign-bit
6650 test. */
6651 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6652 result_type);
6653 if (tem)
6654 return tem;
6655
6656 /* Otherwise we have (A & C) != 0 where C is a single bit,
6657 convert that into ((A >> C2) & 1), where C2 = log2 (C).
6658 Similarly for (A & C) == 0. */
6659
6660 /* If INNER is a right shift of a constant and it plus BITNUM does
6661 not overflow, adjust BITNUM and INNER. */
6662 if (TREE_CODE (inner) == RSHIFT_EXPR
6663 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6664 && bitnum < TYPE_PRECISION (type)
6665 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
6666 TYPE_PRECISION (type) - bitnum))
6667 {
6668 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6669 inner = TREE_OPERAND (inner, 0);
6670 }
6671
6672 /* If we are going to be able to omit the AND below, we must do our
6673 operations as unsigned. If we must use the AND, we have a choice.
6674 Normally unsigned is faster, but for some machines signed is. */
6675 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6676 && !flag_syntax_only) ? 0 : 1;
6677
6678 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6679 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6680 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6681 inner = fold_convert_loc (loc, intermediate_type, inner);
6682
6683 if (bitnum != 0)
6684 inner = build2 (RSHIFT_EXPR, intermediate_type,
6685 inner, size_int (bitnum));
6686
6687 one = build_int_cst (intermediate_type, 1);
6688
6689 if (code == EQ_EXPR)
6690 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6691
6692 /* Put the AND last so it can combine with more things. */
6693 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6694
6695 /* Make sure to return the proper type. */
6696 inner = fold_convert_loc (loc, result_type, inner);
6697
6698 return inner;
6699 }
6700 return NULL_TREE;
6701 }
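
/* Illustrative sketch: for C == 8 (bit 3) the fold above produces

       (x & 8) != 0   ==>   (x >> 3) & 1
       (x & 8) == 0   ==>   ((x >> 3) ^ 1) & 1

   with the shift, XOR and AND performed in INTERMEDIATE_TYPE and the
   result converted to RESULT_TYPE.  */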
6702
6703 /* Test whether it is preferable to swap two operands, ARG0 and
6704 ARG1, for example because ARG0 is an integer constant and ARG1
6705 isn't. */
6706
6707 bool
6708 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6709 {
6710 if (CONSTANT_CLASS_P (arg1))
6711 return 0;
6712 if (CONSTANT_CLASS_P (arg0))
6713 return 1;
6714
6715 STRIP_NOPS (arg0);
6716 STRIP_NOPS (arg1);
6717
6718 if (TREE_CONSTANT (arg1))
6719 return 0;
6720 if (TREE_CONSTANT (arg0))
6721 return 1;
6722
6723 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6724 for commutative and comparison operators. Ensuring a canonical
6725 form allows the optimizers to find additional redundancies without
6726 having to explicitly check for both orderings. */
6727 if (TREE_CODE (arg0) == SSA_NAME
6728 && TREE_CODE (arg1) == SSA_NAME
6729 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6730 return 1;
6731
6732 /* Put SSA_NAMEs last. */
6733 if (TREE_CODE (arg1) == SSA_NAME)
6734 return 0;
6735 if (TREE_CODE (arg0) == SSA_NAME)
6736 return 1;
6737
6738 /* Put variables last. */
6739 if (DECL_P (arg1))
6740 return 0;
6741 if (DECL_P (arg0))
6742 return 1;
6743
6744 return 0;
6745 }
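
/* Illustrative sketch: for a commutative CODE the caller uses this
   predicate to canonicalize operand order, e.g.

       1 + x      ==>   x + 1        (constants last)
       b_3 + a_2  ==>   a_2 + b_3    (lower SSA version first)

   so later passes need only match one ordering.  */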
6746
6747
6748 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6749 means A >= Y && A != MAX, but in this case we know that
6750 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6751
6752 static tree
6753 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6754 {
6755 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6756
6757 if (TREE_CODE (bound) == LT_EXPR)
6758 a = TREE_OPERAND (bound, 0);
6759 else if (TREE_CODE (bound) == GT_EXPR)
6760 a = TREE_OPERAND (bound, 1);
6761 else
6762 return NULL_TREE;
6763
6764 typea = TREE_TYPE (a);
6765 if (!INTEGRAL_TYPE_P (typea)
6766 && !POINTER_TYPE_P (typea))
6767 return NULL_TREE;
6768
6769 if (TREE_CODE (ineq) == LT_EXPR)
6770 {
6771 a1 = TREE_OPERAND (ineq, 1);
6772 y = TREE_OPERAND (ineq, 0);
6773 }
6774 else if (TREE_CODE (ineq) == GT_EXPR)
6775 {
6776 a1 = TREE_OPERAND (ineq, 0);
6777 y = TREE_OPERAND (ineq, 1);
6778 }
6779 else
6780 return NULL_TREE;
6781
6782 if (TREE_TYPE (a1) != typea)
6783 return NULL_TREE;
6784
6785 if (POINTER_TYPE_P (typea))
6786 {
6787 /* Convert the pointer types into integers before taking the difference. */
6788 tree ta = fold_convert_loc (loc, ssizetype, a);
6789 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6790 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6791 }
6792 else
6793 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6794
6795 if (!diff || !integer_onep (diff))
6796 return NULL_TREE;
6797
6798 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6799 }
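
/* Illustrative sketch: with BOUND == `a < n' and INEQ == `a + 1 > i',
   DIFF == (a + 1) - a == 1, so

       a < n && a + 1 > i   ==>   a < n && a >= i

   which is safe because BOUND guarantees A < N <= MAX, ruling out the
   A + 1 wrap-around.  */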
6800
6801 /* Fold a sum or difference of at least one multiplication.
6802 Returns the folded tree or NULL if no simplification could be made. */
6803
6804 static tree
6805 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6806 tree arg0, tree arg1)
6807 {
6808 tree arg00, arg01, arg10, arg11;
6809 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6810
6811 /* (A * C) +- (B * C) -> (A+-B) * C.
6812 (A * C) +- A -> A * (C+-1).
6813 We are most concerned about the case where C is a constant,
6814 but other combinations show up during loop reduction. Since
6815 it is not difficult, try all four possibilities. */
6816
6817 if (TREE_CODE (arg0) == MULT_EXPR)
6818 {
6819 arg00 = TREE_OPERAND (arg0, 0);
6820 arg01 = TREE_OPERAND (arg0, 1);
6821 }
6822 else if (TREE_CODE (arg0) == INTEGER_CST)
6823 {
6824 arg00 = build_one_cst (type);
6825 arg01 = arg0;
6826 }
6827 else
6828 {
6829 /* We cannot generate constant 1 for fract. */
6830 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6831 return NULL_TREE;
6832 arg00 = arg0;
6833 arg01 = build_one_cst (type);
6834 }
6835 if (TREE_CODE (arg1) == MULT_EXPR)
6836 {
6837 arg10 = TREE_OPERAND (arg1, 0);
6838 arg11 = TREE_OPERAND (arg1, 1);
6839 }
6840 else if (TREE_CODE (arg1) == INTEGER_CST)
6841 {
6842 arg10 = build_one_cst (type);
6843 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6844 the purpose of this canonicalization. */
6845 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
6846 && negate_expr_p (arg1)
6847 && code == PLUS_EXPR)
6848 {
6849 arg11 = negate_expr (arg1);
6850 code = MINUS_EXPR;
6851 }
6852 else
6853 arg11 = arg1;
6854 }
6855 else
6856 {
6857 /* We cannot generate constant 1 for fract. */
6858 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6859 return NULL_TREE;
6860 arg10 = arg1;
6861 arg11 = build_one_cst (type);
6862 }
6863 same = NULL_TREE;
6864
6865 /* Prefer factoring a common non-constant. */
6866 if (operand_equal_p (arg00, arg10, 0))
6867 same = arg00, alt0 = arg01, alt1 = arg11;
6868 else if (operand_equal_p (arg01, arg11, 0))
6869 same = arg01, alt0 = arg00, alt1 = arg10;
6870 else if (operand_equal_p (arg00, arg11, 0))
6871 same = arg00, alt0 = arg01, alt1 = arg10;
6872 else if (operand_equal_p (arg01, arg10, 0))
6873 same = arg01, alt0 = arg00, alt1 = arg11;
6874
6875 /* No identical multiplicands; see if we can find a common
6876 power-of-two factor in non-power-of-two multiplies. This
6877 can help in multi-dimensional array access. */
6878 else if (tree_fits_shwi_p (arg01)
6879 && tree_fits_shwi_p (arg11))
6880 {
6881 HOST_WIDE_INT int01, int11, tmp;
6882 bool swap = false;
6883 tree maybe_same;
6884 int01 = tree_to_shwi (arg01);
6885 int11 = tree_to_shwi (arg11);
6886
6887 /* Move min of absolute values to int11. */
6888 if (absu_hwi (int01) < absu_hwi (int11))
6889 {
6890 tmp = int01, int01 = int11, int11 = tmp;
6891 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6892 maybe_same = arg01;
6893 swap = true;
6894 }
6895 else
6896 maybe_same = arg11;
6897
6898 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6899 /* The remainder should not be a constant, otherwise we
6900 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6901 increase the number of multiplications needed. */
6902 && TREE_CODE (arg10) != INTEGER_CST)
6903 {
6904 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6905 build_int_cst (TREE_TYPE (arg00),
6906 int01 / int11));
6907 alt1 = arg10;
6908 same = maybe_same;
6909 if (swap)
6910 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6911 }
6912 }
6913
6914 if (!same)
6915 return NULL_TREE;
6916
6917 if (! INTEGRAL_TYPE_P (type)
6918 || TYPE_OVERFLOW_WRAPS (type)
6919 /* We are neither factoring zero nor minus one. */
6920 || TREE_CODE (same) == INTEGER_CST)
6921 return fold_build2_loc (loc, MULT_EXPR, type,
6922 fold_build2_loc (loc, code, type,
6923 fold_convert_loc (loc, type, alt0),
6924 fold_convert_loc (loc, type, alt1)),
6925 fold_convert_loc (loc, type, same));
6926
6927 /* Same may be zero and thus the operation 'code' may overflow. Likewise
6928 same may be minus one and thus the multiplication may overflow. Perform
6929 the operations in an unsigned type. */
6930 tree utype = unsigned_type_for (type);
6931 tree tem = fold_build2_loc (loc, code, utype,
6932 fold_convert_loc (loc, utype, alt0),
6933 fold_convert_loc (loc, utype, alt1));
6934 /* If the sum evaluated to a constant that is not -INF, the
6935 multiplication cannot overflow. */
6936 if (TREE_CODE (tem) == INTEGER_CST
6937 && (wi::to_wide (tem)
6938 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
6939 return fold_build2_loc (loc, MULT_EXPR, type,
6940 fold_convert (type, tem), same);
6941
6942 return fold_convert_loc (loc, type,
6943 fold_build2_loc (loc, MULT_EXPR, utype, tem,
6944 fold_convert_loc (loc, utype, same)));
6945 }
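
/* Illustrative sketch: `i * 4 + i' matches the (A * C) +- A shape with
   SAME == i, ALT0 == 4 and ALT1 == 1, factoring as

       i * 4 + i   ==>   i * (4 + 1)   ==>   i * 5

   For a signed type with undefined overflow and a non-constant SAME,
   the inner addition is done in the unsigned type first, as above.  */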
6946
6947 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6948 specified by EXPR into the buffer PTR of length LEN bytes.
6949 Return the number of bytes placed in the buffer, or zero
6950 upon failure. */
6951
6952 static int
6953 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6954 {
6955 tree type = TREE_TYPE (expr);
6956 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
6957 int byte, offset, word, words;
6958 unsigned char value;
6959
6960 if ((off == -1 && total_bytes > len) || off >= total_bytes)
6961 return 0;
6962 if (off == -1)
6963 off = 0;
6964
6965 if (ptr == NULL)
6966 /* Dry run. */
6967 return MIN (len, total_bytes - off);
6968
6969 words = total_bytes / UNITS_PER_WORD;
6970
6971 for (byte = 0; byte < total_bytes; byte++)
6972 {
6973 int bitpos = byte * BITS_PER_UNIT;
6974 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
6975 number of bytes. */
6976 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
6977
6978 if (total_bytes > UNITS_PER_WORD)
6979 {
6980 word = byte / UNITS_PER_WORD;
6981 if (WORDS_BIG_ENDIAN)
6982 word = (words - 1) - word;
6983 offset = word * UNITS_PER_WORD;
6984 if (BYTES_BIG_ENDIAN)
6985 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6986 else
6987 offset += byte % UNITS_PER_WORD;
6988 }
6989 else
6990 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6991 if (offset >= off && offset - off < len)
6992 ptr[offset - off] = value;
6993 }
6994 return MIN (len, total_bytes - off);
6995 }
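/* Illustrative example (assumed 32-bit int, little-endian target,
   8-bit units; not part of the folder):

     tree cst = build_int_cst (integer_type_node, 0x01020304);
     unsigned char buf[4];
     int n = native_encode_int (cst, buf, sizeof buf, -1);

   yields n == 4 and buf == { 0x04, 0x03, 0x02, 0x01 }.  Passing
   PTR == NULL performs a dry run that only computes the size.  */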
6996
6997
6998 /* Subroutine of native_encode_expr. Encode the FIXED_CST
6999 specified by EXPR into the buffer PTR of length LEN bytes.
7000 Return the number of bytes placed in the buffer, or zero
7001 upon failure. */
7002
7003 static int
7004 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7005 {
7006 tree type = TREE_TYPE (expr);
7007 scalar_mode mode = SCALAR_TYPE_MODE (type);
7008 int total_bytes = GET_MODE_SIZE (mode);
7009 FIXED_VALUE_TYPE value;
7010 tree i_value, i_type;
7011
7012 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7013 return 0;
7014
7015 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7016
7017 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7018 return 0;
7019
7020 value = TREE_FIXED_CST (expr);
7021 i_value = double_int_to_tree (i_type, value.data);
7022
7023 return native_encode_int (i_value, ptr, len, off);
7024 }
7025
7026
7027 /* Subroutine of native_encode_expr. Encode the REAL_CST
7028 specified by EXPR into the buffer PTR of length LEN bytes.
7029 Return the number of bytes placed in the buffer, or zero
7030 upon failure. */
7031
7032 static int
7033 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7034 {
7035 tree type = TREE_TYPE (expr);
7036 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7037 int byte, offset, word, words, bitpos;
7038 unsigned char value;
7039
7040 /* There are always 32 bits in each long, no matter the size of
7041 the host's long. We handle floating point representations with
7042 up to 192 bits. */
7043 long tmp[6];
7044
7045 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7046 return 0;
7047 if (off == -1)
7048 off = 0;
7049
7050 if (ptr == NULL)
7051 /* Dry run. */
7052 return MIN (len, total_bytes - off);
7053
7054 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7055
7056 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7057
7058 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7059 bitpos += BITS_PER_UNIT)
7060 {
7061 byte = (bitpos / BITS_PER_UNIT) & 3;
7062 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7063
7064 if (UNITS_PER_WORD < 4)
7065 {
7066 word = byte / UNITS_PER_WORD;
7067 if (WORDS_BIG_ENDIAN)
7068 word = (words - 1) - word;
7069 offset = word * UNITS_PER_WORD;
7070 if (BYTES_BIG_ENDIAN)
7071 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7072 else
7073 offset += byte % UNITS_PER_WORD;
7074 }
7075 else
7076 {
7077 offset = byte;
7078 if (BYTES_BIG_ENDIAN)
7079 {
7080 /* Reverse bytes within each long, or within the entire float
7081 if it's smaller than a long (for HFmode). */
7082 offset = MIN (3, total_bytes - 1) - offset;
7083 gcc_assert (offset >= 0);
7084 }
7085 }
7086 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7087 if (offset >= off
7088 && offset - off < len)
7089 ptr[offset - off] = value;
7090 }
7091 return MIN (len, total_bytes - off);
7092 }
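/* Illustrative example (assumed IEEE double, little-endian target;
   not part of the folder): encoding the REAL_CST 1.0 of type double
   (bit pattern 0x3FF0000000000000) stores the bytes

     00 00 00 00 00 00 F0 3F

   The 32-bit "long" chunks produced by real_to_target are reassembled
   into target byte order by the loop above.  */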
7093
7094 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7095 specified by EXPR into the buffer PTR of length LEN bytes.
7096 Return the number of bytes placed in the buffer, or zero
7097 upon failure. */
7098
7099 static int
7100 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7101 {
7102 int rsize, isize;
7103 tree part;
7104
7105 part = TREE_REALPART (expr);
7106 rsize = native_encode_expr (part, ptr, len, off);
7107 if (off == -1 && rsize == 0)
7108 return 0;
7109 part = TREE_IMAGPART (expr);
7110 if (off != -1)
7111 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7112 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7113 len - rsize, off);
7114 if (off == -1 && isize != rsize)
7115 return 0;
7116 return rsize + isize;
7117 }
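/* Illustrative layout (assumed element types; not part of the folder):
   a COMPLEX_CST is encoded as the real part immediately followed by
   the imaginary part, each in its element mode's size.  E.g. for
   _Complex float 1.0f + 2.0fi, bytes 0-3 hold 1.0f and bytes 4-7 hold
   2.0f; a non-negative OFF is first charged against the real part.  */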
7118
7119
7120 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7121 specified by EXPR into the buffer PTR of length LEN bytes.
7122 Return the number of bytes placed in the buffer, or zero
7123 upon failure. */
7124
7125 static int
7126 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7127 {
7128 unsigned i, count;
7129 int size, offset;
7130 tree itype, elem;
7131
7132 offset = 0;
7133 count = VECTOR_CST_NELTS (expr);
7134 itype = TREE_TYPE (TREE_TYPE (expr));
7135 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7136 for (i = 0; i < count; i++)
7137 {
7138 if (off >= size)
7139 {
7140 off -= size;
7141 continue;
7142 }
7143 elem = VECTOR_CST_ELT (expr, i);
7144 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7145 len - offset, off);
7146 if ((off == -1 && res != size) || res == 0)
7147 return 0;
7148 offset += res;
7149 if (offset >= len)
7150 return offset;
7151 if (off != -1)
7152 off = 0;
7153 }
7154 return offset;
7155 }
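/* Illustrative layout (assumed V4SI vector; not part of the folder):
   the elements of a VECTOR_CST are encoded back to back in element
   order, so { 1, 2, 3, 4 } occupies 4 * 4 bytes with element 0 first.
   A nonzero OFF skips whole leading elements; any residual offset is
   passed down to the element encoder.  */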
7156
7157
7158 /* Subroutine of native_encode_expr. Encode the STRING_CST
7159 specified by EXPR into the buffer PTR of length LEN bytes.
7160 Return the number of bytes placed in the buffer, or zero
7161 upon failure. */
7162
7163 static int
7164 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7165 {
7166 tree type = TREE_TYPE (expr);
7167
7168 /* Wide-char strings are encoded in target byte-order, so natively
7169 encoding them is trivial. */
7170 if (BITS_PER_UNIT != CHAR_BIT
7171 || TREE_CODE (type) != ARRAY_TYPE
7172 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7173 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7174 return 0;
7175
7176 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7177 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7178 return 0;
7179 if (off == -1)
7180 off = 0;
7181 if (ptr == NULL)
7182 /* Dry run. */;
7183 else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7184 {
7185 int written = 0;
7186 if (off < TREE_STRING_LENGTH (expr))
7187 {
7188 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7189 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7190 }
7191 memset (ptr + written, 0,
7192 MIN (total_bytes - written, len - written));
7193 }
7194 else
7195 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7196 return MIN (total_bytes - off, len);
7197 }
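/* Illustrative example (assumed types; not part of the folder): a
   STRING_CST "ab" with array type char[4] has TREE_STRING_LENGTH 3
   (including the NUL) but TYPE_SIZE_UNIT 4, so encoding it into a
   4-byte buffer yields { 'a', 'b', 0, 0 }: bytes beyond the string
   payload are zero-filled by the memset above.  */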
7198
7199
7200 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7201 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7202 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7203 anything, just do a dry run. If OFF is not -1 then start
7204 the encoding at byte offset OFF and encode at most LEN bytes.
7205 Return the number of bytes placed in the buffer, or zero upon failure. */
7206
7207 int
7208 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7209 {
7210 /* We don't support starting at a negative offset, and -1 is special. */
7211 if (off < -1)
7212 return 0;
7213
7214 switch (TREE_CODE (expr))
7215 {
7216 case INTEGER_CST:
7217 return native_encode_int (expr, ptr, len, off);
7218
7219 case REAL_CST:
7220 return native_encode_real (expr, ptr, len, off);
7221
7222 case FIXED_CST:
7223 return native_encode_fixed (expr, ptr, len, off);
7224
7225 case COMPLEX_CST:
7226 return native_encode_complex (expr, ptr, len, off);
7227
7228 case VECTOR_CST:
7229 return native_encode_vector (expr, ptr, len, off);
7230
7231 case STRING_CST:
7232 return native_encode_string (expr, ptr, len, off);
7233
7234 default:
7235 return 0;
7236 }
7237 }
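/* Usage sketch (hypothetical caller; not part of the folder): a dry
   run sizes the buffer before the real encoding:

     int size = native_encode_expr (expr, NULL, INT_MAX, -1);
     if (size)
       {
         unsigned char *buf = XALLOCAVEC (unsigned char, size);
         native_encode_expr (expr, buf, size, -1);
       }

   OFF == -1 asks for the whole object and fails unless it fits.  */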
7238
7239
7240 /* Subroutine of native_interpret_expr. Interpret the contents of
7241 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7242 If the buffer cannot be interpreted, return NULL_TREE. */
7243
7244 static tree
7245 native_interpret_int (tree type, const unsigned char *ptr, int len)
7246 {
7247 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7248
7249 if (total_bytes > len
7250 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7251 return NULL_TREE;
7252
7253 wide_int result = wi::from_buffer (ptr, total_bytes);
7254
7255 return wide_int_to_tree (type, result);
7256 }
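/* Illustrative round trip (assumed 32-bit int, little-endian target;
   not part of the folder): interpreting the buffer
   { 0x04, 0x03, 0x02, 0x01 } as integer_type_node rebuilds the
   INTEGER_CST 0x01020304; wi::from_buffer reads the bytes in target
   byte and word order.  */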
7257
7258
7259 /* Subroutine of native_interpret_expr. Interpret the contents of
7260 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7261 If the buffer cannot be interpreted, return NULL_TREE. */
7262
7263 static tree
7264 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7265 {
7266 scalar_mode mode = SCALAR_TYPE_MODE (type);
7267 int total_bytes = GET_MODE_SIZE (mode);
7268 double_int result;
7269 FIXED_VALUE_TYPE fixed_value;
7270
7271 if (total_bytes > len
7272 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7273 return NULL_TREE;
7274
7275 result = double_int::from_buffer (ptr, total_bytes);
7276 fixed_value = fixed_from_double_int (result, mode);
7277
7278 return build_fixed (type, fixed_value);
7279 }
7280
7281
7282 /* Subroutine of native_interpret_expr. Interpret the contents of
7283 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7284 If the buffer cannot be interpreted, return NULL_TREE. */
7285
7286 static tree
7287 native_interpret_real (tree type, const unsigned char *ptr, int len)
7288 {
7289 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7290 int total_bytes = GET_MODE_SIZE (mode);
7291 unsigned char value;
7292 /* There are always 32 bits in each long, no matter the size of
7293 the host's long. We handle floating point representations with
7294 up to 192 bits. */
7295 REAL_VALUE_TYPE r;
7296 long tmp[6];
7297
7298 if (total_bytes > len || total_bytes > 24)
7299 return NULL_TREE;
7300 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7301
7302 memset (tmp, 0, sizeof (tmp));
7303 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7304 bitpos += BITS_PER_UNIT)
7305 {
7306 /* Both OFFSET and BYTE index within a long;
7307 bitpos indexes the whole float. */
7308 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7309 if (UNITS_PER_WORD < 4)
7310 {
7311 int word = byte / UNITS_PER_WORD;
7312 if (WORDS_BIG_ENDIAN)
7313 word = (words - 1) - word;
7314 offset = word * UNITS_PER_WORD;
7315 if (BYTES_BIG_ENDIAN)
7316 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7317 else
7318 offset += byte % UNITS_PER_WORD;
7319 }
7320 else
7321 {
7322 offset = byte;
7323 if (BYTES_BIG_ENDIAN)
7324 {
7325 /* Reverse bytes within each long, or within the entire float
7326 if it's smaller than a long (for HFmode). */
7327 offset = MIN (3, total_bytes - 1) - offset;
7328 gcc_assert (offset >= 0);
7329 }
7330 }
7331 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7332
7333 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7334 }
7335
7336 real_from_target (&r, tmp, mode);
7337 return build_real (type, r);
7338 }
7339
7340
7341 /* Subroutine of native_interpret_expr. Interpret the contents of
7342 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7343 If the buffer cannot be interpreted, return NULL_TREE. */
7344
7345 static tree
7346 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7347 {
7348 tree etype, rpart, ipart;
7349 int size;
7350
7351 etype = TREE_TYPE (type);
7352 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7353 if (size * 2 > len)
7354 return NULL_TREE;
7355 rpart = native_interpret_expr (etype, ptr, size);
7356 if (!rpart)
7357 return NULL_TREE;
7358 ipart = native_interpret_expr (etype, ptr+size, size);
7359 if (!ipart)
7360 return NULL_TREE;
7361 return build_complex (type, rpart, ipart);
7362 }
7363
7364
7365 /* Subroutine of native_interpret_expr. Interpret the contents of
7366 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7367 If the buffer cannot be interpreted, return NULL_TREE. */
7368
7369 static tree
7370 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7371 {
7372 tree etype, elem;
7373 int i, size, count;
7374
7375 etype = TREE_TYPE (type);
7376 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7377 count = TYPE_VECTOR_SUBPARTS (type);
7378 if (size * count > len)
7379 return NULL_TREE;
7380
7381 tree_vector_builder elements (type, count, 1);
7382 for (i = 0; i < count; ++i)
7383 {
7384 elem = native_interpret_expr (etype, ptr+(i*size), size);
7385 if (!elem)
7386 return NULL_TREE;
7387 elements.quick_push (elem);
7388 }
7389 return elements.build ();
7390 }
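/* Usage sketch (hypothetical values; not part of the folder): the
   same builder pattern constructs any constant vector, e.g. a zero
   vector with four elements:

     tree_vector_builder b (type, 4, 1);
     for (int i = 0; i < 4; ++i)
       b.quick_push (build_zero_cst (TREE_TYPE (type)));
     tree vec = b.build ();
*/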
7391
7392
7393 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7394 the buffer PTR of length LEN as a constant of type TYPE. For
7395 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7396 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7397 return NULL_TREE. */
7398
7399 tree
7400 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7401 {
7402 switch (TREE_CODE (type))
7403 {
7404 case INTEGER_TYPE:
7405 case ENUMERAL_TYPE:
7406 case BOOLEAN_TYPE:
7407 case POINTER_TYPE:
7408 case REFERENCE_TYPE:
7409 return native_interpret_int (type, ptr, len);
7410
7411 case REAL_TYPE:
7412 return native_interpret_real (type, ptr, len);
7413
7414 case FIXED_POINT_TYPE:
7415 return native_interpret_fixed (type, ptr, len);
7416
7417 case COMPLEX_TYPE:
7418 return native_interpret_complex (type, ptr, len);
7419
7420 case VECTOR_TYPE:
7421 return native_interpret_vector (type, ptr, len);
7422
7423 default:
7424 return NULL_TREE;
7425 }
7426 }
7427
7428 /* Returns true if we can interpret the contents of a native encoding
7429 as TYPE. */
7430
7431 static bool
7432 can_native_interpret_type_p (tree type)
7433 {
7434 switch (TREE_CODE (type))
7435 {
7436 case INTEGER_TYPE:
7437 case ENUMERAL_TYPE:
7438 case BOOLEAN_TYPE:
7439 case POINTER_TYPE:
7440 case REFERENCE_TYPE:
7441 case FIXED_POINT_TYPE:
7442 case REAL_TYPE:
7443 case COMPLEX_TYPE:
7444 case VECTOR_TYPE:
7445 return true;
7446 default:
7447 return false;
7448 }
7449 }
7450
7451
7452 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7453 TYPE at compile-time. If we're unable to perform the conversion
7454 return NULL_TREE. */
7455
7456 static tree
7457 fold_view_convert_expr (tree type, tree expr)
7458 {
7459 /* We support up to 512-bit values (for V8DFmode). */
7460 unsigned char buffer[64];
7461 int len;
7462
7463 /* Check that the host and target are sane. */
7464 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7465 return NULL_TREE;
7466
7467 len = native_encode_expr (expr, buffer, sizeof (buffer));
7468 if (len == 0)
7469 return NULL_TREE;
7470
7471 return native_interpret_expr (type, buffer, len);
7472 }
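/* Illustrative example (assumed IEEE float and 32-bit int, matching
   endianness; not part of the folder): folding

     VIEW_CONVERT_EXPR<int>(1.0f)

   encodes the REAL_CST into the buffer and reinterprets the bytes,
   yielding the INTEGER_CST 0x3f800000.  */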
7473
7474 /* Build an expression for the address of T. Folds away INDIRECT_REF
7475 to avoid confusing the gimplify process. */
7476
7477 tree
7478 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7479 {
7480 /* The size of the object is not relevant when talking about its address. */
7481 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7482 t = TREE_OPERAND (t, 0);
7483
7484 if (TREE_CODE (t) == INDIRECT_REF)
7485 {
7486 t = TREE_OPERAND (t, 0);
7487
7488 if (TREE_TYPE (t) != ptrtype)
7489 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7490 }
7491 else if (TREE_CODE (t) == MEM_REF
7492 && integer_zerop (TREE_OPERAND (t, 1)))
7493 return TREE_OPERAND (t, 0);
7494 else if (TREE_CODE (t) == MEM_REF
7495 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7496 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7497 TREE_OPERAND (t, 0),
7498 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7499 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7500 {
7501 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7502
7503 if (TREE_TYPE (t) != ptrtype)
7504 t = fold_convert_loc (loc, ptrtype, t);
7505 }
7506 else
7507 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7508
7509 return t;
7510 }
7511
7512 /* Build an expression for the address of T. */
7513
7514 tree
7515 build_fold_addr_expr_loc (location_t loc, tree t)
7516 {
7517 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7518
7519 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7520 }
7521
7522 /* Fold a unary expression of code CODE and type TYPE with operand
7523 OP0. Return the folded expression if folding is successful.
7524 Otherwise, return NULL_TREE. */
7525
7526 tree
7527 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7528 {
7529 tree tem;
7530 tree arg0;
7531 enum tree_code_class kind = TREE_CODE_CLASS (code);
7532
7533 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7534 && TREE_CODE_LENGTH (code) == 1);
7535
7536 arg0 = op0;
7537 if (arg0)
7538 {
7539 if (CONVERT_EXPR_CODE_P (code)
7540 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7541 {
7542 /* Don't use STRIP_NOPS, because signedness of argument type
7543 matters. */
7544 STRIP_SIGN_NOPS (arg0);
7545 }
7546 else
7547 {
7548 /* Strip any conversions that don't change the mode. This
7549 is safe for every expression, except for a comparison
7550 expression because its signedness is derived from its
7551 operands.
7552
7553 Note that this is done as an internal manipulation within
7554 the constant folder, in order to find the simplest
7555 representation of the arguments so that their form can be
7556 studied. In any case, the appropriate type conversions
7557 should be put back in the tree that will get out of the
7558 constant folder. */
7559 STRIP_NOPS (arg0);
7560 }
7561
7562 if (CONSTANT_CLASS_P (arg0))
7563 {
7564 tree tem = const_unop (code, type, arg0);
7565 if (tem)
7566 {
7567 if (TREE_TYPE (tem) != type)
7568 tem = fold_convert_loc (loc, type, tem);
7569 return tem;
7570 }
7571 }
7572 }
7573
7574 tem = generic_simplify (loc, code, type, op0);
7575 if (tem)
7576 return tem;
7577
7578 if (TREE_CODE_CLASS (code) == tcc_unary)
7579 {
7580 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7581 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7582 fold_build1_loc (loc, code, type,
7583 fold_convert_loc (loc, TREE_TYPE (op0),
7584 TREE_OPERAND (arg0, 1))));
7585 else if (TREE_CODE (arg0) == COND_EXPR)
7586 {
7587 tree arg01 = TREE_OPERAND (arg0, 1);
7588 tree arg02 = TREE_OPERAND (arg0, 2);
7589 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7590 arg01 = fold_build1_loc (loc, code, type,
7591 fold_convert_loc (loc,
7592 TREE_TYPE (op0), arg01));
7593 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7594 arg02 = fold_build1_loc (loc, code, type,
7595 fold_convert_loc (loc,
7596 TREE_TYPE (op0), arg02));
7597 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7598 arg01, arg02);
7599
7600 /* If this was a conversion, and all we did was to move it
7601 inside the COND_EXPR, bring it back out. But leave it if
7602 it is a conversion from integer to integer and the
7603 result precision is no wider than a word since such a
7604 conversion is cheap and may be optimized away by combine,
7605 while it couldn't if it were outside the COND_EXPR. Then return
7606 so we don't get into an infinite recursion loop taking the
7607 conversion out and then back in. */
7608
7609 if ((CONVERT_EXPR_CODE_P (code)
7610 || code == NON_LVALUE_EXPR)
7611 && TREE_CODE (tem) == COND_EXPR
7612 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7613 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7614 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7615 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7616 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7617 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7618 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7619 && (INTEGRAL_TYPE_P
7620 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7621 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7622 || flag_syntax_only))
7623 tem = build1_loc (loc, code, type,
7624 build3 (COND_EXPR,
7625 TREE_TYPE (TREE_OPERAND
7626 (TREE_OPERAND (tem, 1), 0)),
7627 TREE_OPERAND (tem, 0),
7628 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7629 TREE_OPERAND (TREE_OPERAND (tem, 2),
7630 0)));
7631 return tem;
7632 }
7633 }
7634
7635 switch (code)
7636 {
7637 case NON_LVALUE_EXPR:
7638 if (!maybe_lvalue_p (op0))
7639 return fold_convert_loc (loc, type, op0);
7640 return NULL_TREE;
7641
7642 CASE_CONVERT:
7643 case FLOAT_EXPR:
7644 case FIX_TRUNC_EXPR:
7645 if (COMPARISON_CLASS_P (op0))
7646 {
7647 /* If we have (type) (a CMP b) and type is an integral type, return
7648 new expression involving the new type. Canonicalize
7649 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7650 non-integral type.
7651 Do not fold the result as that would not simplify further; also,
7652 folding again would result in infinite recursion. */
7653 if (TREE_CODE (type) == BOOLEAN_TYPE)
7654 return build2_loc (loc, TREE_CODE (op0), type,
7655 TREE_OPERAND (op0, 0),
7656 TREE_OPERAND (op0, 1));
7657 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7658 && TREE_CODE (type) != VECTOR_TYPE)
7659 return build3_loc (loc, COND_EXPR, type, op0,
7660 constant_boolean_node (true, type),
7661 constant_boolean_node (false, type));
7662 }
7663
7664 /* Handle (T *)&A.B.C for A being of type T and B and C
7665 living at offset zero. This occurs frequently in
7666 C++ upcasting and then accessing the base. */
7667 if (TREE_CODE (op0) == ADDR_EXPR
7668 && POINTER_TYPE_P (type)
7669 && handled_component_p (TREE_OPERAND (op0, 0)))
7670 {
7671 HOST_WIDE_INT bitsize, bitpos;
7672 tree offset;
7673 machine_mode mode;
7674 int unsignedp, reversep, volatilep;
7675 tree base
7676 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7677 &offset, &mode, &unsignedp, &reversep,
7678 &volatilep);
7679 /* If the reference was to a (constant) zero offset, we can use
7680 the address of the base if it has the same base type
7681 as the result type and the pointer type is unqualified. */
7682 if (! offset && bitpos == 0
7683 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7684 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7685 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7686 return fold_convert_loc (loc, type,
7687 build_fold_addr_expr_loc (loc, base));
7688 }
7689
7690 if (TREE_CODE (op0) == MODIFY_EXPR
7691 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7692 /* Detect assigning a bitfield. */
7693 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7694 && DECL_BIT_FIELD
7695 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7696 {
7697 /* Don't leave an assignment inside a conversion
7698 unless assigning a bitfield. */
7699 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7700 /* First do the assignment, then return converted constant. */
7701 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7702 TREE_NO_WARNING (tem) = 1;
7703 TREE_USED (tem) = 1;
7704 return tem;
7705 }
7706
7707 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7708 constants (if x has signed type, the sign bit cannot be set
7709 in c). This folds extension into the BIT_AND_EXPR.
7710 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7711 very likely don't have maximal range for their precision and this
7712 transformation effectively doesn't preserve non-maximal ranges. */
7713 if (TREE_CODE (type) == INTEGER_TYPE
7714 && TREE_CODE (op0) == BIT_AND_EXPR
7715 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7716 {
7717 tree and_expr = op0;
7718 tree and0 = TREE_OPERAND (and_expr, 0);
7719 tree and1 = TREE_OPERAND (and_expr, 1);
7720 int change = 0;
7721
7722 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7723 || (TYPE_PRECISION (type)
7724 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7725 change = 1;
7726 else if (TYPE_PRECISION (TREE_TYPE (and1))
7727 <= HOST_BITS_PER_WIDE_INT
7728 && tree_fits_uhwi_p (and1))
7729 {
7730 unsigned HOST_WIDE_INT cst;
7731
7732 cst = tree_to_uhwi (and1);
7733 cst &= HOST_WIDE_INT_M1U
7734 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7735 change = (cst == 0);
7736 if (change
7737 && !flag_syntax_only
7738 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7739 == ZERO_EXTEND))
7740 {
7741 tree uns = unsigned_type_for (TREE_TYPE (and0));
7742 and0 = fold_convert_loc (loc, uns, and0);
7743 and1 = fold_convert_loc (loc, uns, and1);
7744 }
7745 }
7746 if (change)
7747 {
7748 tem = force_fit_type (type, wi::to_widest (and1), 0,
7749 TREE_OVERFLOW (and1));
7750 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7751 fold_convert_loc (loc, type, and0), tem);
7752 }
7753 }
7754
7755 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7756 cast (T1)X will fold away. We assume that this happens when X itself
7757 is a cast. */
7758 if (POINTER_TYPE_P (type)
7759 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7760 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7761 {
7762 tree arg00 = TREE_OPERAND (arg0, 0);
7763 tree arg01 = TREE_OPERAND (arg0, 1);
7764
7765 return fold_build_pointer_plus_loc
7766 (loc, fold_convert_loc (loc, type, arg00), arg01);
7767 }
7768
7769 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7770 of the same precision, and X is an integer type not narrower than
7771 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7772 if (INTEGRAL_TYPE_P (type)
7773 && TREE_CODE (op0) == BIT_NOT_EXPR
7774 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7775 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7776 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7777 {
7778 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7779 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7780 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7781 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7782 fold_convert_loc (loc, type, tem));
7783 }
7784
7785 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7786 type of X and Y (integer types only). */
7787 if (INTEGRAL_TYPE_P (type)
7788 && TREE_CODE (op0) == MULT_EXPR
7789 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7790 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7791 {
7792 /* Be careful not to introduce new overflows. */
7793 tree mult_type;
7794 if (TYPE_OVERFLOW_WRAPS (type))
7795 mult_type = type;
7796 else
7797 mult_type = unsigned_type_for (type);
7798
7799 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7800 {
7801 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7802 fold_convert_loc (loc, mult_type,
7803 TREE_OPERAND (op0, 0)),
7804 fold_convert_loc (loc, mult_type,
7805 TREE_OPERAND (op0, 1)));
7806 return fold_convert_loc (loc, type, tem);
7807 }
7808 }
7809
7810 return NULL_TREE;
7811
7812 case VIEW_CONVERT_EXPR:
7813 if (TREE_CODE (op0) == MEM_REF)
7814 {
7815 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7816 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7817 tem = fold_build2_loc (loc, MEM_REF, type,
7818 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7819 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7820 return tem;
7821 }
7822
7823 return NULL_TREE;
7824
7825 case NEGATE_EXPR:
7826 tem = fold_negate_expr (loc, arg0);
7827 if (tem)
7828 return fold_convert_loc (loc, type, tem);
7829 return NULL_TREE;
7830
7831 case ABS_EXPR:
7832 /* Convert fabs((double)float) into (double)fabsf(float). */
7833 if (TREE_CODE (arg0) == NOP_EXPR
7834 && TREE_CODE (type) == REAL_TYPE)
7835 {
7836 tree targ0 = strip_float_extensions (arg0);
7837 if (targ0 != arg0)
7838 return fold_convert_loc (loc, type,
7839 fold_build1_loc (loc, ABS_EXPR,
7840 TREE_TYPE (targ0),
7841 targ0));
7842 }
7843 return NULL_TREE;
7844
7845 case BIT_NOT_EXPR:
7846 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7847 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7848 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7849 fold_convert_loc (loc, type,
7850 TREE_OPERAND (arg0, 0)))))
7851 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7852 fold_convert_loc (loc, type,
7853 TREE_OPERAND (arg0, 1)));
7854 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7855 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7856 fold_convert_loc (loc, type,
7857 TREE_OPERAND (arg0, 1)))))
7858 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7859 fold_convert_loc (loc, type,
7860 TREE_OPERAND (arg0, 0)), tem);
7861
7862 return NULL_TREE;
7863
7864 case TRUTH_NOT_EXPR:
7865 /* Note that the operand of this must be an int
7866 and its values must be 0 or 1.
7867 ("true" is a fixed value perhaps depending on the language,
7868 but we don't handle values other than 1 correctly yet.) */
7869 tem = fold_truth_not_expr (loc, arg0);
7870 if (!tem)
7871 return NULL_TREE;
7872 return fold_convert_loc (loc, type, tem);
7873
7874 case INDIRECT_REF:
7875 /* Fold *&X to X if X is an lvalue. */
7876 if (TREE_CODE (op0) == ADDR_EXPR)
7877 {
7878 tree op00 = TREE_OPERAND (op0, 0);
7879 if ((VAR_P (op00)
7880 || TREE_CODE (op00) == PARM_DECL
7881 || TREE_CODE (op00) == RESULT_DECL)
7882 && !TREE_READONLY (op00))
7883 return op00;
7884 }
7885 return NULL_TREE;
7886
7887 default:
7888 return NULL_TREE;
7889 } /* switch (code) */
7890 }
7891
7892
7893 /* If the operation was a conversion do _not_ mark a resulting constant
7894 with TREE_OVERFLOW if the original constant was not. These conversions
7895 have implementation defined behavior and retaining the TREE_OVERFLOW
7896 flag here would confuse later passes such as VRP. */
7897 tree
7898 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7899 tree type, tree op0)
7900 {
7901 tree res = fold_unary_loc (loc, code, type, op0);
7902 if (res
7903 && TREE_CODE (res) == INTEGER_CST
7904 && TREE_CODE (op0) == INTEGER_CST
7905 && CONVERT_EXPR_CODE_P (code))
7906 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7907
7908 return res;
7909 }
7910
7911 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7912 operands OP0 and OP1. LOC is the location of the resulting expression.
7913 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
7914 Return the folded expression if folding is successful. Otherwise,
7915 return NULL_TREE. */
7916 static tree
7917 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7918 tree arg0, tree arg1, tree op0, tree op1)
7919 {
7920 tree tem;
7921
7922 /* We only do these simplifications if we are optimizing. */
7923 if (!optimize)
7924 return NULL_TREE;
7925
7926 /* Check for things like (A || B) && (A || C). We can convert this
7927 to A || (B && C). Note that either operator can be any of the four
7928 truth and/or operations and the transformation will still be
7929 valid. Also note that we only care about order for the
7930 ANDIF and ORIF operators. If B contains side effects, this
7931 might change the truth-value of A. */
7932 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7933 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7934 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7935 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7936 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7937 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7938 {
7939 tree a00 = TREE_OPERAND (arg0, 0);
7940 tree a01 = TREE_OPERAND (arg0, 1);
7941 tree a10 = TREE_OPERAND (arg1, 0);
7942 tree a11 = TREE_OPERAND (arg1, 1);
7943 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7944 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7945 && (code == TRUTH_AND_EXPR
7946 || code == TRUTH_OR_EXPR));
7947
7948 if (operand_equal_p (a00, a10, 0))
7949 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7950 fold_build2_loc (loc, code, type, a01, a11));
7951 else if (commutative && operand_equal_p (a00, a11, 0))
7952 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7953 fold_build2_loc (loc, code, type, a01, a10));
7954 else if (commutative && operand_equal_p (a01, a10, 0))
7955 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
7956 fold_build2_loc (loc, code, type, a00, a11));
7957
7958 /* This case is tricky because we must either have commutative
7959 operators or else A10 must not have side-effects. */
7960
7961 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7962 && operand_equal_p (a01, a11, 0))
7963 return fold_build2_loc (loc, TREE_CODE (arg0), type,
7964 fold_build2_loc (loc, code, type, a00, a10),
7965 a01);
7966 }
7967
7968 /* See if we can build a range comparison. */
7969 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
7970 return tem;
7971
7972 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
7973 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
7974 {
7975 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
7976 if (tem)
7977 return fold_build2_loc (loc, code, type, tem, arg1);
7978 }
7979
7980 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
7981 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
7982 {
7983 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
7984 if (tem)
7985 return fold_build2_loc (loc, code, type, arg0, tem);
7986 }
7987
7988 /* Check for the possibility of merging component references. If our
7989 lhs is another similar operation, try to merge its rhs with our
7990 rhs. Then try to merge our lhs and rhs. */
7991 if (TREE_CODE (arg0) == code
7992 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
7993 TREE_OPERAND (arg0, 1), arg1)))
7994 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
7995
7996 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
7997 return tem;
7998
7999 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8000 && !flag_sanitize_coverage
8001 && (code == TRUTH_AND_EXPR
8002 || code == TRUTH_ANDIF_EXPR
8003 || code == TRUTH_OR_EXPR
8004 || code == TRUTH_ORIF_EXPR))
8005 {
8006 enum tree_code ncode, icode;
8007
8008 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8009 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8010 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8011
8012 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8013 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8014 We don't want to pack more than two leaves into a non-IF AND/OR
8015 expression.
8016 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8017 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8018 If the inner right-hand side of the left-hand operand has
8019 side effects, or isn't simple, then we can't add to it,
8020 as otherwise we might destroy the if-sequence. */
8021 if (TREE_CODE (arg0) == icode
8022 && simple_operand_p_2 (arg1)
8023 /* Needed for sequence points to handle trappings, and
8024 side-effects. */
8025 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8026 {
8027 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8028 arg1);
8029 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8030 tem);
8031 }
8032 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8033 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8034 else if (TREE_CODE (arg1) == icode
8035 && simple_operand_p_2 (arg0)
8036 /* Needed for sequence points to handle trappings, and
8037 side-effects. */
8038 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8039 {
8040 tem = fold_build2_loc (loc, ncode, type,
8041 arg0, TREE_OPERAND (arg1, 0));
8042 return fold_build2_loc (loc, icode, type, tem,
8043 TREE_OPERAND (arg1, 1));
8044 }
8045 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8046 into (A OR B).
8047 For sequence point consistency, we need to check for trapping
8048 and side effects. */
8049 else if (code == icode && simple_operand_p_2 (arg0)
8050 && simple_operand_p_2 (arg1))
8051 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8052 }
8053
8054 return NULL_TREE;
8055 }
8056
8057 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8058 by changing CODE to reduce the magnitude of constants involved in
8059 ARG0 of the comparison.
8060 Returns a canonicalized comparison tree if a simplification was
8061 possible, otherwise returns NULL_TREE.
8062 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8063 valid if signed overflow is undefined. */
8064
8065 static tree
8066 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8067 tree arg0, tree arg1,
8068 bool *strict_overflow_p)
8069 {
8070 enum tree_code code0 = TREE_CODE (arg0);
8071 tree t, cst0 = NULL_TREE;
8072 int sgn0;
8073
8074 /* Match A +- CST code arg1. We can change this only if overflow
8075 is undefined. */
8076 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8077 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8078 /* In principle pointers also have undefined overflow behavior,
8079 but that causes problems elsewhere. */
8080 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8081 && (code0 == MINUS_EXPR
8082 || code0 == PLUS_EXPR)
8083 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8084 return NULL_TREE;
8085
8086 /* Identify the constant in arg0 and its sign. */
8087 cst0 = TREE_OPERAND (arg0, 1);
8088 sgn0 = tree_int_cst_sgn (cst0);
8089
8090 /* Overflowed constants and zero will cause problems. */
8091 if (integer_zerop (cst0)
8092 || TREE_OVERFLOW (cst0))
8093 return NULL_TREE;
8094
8095 /* See if we can reduce the magnitude of the constant in
8096 arg0 by changing the comparison code. */
8097 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8098 if (code == LT_EXPR
8099 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8100 code = LE_EXPR;
8101 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8102 else if (code == GT_EXPR
8103 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8104 code = GE_EXPR;
8105 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8106 else if (code == LE_EXPR
8107 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8108 code = LT_EXPR;
8109 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8110 else if (code == GE_EXPR
8111 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8112 code = GT_EXPR;
8113 else
8114 return NULL_TREE;
8115 *strict_overflow_p = true;
8116
8117 /* Now build the constant reduced in magnitude. But not if that
8118 would produce one outside of its type's range. */
8119 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8120 && ((sgn0 == 1
8121 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8122 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8123 || (sgn0 == -1
8124 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8125 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8126 return NULL_TREE;
8127
8128 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8129 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8130 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8131 t = fold_convert (TREE_TYPE (arg1), t);
8132
8133 return fold_build2_loc (loc, code, type, t, arg1);
8134 }
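/* Illustrative example (signed int X with undefined overflow; not
   part of the folder): "X + 2 > Y" becomes "X + 1 >= Y": the constant
   shrinks toward zero while the comparison stays equivalent, because
   X + 2 cannot wrap.  The guard above refuses the step when CST is
   already at the type's extreme value.  */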
8135
8136 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8137 overflow further. Try to decrease the magnitude of constants involved
8138 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8139 and put sole constants at the second argument position.
8140 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8141
8142 static tree
8143 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8144 tree arg0, tree arg1)
8145 {
8146 tree t;
8147 bool strict_overflow_p;
8148 const char * const warnmsg = G_("assuming signed overflow does not occur "
8149 "when reducing constant in comparison");
8150
8151 /* Try canonicalization by simplifying arg0. */
8152 strict_overflow_p = false;
8153 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8154 &strict_overflow_p);
8155 if (t)
8156 {
8157 if (strict_overflow_p)
8158 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8159 return t;
8160 }
8161
8162 /* Try canonicalization by simplifying arg1 using the swapped
8163 comparison. */
8164 code = swap_tree_comparison (code);
8165 strict_overflow_p = false;
8166 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8167 &strict_overflow_p);
8168 if (t && strict_overflow_p)
8169 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8170 return t;
8171 }
8172
8173 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8174 space. This is used to avoid issuing overflow warnings for
8175 expressions like &p->x which cannot wrap. */
8176
8177 static bool
8178 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8179 {
8180 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8181 return true;
8182
8183 if (bitpos < 0)
8184 return true;
8185
8186 wide_int wi_offset;
8187 int precision = TYPE_PRECISION (TREE_TYPE (base));
8188 if (offset == NULL_TREE)
8189 wi_offset = wi::zero (precision);
8190 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8191 return true;
8192 else
8193 wi_offset = wi::to_wide (offset);
8194
8195 bool overflow;
8196 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8197 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8198 if (overflow)
8199 return true;
8200
8201 if (!wi::fits_uhwi_p (total))
8202 return true;
8203
8204 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8205 if (size <= 0)
8206 return true;
8207
8208 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8209 array. */
8210 if (TREE_CODE (base) == ADDR_EXPR)
8211 {
8212 HOST_WIDE_INT base_size;
8213
8214 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8215 if (base_size > 0 && size < base_size)
8216 size = base_size;
8217 }
8218
8219 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8220 }
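/* Worked example (assumed base of type char (*)[16]; not part of the
   folder): a total byte offset of 20 exceeds the 16-byte object size,
   so the arithmetic may conceptually wrap and true is returned; a
   total of 8 stays within the object and returns false.  */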
8221
8222 /* Return a positive integer when the symbol DECL is known to have
8223 a nonzero address, zero when it's known not to (e.g., it's a weak
8224 symbol), and a negative integer when the symbol is not yet in the
8225 symbol table and so whether or not its address is zero is unknown.
8226 For function-local objects, always return a positive integer. */
8227 static int
8228 maybe_nonzero_address (tree decl)
8229 {
8230 if (DECL_P (decl) && decl_in_symtab_p (decl))
8231 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8232 return symbol->nonzero_address ();
8233
8234 /* Function local objects are never NULL. */
8235 if (DECL_P (decl)
8236 && (DECL_CONTEXT (decl)
8237 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8238 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8239 return 1;
8240
8241 return -1;
8242 }
8243
8244 /* Subroutine of fold_binary. This routine performs all of the
8245 transformations that are common to the equality/inequality
8246 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8247 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8248 fold_binary should call fold_binary. Fold a comparison with
8249 tree code CODE and type TYPE with operands OP0 and OP1. Return
8250 the folded comparison or NULL_TREE. */
8251
8252 static tree
8253 fold_comparison (location_t loc, enum tree_code code, tree type,
8254 tree op0, tree op1)
8255 {
8256 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8257 tree arg0, arg1, tem;
8258
8259 arg0 = op0;
8260 arg1 = op1;
8261
8262 STRIP_SIGN_NOPS (arg0);
8263 STRIP_SIGN_NOPS (arg1);
8264
8265 /* For comparisons of pointers we can decompose it to a compile time
8266 comparison of the base objects and the offsets into the object.
8267 This requires at least one operand being an ADDR_EXPR or a
8268 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8269 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8270 && (TREE_CODE (arg0) == ADDR_EXPR
8271 || TREE_CODE (arg1) == ADDR_EXPR
8272 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8273 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8274 {
8275 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8276 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8277 machine_mode mode;
8278 int volatilep, reversep, unsignedp;
8279 bool indirect_base0 = false, indirect_base1 = false;
8280
8281 /* Get base and offset for the access. Strip ADDR_EXPR for
8282 get_inner_reference, but put it back by stripping INDIRECT_REF
8283 off the base object if possible. indirect_baseN will be true
8284 if baseN is not an address but refers to the object itself. */
8285 base0 = arg0;
8286 if (TREE_CODE (arg0) == ADDR_EXPR)
8287 {
8288 base0
8289 = get_inner_reference (TREE_OPERAND (arg0, 0),
8290 &bitsize, &bitpos0, &offset0, &mode,
8291 &unsignedp, &reversep, &volatilep);
8292 if (TREE_CODE (base0) == INDIRECT_REF)
8293 base0 = TREE_OPERAND (base0, 0);
8294 else
8295 indirect_base0 = true;
8296 }
8297 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8298 {
8299 base0 = TREE_OPERAND (arg0, 0);
8300 STRIP_SIGN_NOPS (base0);
8301 if (TREE_CODE (base0) == ADDR_EXPR)
8302 {
8303 base0
8304 = get_inner_reference (TREE_OPERAND (base0, 0),
8305 &bitsize, &bitpos0, &offset0, &mode,
8306 &unsignedp, &reversep, &volatilep);
8307 if (TREE_CODE (base0) == INDIRECT_REF)
8308 base0 = TREE_OPERAND (base0, 0);
8309 else
8310 indirect_base0 = true;
8311 }
8312 if (offset0 == NULL_TREE || integer_zerop (offset0))
8313 offset0 = TREE_OPERAND (arg0, 1);
8314 else
8315 offset0 = size_binop (PLUS_EXPR, offset0,
8316 TREE_OPERAND (arg0, 1));
8317 if (TREE_CODE (offset0) == INTEGER_CST)
8318 {
8319 offset_int tem = wi::sext (wi::to_offset (offset0),
8320 TYPE_PRECISION (sizetype));
8321 tem <<= LOG2_BITS_PER_UNIT;
8322 tem += bitpos0;
8323 if (wi::fits_shwi_p (tem))
8324 {
8325 bitpos0 = tem.to_shwi ();
8326 offset0 = NULL_TREE;
8327 }
8328 }
8329 }
8330
8331 base1 = arg1;
8332 if (TREE_CODE (arg1) == ADDR_EXPR)
8333 {
8334 base1
8335 = get_inner_reference (TREE_OPERAND (arg1, 0),
8336 &bitsize, &bitpos1, &offset1, &mode,
8337 &unsignedp, &reversep, &volatilep);
8338 if (TREE_CODE (base1) == INDIRECT_REF)
8339 base1 = TREE_OPERAND (base1, 0);
8340 else
8341 indirect_base1 = true;
8342 }
8343 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8344 {
8345 base1 = TREE_OPERAND (arg1, 0);
8346 STRIP_SIGN_NOPS (base1);
8347 if (TREE_CODE (base1) == ADDR_EXPR)
8348 {
8349 base1
8350 = get_inner_reference (TREE_OPERAND (base1, 0),
8351 &bitsize, &bitpos1, &offset1, &mode,
8352 &unsignedp, &reversep, &volatilep);
8353 if (TREE_CODE (base1) == INDIRECT_REF)
8354 base1 = TREE_OPERAND (base1, 0);
8355 else
8356 indirect_base1 = true;
8357 }
8358 if (offset1 == NULL_TREE || integer_zerop (offset1))
8359 offset1 = TREE_OPERAND (arg1, 1);
8360 else
8361 offset1 = size_binop (PLUS_EXPR, offset1,
8362 TREE_OPERAND (arg1, 1));
8363 if (TREE_CODE (offset1) == INTEGER_CST)
8364 {
8365 offset_int tem = wi::sext (wi::to_offset (offset1),
8366 TYPE_PRECISION (sizetype));
8367 tem <<= LOG2_BITS_PER_UNIT;
8368 tem += bitpos1;
8369 if (wi::fits_shwi_p (tem))
8370 {
8371 bitpos1 = tem.to_shwi ();
8372 offset1 = NULL_TREE;
8373 }
8374 }
8375 }
8376
8377 /* If we have equivalent bases we might be able to simplify. */
8378 if (indirect_base0 == indirect_base1
8379 && operand_equal_p (base0, base1,
8380 indirect_base0 ? OEP_ADDRESS_OF : 0))
8381 {
8382 /* We can fold this expression to a constant if the non-constant
8383 offset parts are equal. */
8384 if (offset0 == offset1
8385 || (offset0 && offset1
8386 && operand_equal_p (offset0, offset1, 0)))
8387 {
8388 if (!equality_code
8389 && bitpos0 != bitpos1
8390 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8391 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8392 fold_overflow_warning (("assuming pointer wraparound does not "
8393 "occur when comparing P +- C1 with "
8394 "P +- C2"),
8395 WARN_STRICT_OVERFLOW_CONDITIONAL);
8396
8397 switch (code)
8398 {
8399 case EQ_EXPR:
8400 return constant_boolean_node (bitpos0 == bitpos1, type);
8401 case NE_EXPR:
8402 return constant_boolean_node (bitpos0 != bitpos1, type);
8403 case LT_EXPR:
8404 return constant_boolean_node (bitpos0 < bitpos1, type);
8405 case LE_EXPR:
8406 return constant_boolean_node (bitpos0 <= bitpos1, type);
8407 case GE_EXPR:
8408 return constant_boolean_node (bitpos0 >= bitpos1, type);
8409 case GT_EXPR:
8410 return constant_boolean_node (bitpos0 > bitpos1, type);
8411 default:;
8412 }
8413 }
8414 /* We can simplify the comparison to a comparison of the variable
8415 offset parts if the constant offset parts are equal.
8416 Be careful to use signed sizetype here because otherwise we
8417 mess with array offsets in the wrong way. This is possible
8418 because pointer arithmetic is restricted to remain within an
8419 object and overflow on pointer differences is undefined as of
8420 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8421 else if (bitpos0 == bitpos1)
8422 {
8423 /* By converting to signed sizetype we cover middle-end pointer
8424 arithmetic which operates on unsigned pointer types of size
8425 type size and ARRAY_REF offsets which are properly sign or
8426 zero extended from their type in case it is narrower than
8427 sizetype. */
8428 if (offset0 == NULL_TREE)
8429 offset0 = build_int_cst (ssizetype, 0);
8430 else
8431 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8432 if (offset1 == NULL_TREE)
8433 offset1 = build_int_cst (ssizetype, 0);
8434 else
8435 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8436
8437 if (!equality_code
8438 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8439 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8440 fold_overflow_warning (("assuming pointer wraparound does not "
8441 "occur when comparing P +- C1 with "
8442 "P +- C2"),
8443 WARN_STRICT_OVERFLOW_COMPARISON);
8444
8445 return fold_build2_loc (loc, code, type, offset0, offset1);
8446 }
8447 }
8448 /* For equal offsets we can simplify to a comparison of the
8449 base addresses. */
8450 else if (bitpos0 == bitpos1
8451 && (indirect_base0
8452 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8453 && (indirect_base1
8454 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8455 && ((offset0 == offset1)
8456 || (offset0 && offset1
8457 && operand_equal_p (offset0, offset1, 0))))
8458 {
8459 if (indirect_base0)
8460 base0 = build_fold_addr_expr_loc (loc, base0);
8461 if (indirect_base1)
8462 base1 = build_fold_addr_expr_loc (loc, base1);
8463 return fold_build2_loc (loc, code, type, base0, base1);
8464 }
8465 /* Comparison between an ordinary (non-weak) symbol and a null
8466 pointer can be eliminated since such symbols must have a
8467 non-null address. In C, relational expressions between pointers
8468 to objects and null pointers are undefined. The results
8469 below follow the C++ rules with the additional property that
8470 every object pointer compares greater than a null pointer.
8471 */
8472 else if (((DECL_P (base0)
8473 && maybe_nonzero_address (base0) > 0
8474 /* Avoid folding references to struct members at offset 0 to
8475 prevent tests like '&ptr->firstmember == 0' from getting
8476 eliminated. When ptr is null, although the -> expression
8477 is strictly speaking invalid, GCC retains it as a matter
8478 of QoI. See PR c/44555. */
8479 && (offset0 == NULL_TREE && bitpos0 != 0))
8480 || CONSTANT_CLASS_P (base0))
8481 && indirect_base0
8482 /* The caller guarantees that when one of the arguments is
8483 constant (i.e., null in this case) it is second. */
8484 && integer_zerop (arg1))
8485 {
8486 switch (code)
8487 {
8488 case EQ_EXPR:
8489 case LE_EXPR:
8490 case LT_EXPR:
8491 return constant_boolean_node (false, type);
8492 case GE_EXPR:
8493 case GT_EXPR:
8494 case NE_EXPR:
8495 return constant_boolean_node (true, type);
8496 default:
8497 gcc_unreachable ();
8498 }
8499 }
8500 }
8501
8502 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8503 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8504 the resulting offset is smaller in absolute value than the
8505 original one and has the same sign. */
8506 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8507 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8508 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8509 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8510 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8511 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8512 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8513 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8514 {
8515 tree const1 = TREE_OPERAND (arg0, 1);
8516 tree const2 = TREE_OPERAND (arg1, 1);
8517 tree variable1 = TREE_OPERAND (arg0, 0);
8518 tree variable2 = TREE_OPERAND (arg1, 0);
8519 tree cst;
8520 const char * const warnmsg = G_("assuming signed overflow does not "
8521 "occur when combining constants around "
8522 "a comparison");
8523
8524 /* Put the constant on the side where it doesn't overflow and is
8525 of lower absolute value and of the same sign as before. */
8526 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8527 ? MINUS_EXPR : PLUS_EXPR,
8528 const2, const1);
8529 if (!TREE_OVERFLOW (cst)
8530 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8531 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8532 {
8533 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8534 return fold_build2_loc (loc, code, type,
8535 variable1,
8536 fold_build2_loc (loc, TREE_CODE (arg1),
8537 TREE_TYPE (arg1),
8538 variable2, cst));
8539 }
8540
8541 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8542 ? MINUS_EXPR : PLUS_EXPR,
8543 const1, const2);
8544 if (!TREE_OVERFLOW (cst)
8545 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8546 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8547 {
8548 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8549 return fold_build2_loc (loc, code, type,
8550 fold_build2_loc (loc, TREE_CODE (arg0),
8551 TREE_TYPE (arg0),
8552 variable1, cst),
8553 variable2);
8554 }
8555 }
8556
8557 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8558 if (tem)
8559 return tem;
8560
8561 /* If we are comparing an expression that just has comparisons
8562 of two integer values, arithmetic expressions of those comparisons,
8563 and constants, we can simplify it. There are only three cases
8564 to check: the two values can either be equal, the first can be
8565 greater, or the second can be greater. Fold the expression for
8566 those three values. Since each value must be 0 or 1, we have
8567 eight possibilities, each of which corresponds to the constant 0
8568 or 1 or one of the six possible comparisons.
8569
8570 This handles common cases like (a > b) == 0 but also handles
8571 expressions like ((x > y) - (y > x)) > 0, which supposedly
8572 occur in macroized code. */
8573
8574 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8575 {
8576 tree cval1 = 0, cval2 = 0;
8577 int save_p = 0;
8578
8579 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8580 /* Don't handle degenerate cases here; they should already
8581 have been handled anyway. */
8582 && cval1 != 0 && cval2 != 0
8583 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8584 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8585 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8586 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8587 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8588 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8589 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8590 {
8591 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8592 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8593
8594 /* We can't just pass T to eval_subst in case cval1 or cval2
8595 was the same as ARG1. */
8596
8597 tree high_result
8598 = fold_build2_loc (loc, code, type,
8599 eval_subst (loc, arg0, cval1, maxval,
8600 cval2, minval),
8601 arg1);
8602 tree equal_result
8603 = fold_build2_loc (loc, code, type,
8604 eval_subst (loc, arg0, cval1, maxval,
8605 cval2, maxval),
8606 arg1);
8607 tree low_result
8608 = fold_build2_loc (loc, code, type,
8609 eval_subst (loc, arg0, cval1, minval,
8610 cval2, maxval),
8611 arg1);
8612
8613 /* All three of these results should be 0 or 1. Confirm they are.
8614 Then use those values to select the proper code to use. */
8615
8616 if (TREE_CODE (high_result) == INTEGER_CST
8617 && TREE_CODE (equal_result) == INTEGER_CST
8618 && TREE_CODE (low_result) == INTEGER_CST)
8619 {
8620 /* Make a 3-bit mask with the high-order bit being the
8621 value for `>', the next for '=', and the low for '<'. */
8622 switch ((integer_onep (high_result) * 4)
8623 + (integer_onep (equal_result) * 2)
8624 + integer_onep (low_result))
8625 {
8626 case 0:
8627 /* Always false. */
8628 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8629 case 1:
8630 code = LT_EXPR;
8631 break;
8632 case 2:
8633 code = EQ_EXPR;
8634 break;
8635 case 3:
8636 code = LE_EXPR;
8637 break;
8638 case 4:
8639 code = GT_EXPR;
8640 break;
8641 case 5:
8642 code = NE_EXPR;
8643 break;
8644 case 6:
8645 code = GE_EXPR;
8646 break;
8647 case 7:
8648 /* Always true. */
8649 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8650 }
8651
8652 if (save_p)
8653 {
8654 tem = save_expr (build2 (code, type, cval1, cval2));
8655 protected_set_expr_location (tem, loc);
8656 return tem;
8657 }
8658 return fold_build2_loc (loc, code, type, cval1, cval2);
8659 }
8660 }
8661 }
8662
8663 return NULL_TREE;
8664 }
8665
8666
8667 /* Subroutine of fold_binary. Optimize complex multiplications of the
8668 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8669 argument EXPR represents the expression "z" of type TYPE. */
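/* E.g. for z = a + b*I this builds (a*a + b*b) + 0*I, since
   z * conj(z) = (a + b*I) * (a - b*I) = a*a + b*b.  */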
8670
8671 static tree
8672 fold_mult_zconjz (location_t loc, tree type, tree expr)
8673 {
8674 tree itype = TREE_TYPE (type);
8675 tree rpart, ipart, tem;
8676
8677 if (TREE_CODE (expr) == COMPLEX_EXPR)
8678 {
8679 rpart = TREE_OPERAND (expr, 0);
8680 ipart = TREE_OPERAND (expr, 1);
8681 }
8682 else if (TREE_CODE (expr) == COMPLEX_CST)
8683 {
8684 rpart = TREE_REALPART (expr);
8685 ipart = TREE_IMAGPART (expr);
8686 }
8687 else
8688 {
8689 expr = save_expr (expr);
8690 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8691 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8692 }
8693
8694 rpart = save_expr (rpart);
8695 ipart = save_expr (ipart);
8696 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8697 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8698 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8699 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8700 build_zero_cst (itype));
8701 }
8702
8703
8704 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8705 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
8706 true if successful. */
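/* Any trailing elements that ARG does not supply (e.g. a CONSTRUCTOR
   with fewer than NELTS entries) are filled in with zero, matching
   the semantics of a partially initialized vector constructor.  */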
8707
8708 static bool
8709 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
8710 {
8711 unsigned int i;
8712
8713 if (TREE_CODE (arg) == VECTOR_CST)
8714 {
8715 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8716 elts[i] = VECTOR_CST_ELT (arg, i);
8717 }
8718 else if (TREE_CODE (arg) == CONSTRUCTOR)
8719 {
8720 constructor_elt *elt;
8721
8722 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8723 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8724 return false;
8725 else
8726 elts[i] = elt->value;
8727 }
8728 else
8729 return false;
8730 for (; i < nelts; i++)
8731 elts[i]
8732 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8733 return true;
8734 }
8735
8736 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8737 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8738 NULL_TREE otherwise. */
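/* E.g. with arg0 = {a, b}, arg1 = {c, d} and sel = {0, 3}, the two
   inputs are treated as the concatenation {a, b, c, d} and the result
   is {a, d}: selector entries below NELTS pick from ARG0, the rest
   from ARG1.  */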
8739
8740 static tree
8741 fold_vec_perm (tree type, tree arg0, tree arg1, vec_perm_indices sel)
8742 {
8743 unsigned int i;
8744 bool need_ctor = false;
8745
8746 unsigned int nelts = sel.length ();
8747 gcc_assert (TYPE_VECTOR_SUBPARTS (type) == nelts
8748 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8749 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8750 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8751 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8752 return NULL_TREE;
8753
8754 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
8755 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
8756 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
8757 return NULL_TREE;
8758
8759 tree_vector_builder out_elts (type, nelts, 1);
8760 for (i = 0; i < nelts; i++)
8761 {
8762 if (!CONSTANT_CLASS_P (in_elts[sel[i]]))
8763 need_ctor = true;
8764 out_elts.quick_push (unshare_expr (in_elts[sel[i]]));
8765 }
8766
8767 if (need_ctor)
8768 {
8769 vec<constructor_elt, va_gc> *v;
8770 vec_alloc (v, nelts);
8771 for (i = 0; i < nelts; i++)
8772 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
8773 return build_constructor (type, v);
8774 }
8775 else
8776 return out_elts.build ();
8777 }
8778
8779 /* Try to fold a pointer difference of type TYPE between two address
8780 expressions of array references AREF0 and AREF1 using location LOC.
8781 Return a simplified expression for the difference or NULL_TREE. */
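/* E.g. for &a[i] - &a[j] this returns (i - j) * sizeof (a[0]) plus
   the difference of the bases, which is zero when both references
   use the same array.  */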
8782
8783 static tree
8784 fold_addr_of_array_ref_difference (location_t loc, tree type,
8785 tree aref0, tree aref1,
8786 bool use_pointer_diff)
8787 {
8788 tree base0 = TREE_OPERAND (aref0, 0);
8789 tree base1 = TREE_OPERAND (aref1, 0);
8790 tree base_offset = build_int_cst (type, 0);
8791
8792 /* If the bases are array references as well, recurse. If the bases
8793 are pointer indirections, compute the difference of the pointers.
8794 If the bases are equal, we are set. */
8795 if ((TREE_CODE (base0) == ARRAY_REF
8796 && TREE_CODE (base1) == ARRAY_REF
8797 && (base_offset
8798 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
8799 use_pointer_diff)))
8800 || (INDIRECT_REF_P (base0)
8801 && INDIRECT_REF_P (base1)
8802 && (base_offset
8803 = use_pointer_diff
8804 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
8805 TREE_OPERAND (base0, 0),
8806 TREE_OPERAND (base1, 0))
8807 : fold_binary_loc (loc, MINUS_EXPR, type,
8808 fold_convert (type,
8809 TREE_OPERAND (base0, 0)),
8810 fold_convert (type,
8811 TREE_OPERAND (base1, 0)))))
8812 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8813 {
8814 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8815 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8816 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8817 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
8818 return fold_build2_loc (loc, PLUS_EXPR, type,
8819 base_offset,
8820 fold_build2_loc (loc, MULT_EXPR, type,
8821 diff, esz));
8822 }
8823 return NULL_TREE;
8824 }
8825
8826 /* If the real or vector real constant CST of type TYPE has an exact
8827 inverse, return it, else return NULL. */
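/* E.g. 4.0 yields 0.25, which is exactly representable in binary
   floating point, whereas 3.0 yields NULL_TREE because 1/3 is not.  */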
8828
8829 tree
8830 exact_inverse (tree type, tree cst)
8831 {
8832 REAL_VALUE_TYPE r;
8833 tree unit_type;
8834 machine_mode mode;
8835 unsigned vec_nelts, i;
8836
8837 switch (TREE_CODE (cst))
8838 {
8839 case REAL_CST:
8840 r = TREE_REAL_CST (cst);
8841
8842 if (exact_real_inverse (TYPE_MODE (type), &r))
8843 return build_real (type, r);
8844
8845 return NULL_TREE;
8846
8847 case VECTOR_CST:
8848 {
8849 vec_nelts = VECTOR_CST_NELTS (cst);
8850 unit_type = TREE_TYPE (type);
8851 mode = TYPE_MODE (unit_type);
8852
8853 auto_vec<tree, 32> elts (vec_nelts);
8854 for (i = 0; i < vec_nelts; i++)
8855 {
8856 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8857 if (!exact_real_inverse (mode, &r))
8858 return NULL_TREE;
8859 elts.quick_push (build_real (unit_type, r));
8860 }
8861
8862 return build_vector (type, elts);
8863 }
8864
8865 default:
8866 return NULL_TREE;
8867 }
8868 }
8869
8870 /* Mask out the tz least significant bits of X of type TYPE where
8871 tz is the number of trailing zeroes in Y. */
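/* E.g. for Y = 8, which has three trailing zeroes, X = 10110 in
   binary becomes 10000.  */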
8872 static wide_int
8873 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8874 {
8875 int tz = wi::ctz (y);
8876 if (tz > 0)
8877 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8878 return x;
8879 }
8880
8881 /* Return true when T is an address and is known to be nonzero.
8882 For floating point we further ensure that T is not denormal.
8883 Similar logic is present in nonzero_address in rtlanal.h.
8884
8885 If the return value is based on the assumption that signed overflow
8886 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8887 change *STRICT_OVERFLOW_P. */
8888
8889 static bool
8890 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8891 {
8892 tree type = TREE_TYPE (t);
8893 enum tree_code code;
8894
8895 /* Doing something useful for floating point would need more work. */
8896 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8897 return false;
8898
8899 code = TREE_CODE (t);
8900 switch (TREE_CODE_CLASS (code))
8901 {
8902 case tcc_unary:
8903 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8904 strict_overflow_p);
8905 case tcc_binary:
8906 case tcc_comparison:
8907 return tree_binary_nonzero_warnv_p (code, type,
8908 TREE_OPERAND (t, 0),
8909 TREE_OPERAND (t, 1),
8910 strict_overflow_p);
8911 case tcc_constant:
8912 case tcc_declaration:
8913 case tcc_reference:
8914 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8915
8916 default:
8917 break;
8918 }
8919
8920 switch (code)
8921 {
8922 case TRUTH_NOT_EXPR:
8923 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8924 strict_overflow_p);
8925
8926 case TRUTH_AND_EXPR:
8927 case TRUTH_OR_EXPR:
8928 case TRUTH_XOR_EXPR:
8929 return tree_binary_nonzero_warnv_p (code, type,
8930 TREE_OPERAND (t, 0),
8931 TREE_OPERAND (t, 1),
8932 strict_overflow_p);
8933
8934 case COND_EXPR:
8935 case CONSTRUCTOR:
8936 case OBJ_TYPE_REF:
8937 case ASSERT_EXPR:
8938 case ADDR_EXPR:
8939 case WITH_SIZE_EXPR:
8940 case SSA_NAME:
8941 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8942
8943 case COMPOUND_EXPR:
8944 case MODIFY_EXPR:
8945 case BIND_EXPR:
8946 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
8947 strict_overflow_p);
8948
8949 case SAVE_EXPR:
8950 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
8951 strict_overflow_p);
8952
8953 case CALL_EXPR:
8954 {
8955 tree fndecl = get_callee_fndecl (t);
8956 if (!fndecl) return false;
8957 if (flag_delete_null_pointer_checks && !flag_check_new
8958 && DECL_IS_OPERATOR_NEW (fndecl)
8959 && !TREE_NOTHROW (fndecl))
8960 return true;
8961 if (flag_delete_null_pointer_checks
8962 && lookup_attribute ("returns_nonnull",
8963 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
8964 return true;
8965 return alloca_call_p (t);
8966 }
8967
8968 default:
8969 break;
8970 }
8971 return false;
8972 }
8973
8974 /* Return true when T is an address and is known to be nonzero.
8975 Handle warnings about undefined signed overflow. */
8976
8977 bool
8978 tree_expr_nonzero_p (tree t)
8979 {
8980 bool ret, strict_overflow_p;
8981
8982 strict_overflow_p = false;
8983 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
8984 if (strict_overflow_p)
8985 fold_overflow_warning (("assuming signed overflow does not occur when "
8986 "determining that expression is always "
8987 "non-zero"),
8988 WARN_STRICT_OVERFLOW_MISC);
8989 return ret;
8990 }
8991
8992 /* Return true if T is known not to be equal to an integer W. */
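/* E.g. an SSA_NAME whose value range is known to be [1, 5] cannot
   equal 7, and a value whose low bit is known to be zero cannot
   equal any odd W.  */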
8993
8994 bool
8995 expr_not_equal_to (tree t, const wide_int &w)
8996 {
8997 wide_int min, max, nz;
8998 value_range_type rtype;
8999 switch (TREE_CODE (t))
9000 {
9001 case INTEGER_CST:
9002 return wi::to_wide (t) != w;
9003
9004 case SSA_NAME:
9005 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9006 return false;
9007 rtype = get_range_info (t, &min, &max);
9008 if (rtype == VR_RANGE)
9009 {
9010 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9011 return true;
9012 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9013 return true;
9014 }
9015 else if (rtype == VR_ANTI_RANGE
9016 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9017 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9018 return true;
9019 /* If T has some known zero bits and W has any of those bits set,
9020 then T is known not to be equal to W. */
9021 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9022 TYPE_PRECISION (TREE_TYPE (t))), 0))
9023 return true;
9024 return false;
9025
9026 default:
9027 return false;
9028 }
9029 }
9030
9031 /* Fold a binary expression of code CODE and type TYPE with operands
9032 OP0 and OP1. LOC is the location of the resulting expression.
9033 Return the folded expression if folding is successful. Otherwise,
9034 return NULL_TREE. */
9035
9036 tree
9037 fold_binary_loc (location_t loc,
9038 enum tree_code code, tree type, tree op0, tree op1)
9039 {
9040 enum tree_code_class kind = TREE_CODE_CLASS (code);
9041 tree arg0, arg1, tem;
9042 tree t1 = NULL_TREE;
9043 bool strict_overflow_p;
9044 unsigned int prec;
9045
9046 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9047 && TREE_CODE_LENGTH (code) == 2
9048 && op0 != NULL_TREE
9049 && op1 != NULL_TREE);
9050
9051 arg0 = op0;
9052 arg1 = op1;
9053
9054 /* Strip any conversions that don't change the mode. This is
9055 safe for every expression, except for a comparison expression
9056 because its signedness is derived from its operands. So, in
9057 the latter case, only strip conversions that don't change the
9058 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9059 preserved.
9060
9061 Note that this is done as an internal manipulation within the
9062 constant folder, in order to find the simplest representation
9063 of the arguments so that their form can be studied. In any
9064 case, the appropriate type conversions should be put back in
9065 the tree that will get out of the constant folder. */
9066
9067 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9068 {
9069 STRIP_SIGN_NOPS (arg0);
9070 STRIP_SIGN_NOPS (arg1);
9071 }
9072 else
9073 {
9074 STRIP_NOPS (arg0);
9075 STRIP_NOPS (arg1);
9076 }
9077
9078 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9079 constant but we can't do arithmetic on them. */
9080 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9081 {
9082 tem = const_binop (code, type, arg0, arg1);
9083 if (tem != NULL_TREE)
9084 {
9085 if (TREE_TYPE (tem) != type)
9086 tem = fold_convert_loc (loc, type, tem);
9087 return tem;
9088 }
9089 }
9090
9091 /* If this is a commutative operation, and ARG0 is a constant, move it
9092 to ARG1 to reduce the number of tests below. */
9093 if (commutative_tree_code (code)
9094 && tree_swap_operands_p (arg0, arg1))
9095 return fold_build2_loc (loc, code, type, op1, op0);
9096
9097 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9098 to ARG1 to reduce the number of tests below. */
9099 if (kind == tcc_comparison
9100 && tree_swap_operands_p (arg0, arg1))
9101 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9102
9103 tem = generic_simplify (loc, code, type, op0, op1);
9104 if (tem)
9105 return tem;
9106
9107 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9108
9109 First check for cases where an arithmetic operation is applied to a
9110 compound, conditional, or comparison operation. Push the arithmetic
9111 operation inside the compound or conditional to see if any folding
9112 can then be done. Convert comparison to conditional for this purpose.
9113 This also optimizes non-constant cases that used to be done in
9114 expand_expr.
9115
9116 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9117 where one of the operands is a comparison and the other is a comparison,
9118 a BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9119 code below would make the expression more complex. Change it to a
9120 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9121 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
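/* E.g. (a < b) & (c < d) becomes (a < b) && (c < d), and
   (a < b) == (c < d) becomes the inversion of (a < b) ^ (c < d).  */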
9122
9123 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9124 || code == EQ_EXPR || code == NE_EXPR)
9125 && TREE_CODE (type) != VECTOR_TYPE
9126 && ((truth_value_p (TREE_CODE (arg0))
9127 && (truth_value_p (TREE_CODE (arg1))
9128 || (TREE_CODE (arg1) == BIT_AND_EXPR
9129 && integer_onep (TREE_OPERAND (arg1, 1)))))
9130 || (truth_value_p (TREE_CODE (arg1))
9131 && (truth_value_p (TREE_CODE (arg0))
9132 || (TREE_CODE (arg0) == BIT_AND_EXPR
9133 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9134 {
9135 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9136 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9137 : TRUTH_XOR_EXPR,
9138 boolean_type_node,
9139 fold_convert_loc (loc, boolean_type_node, arg0),
9140 fold_convert_loc (loc, boolean_type_node, arg1));
9141
9142 if (code == EQ_EXPR)
9143 tem = invert_truthvalue_loc (loc, tem);
9144
9145 return fold_convert_loc (loc, type, tem);
9146 }
9147
9148 if (TREE_CODE_CLASS (code) == tcc_binary
9149 || TREE_CODE_CLASS (code) == tcc_comparison)
9150 {
9151 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9152 {
9153 tem = fold_build2_loc (loc, code, type,
9154 fold_convert_loc (loc, TREE_TYPE (op0),
9155 TREE_OPERAND (arg0, 1)), op1);
9156 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9157 tem);
9158 }
9159 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9160 {
9161 tem = fold_build2_loc (loc, code, type, op0,
9162 fold_convert_loc (loc, TREE_TYPE (op1),
9163 TREE_OPERAND (arg1, 1)));
9164 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9165 tem);
9166 }
9167
9168 if (TREE_CODE (arg0) == COND_EXPR
9169 || TREE_CODE (arg0) == VEC_COND_EXPR
9170 || COMPARISON_CLASS_P (arg0))
9171 {
9172 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9173 arg0, arg1,
9174 /*cond_first_p=*/1);
9175 if (tem != NULL_TREE)
9176 return tem;
9177 }
9178
9179 if (TREE_CODE (arg1) == COND_EXPR
9180 || TREE_CODE (arg1) == VEC_COND_EXPR
9181 || COMPARISON_CLASS_P (arg1))
9182 {
9183 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9184 arg1, arg0,
9185 /*cond_first_p=*/0);
9186 if (tem != NULL_TREE)
9187 return tem;
9188 }
9189 }
9190
9191 switch (code)
9192 {
9193 case MEM_REF:
9194 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9195 if (TREE_CODE (arg0) == ADDR_EXPR
9196 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9197 {
9198 tree iref = TREE_OPERAND (arg0, 0);
9199 return fold_build2 (MEM_REF, type,
9200 TREE_OPERAND (iref, 0),
9201 int_const_binop (PLUS_EXPR, arg1,
9202 TREE_OPERAND (iref, 1)));
9203 }
9204
9205 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9206 if (TREE_CODE (arg0) == ADDR_EXPR
9207 && handled_component_p (TREE_OPERAND (arg0, 0)))
9208 {
9209 tree base;
9210 HOST_WIDE_INT coffset;
9211 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9212 &coffset);
9213 if (!base)
9214 return NULL_TREE;
9215 return fold_build2 (MEM_REF, type,
9216 build_fold_addr_expr (base),
9217 int_const_binop (PLUS_EXPR, arg1,
9218 size_int (coffset)));
9219 }
9220
9221 return NULL_TREE;
9222
9223 case POINTER_PLUS_EXPR:
9224 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9225 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9226 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9227 return fold_convert_loc (loc, type,
9228 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9229 fold_convert_loc (loc, sizetype,
9230 arg1),
9231 fold_convert_loc (loc, sizetype,
9232 arg0)));
9233
9234 return NULL_TREE;
9235
9236 case PLUS_EXPR:
9237 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9238 {
9239 /* X + (X / CST) * -CST is X % CST. */
9240 if (TREE_CODE (arg1) == MULT_EXPR
9241 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9242 && operand_equal_p (arg0,
9243 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9244 {
9245 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9246 tree cst1 = TREE_OPERAND (arg1, 1);
9247 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9248 cst1, cst0);
9249 if (sum && integer_zerop (sum))
9250 return fold_convert_loc (loc, type,
9251 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9252 TREE_TYPE (arg0), arg0,
9253 cst0));
9254 }
9255 }
9256
9257 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9258 one. Make sure the type is not saturating and has the signedness of
9259 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9260 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9261 if ((TREE_CODE (arg0) == MULT_EXPR
9262 || TREE_CODE (arg1) == MULT_EXPR)
9263 && !TYPE_SATURATING (type)
9264 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9265 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9266 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9267 {
9268 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9269 if (tem)
9270 return tem;
9271 }
9272
9273 if (! FLOAT_TYPE_P (type))
9274 {
9275 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9276 (plus (plus (mult) (mult)) (foo)) so that we can
9277 take advantage of the factoring cases below. */
9278 if (ANY_INTEGRAL_TYPE_P (type)
9279 && TYPE_OVERFLOW_WRAPS (type)
9280 && (((TREE_CODE (arg0) == PLUS_EXPR
9281 || TREE_CODE (arg0) == MINUS_EXPR)
9282 && TREE_CODE (arg1) == MULT_EXPR)
9283 || ((TREE_CODE (arg1) == PLUS_EXPR
9284 || TREE_CODE (arg1) == MINUS_EXPR)
9285 && TREE_CODE (arg0) == MULT_EXPR)))
9286 {
9287 tree parg0, parg1, parg, marg;
9288 enum tree_code pcode;
9289
9290 if (TREE_CODE (arg1) == MULT_EXPR)
9291 parg = arg0, marg = arg1;
9292 else
9293 parg = arg1, marg = arg0;
9294 pcode = TREE_CODE (parg);
9295 parg0 = TREE_OPERAND (parg, 0);
9296 parg1 = TREE_OPERAND (parg, 1);
9297 STRIP_NOPS (parg0);
9298 STRIP_NOPS (parg1);
9299
9300 if (TREE_CODE (parg0) == MULT_EXPR
9301 && TREE_CODE (parg1) != MULT_EXPR)
9302 return fold_build2_loc (loc, pcode, type,
9303 fold_build2_loc (loc, PLUS_EXPR, type,
9304 fold_convert_loc (loc, type,
9305 parg0),
9306 fold_convert_loc (loc, type,
9307 marg)),
9308 fold_convert_loc (loc, type, parg1));
9309 if (TREE_CODE (parg0) != MULT_EXPR
9310 && TREE_CODE (parg1) == MULT_EXPR)
9311 return
9312 fold_build2_loc (loc, PLUS_EXPR, type,
9313 fold_convert_loc (loc, type, parg0),
9314 fold_build2_loc (loc, pcode, type,
9315 fold_convert_loc (loc, type, marg),
9316 fold_convert_loc (loc, type,
9317 parg1)));
9318 }
9319 }
9320 else
9321 {
9322 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9323 to __complex__ ( x, y ). This is not the same for SNaNs or
9324 if signed zeros are involved. */
9325 if (!HONOR_SNANS (element_mode (arg0))
9326 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9327 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9328 {
9329 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9330 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9331 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9332 bool arg0rz = false, arg0iz = false;
9333 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9334 || (arg0i && (arg0iz = real_zerop (arg0i))))
9335 {
9336 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9337 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9338 if (arg0rz && arg1i && real_zerop (arg1i))
9339 {
9340 tree rp = arg1r ? arg1r
9341 : build1 (REALPART_EXPR, rtype, arg1);
9342 tree ip = arg0i ? arg0i
9343 : build1 (IMAGPART_EXPR, rtype, arg0);
9344 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9345 }
9346 else if (arg0iz && arg1r && real_zerop (arg1r))
9347 {
9348 tree rp = arg0r ? arg0r
9349 : build1 (REALPART_EXPR, rtype, arg0);
9350 tree ip = arg1i ? arg1i
9351 : build1 (IMAGPART_EXPR, rtype, arg1);
9352 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9353 }
9354 }
9355 }
9356
9357 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9358 We associate floats only if the user has specified
9359 -fassociative-math. */
9360 if (flag_associative_math
9361 && TREE_CODE (arg1) == PLUS_EXPR
9362 && TREE_CODE (arg0) != MULT_EXPR)
9363 {
9364 tree tree10 = TREE_OPERAND (arg1, 0);
9365 tree tree11 = TREE_OPERAND (arg1, 1);
9366 if (TREE_CODE (tree11) == MULT_EXPR
9367 && TREE_CODE (tree10) == MULT_EXPR)
9368 {
9369 tree tree0;
9370 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9371 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9372 }
9373 }
9374 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9375 We associate floats only if the user has specified
9376 -fassociative-math. */
9377 if (flag_associative_math
9378 && TREE_CODE (arg0) == PLUS_EXPR
9379 && TREE_CODE (arg1) != MULT_EXPR)
9380 {
9381 tree tree00 = TREE_OPERAND (arg0, 0);
9382 tree tree01 = TREE_OPERAND (arg0, 1);
9383 if (TREE_CODE (tree01) == MULT_EXPR
9384 && TREE_CODE (tree00) == MULT_EXPR)
9385 {
9386 tree tree0;
9387 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9388 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9389 }
9390 }
9391 }
9392
9393 bit_rotate:
9394 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9395 is a rotate of A by C1 bits. */
9396 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9397 is a rotate of A by B bits.
9398 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
9399 though in this case CODE must be | and not + or ^, otherwise
9400 it doesn't return A when B is 0. */
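/* E.g. for a 32-bit unsigned A, (A << 3) + (A >> 29) becomes a left
   rotate of A by 3, and (A << B) | (A >> (-B & 31)) becomes a left
   rotate of A by B.  */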
9401 {
9402 enum tree_code code0, code1;
9403 tree rtype;
9404 code0 = TREE_CODE (arg0);
9405 code1 = TREE_CODE (arg1);
9406 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9407 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9408 && operand_equal_p (TREE_OPERAND (arg0, 0),
9409 TREE_OPERAND (arg1, 0), 0)
9410 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9411 TYPE_UNSIGNED (rtype))
9412 /* Only create rotates in complete modes. Other cases are not
9413 expanded properly. */
9414 && (element_precision (rtype)
9415 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9416 {
9417 tree tree01, tree11;
9418 tree orig_tree01, orig_tree11;
9419 enum tree_code code01, code11;
9420
9421 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
9422 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
9423 STRIP_NOPS (tree01);
9424 STRIP_NOPS (tree11);
9425 code01 = TREE_CODE (tree01);
9426 code11 = TREE_CODE (tree11);
9427 if (code11 != MINUS_EXPR
9428 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
9429 {
9430 std::swap (code0, code1);
9431 std::swap (code01, code11);
9432 std::swap (tree01, tree11);
9433 std::swap (orig_tree01, orig_tree11);
9434 }
9435 if (code01 == INTEGER_CST
9436 && code11 == INTEGER_CST
9437 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9438 == element_precision (rtype)))
9439 {
9440 tem = build2_loc (loc, LROTATE_EXPR,
9441 rtype, TREE_OPERAND (arg0, 0),
9442 code0 == LSHIFT_EXPR
9443 ? orig_tree01 : orig_tree11);
9444 return fold_convert_loc (loc, type, tem);
9445 }
9446 else if (code11 == MINUS_EXPR)
9447 {
9448 tree tree110, tree111;
9449 tree110 = TREE_OPERAND (tree11, 0);
9450 tree111 = TREE_OPERAND (tree11, 1);
9451 STRIP_NOPS (tree110);
9452 STRIP_NOPS (tree111);
9453 if (TREE_CODE (tree110) == INTEGER_CST
9454 && 0 == compare_tree_int (tree110,
9455 element_precision (rtype))
9456 && operand_equal_p (tree01, tree111, 0))
9457 {
9458 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9459 ? LROTATE_EXPR : RROTATE_EXPR),
9460 rtype, TREE_OPERAND (arg0, 0),
9461 orig_tree01);
9462 return fold_convert_loc (loc, type, tem);
9463 }
9464 }
9465 else if (code == BIT_IOR_EXPR
9466 && code11 == BIT_AND_EXPR
9467 && pow2p_hwi (element_precision (rtype)))
9468 {
9469 tree tree110, tree111;
9470 tree110 = TREE_OPERAND (tree11, 0);
9471 tree111 = TREE_OPERAND (tree11, 1);
9472 STRIP_NOPS (tree110);
9473 STRIP_NOPS (tree111);
9474 if (TREE_CODE (tree110) == NEGATE_EXPR
9475 && TREE_CODE (tree111) == INTEGER_CST
9476 && 0 == compare_tree_int (tree111,
9477 element_precision (rtype) - 1)
9478 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
9479 {
9480 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9481 ? LROTATE_EXPR : RROTATE_EXPR),
9482 rtype, TREE_OPERAND (arg0, 0),
9483 orig_tree01);
9484 return fold_convert_loc (loc, type, tem);
9485 }
9486 }
9487 }
9488 }
9489
9490 associate:
9491 /* In most languages, we can't associate operations on floats through
9492 parentheses. Rather than remember where the parentheses were, we
9493 don't associate floats at all, unless the user has specified
9494 -fassociative-math.
9495 And, we need to make sure type is not saturating. */
9496
9497 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9498 && !TYPE_SATURATING (type))
9499 {
9500 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9501 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9502 tree atype = type;
9503 bool ok = true;
9504
9505 /* Split both trees into variables, constants, and literals. Then
9506 associate each group together, the constants with literals,
9507 then the result with variables. This increases the chances of
9508 literals being recombined later and of generating relocatable
9509 expressions for the sum of a constant and literal. */
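/* E.g. for (x + 1) + 2 the variable part is x and the literals are 1
   and 2, so the literals are combined and the result folds to x + 3.  */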
9510 var0 = split_tree (arg0, type, code,
9511 &minus_var0, &con0, &minus_con0,
9512 &lit0, &minus_lit0, 0);
9513 var1 = split_tree (arg1, type, code,
9514 &minus_var1, &con1, &minus_con1,
9515 &lit1, &minus_lit1, code == MINUS_EXPR);
9516
9517 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9518 if (code == MINUS_EXPR)
9519 code = PLUS_EXPR;
9520
9521 /* With undefined overflow prefer doing association in a type
9522 which wraps on overflow, if that is one of the operand types. */
9523 if (POINTER_TYPE_P (type)
9524 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9525 {
9526 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9527 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9528 atype = TREE_TYPE (arg0);
9529 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9530 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9531 atype = TREE_TYPE (arg1);
9532 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9533 }
9534
9535 /* With undefined overflow we can only associate constants with one
9536 variable, and constants whose association doesn't overflow. */
9537 if (POINTER_TYPE_P (atype)
9538 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9539 {
9540 if ((var0 && var1) || (minus_var0 && minus_var1))
9541 {
9542 /* ??? If split_tree would handle NEGATE_EXPR we could
9543 simply reject these cases and the allowed cases would
9544 be the var0/minus_var1 ones. */
9545 tree tmp0 = var0 ? var0 : minus_var0;
9546 tree tmp1 = var1 ? var1 : minus_var1;
9547 bool one_neg = false;
9548
9549 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9550 {
9551 tmp0 = TREE_OPERAND (tmp0, 0);
9552 one_neg = !one_neg;
9553 }
9554 if (CONVERT_EXPR_P (tmp0)
9555 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9556 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9557 <= TYPE_PRECISION (atype)))
9558 tmp0 = TREE_OPERAND (tmp0, 0);
9559 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9560 {
9561 tmp1 = TREE_OPERAND (tmp1, 0);
9562 one_neg = !one_neg;
9563 }
9564 if (CONVERT_EXPR_P (tmp1)
9565 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9566 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9567 <= TYPE_PRECISION (atype)))
9568 tmp1 = TREE_OPERAND (tmp1, 0);
9569 /* The only case we can still associate with two variables
9570 is if they cancel out. */
9571 if (!one_neg
9572 || !operand_equal_p (tmp0, tmp1, 0))
9573 ok = false;
9574 }
9575 else if ((var0 && minus_var1
9576 && ! operand_equal_p (var0, minus_var1, 0))
9577 || (minus_var0 && var1
9578 && ! operand_equal_p (minus_var0, var1, 0)))
9579 ok = false;
9580 }
9581
9582 /* Only do something if we found more than two objects. Otherwise,
9583 nothing has changed and we risk infinite recursion. */
9584 if (ok
9585 && (2 < ((var0 != 0) + (var1 != 0)
9586 + (minus_var0 != 0) + (minus_var1 != 0)
9587 + (con0 != 0) + (con1 != 0)
9588 + (minus_con0 != 0) + (minus_con1 != 0)
9589 + (lit0 != 0) + (lit1 != 0)
9590 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9591 {
9592 var0 = associate_trees (loc, var0, var1, code, atype);
9593 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9594 code, atype);
9595 con0 = associate_trees (loc, con0, con1, code, atype);
9596 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9597 code, atype);
9598 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9599 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9600 code, atype);
9601
9602 if (minus_var0 && var0)
9603 {
9604 var0 = associate_trees (loc, var0, minus_var0,
9605 MINUS_EXPR, atype);
9606 minus_var0 = 0;
9607 }
9608 if (minus_con0 && con0)
9609 {
9610 con0 = associate_trees (loc, con0, minus_con0,
9611 MINUS_EXPR, atype);
9612 minus_con0 = 0;
9613 }
9614
9615 /* Preserve the MINUS_EXPR if the negative part of the literal is
9616 greater than the positive part. Otherwise, the multiplicative
9617 folding code (i.e. extract_muldiv) may be fooled in case
9618 unsigned constants are subtracted, like in the following
9619 example: ((X*2 + 4) - 8U)/2. */
9620 if (minus_lit0 && lit0)
9621 {
9622 if (TREE_CODE (lit0) == INTEGER_CST
9623 && TREE_CODE (minus_lit0) == INTEGER_CST
9624 && tree_int_cst_lt (lit0, minus_lit0)
9625 /* But avoid ending up with only negated parts. */
9626 && (var0 || con0))
9627 {
9628 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9629 MINUS_EXPR, atype);
9630 lit0 = 0;
9631 }
9632 else
9633 {
9634 lit0 = associate_trees (loc, lit0, minus_lit0,
9635 MINUS_EXPR, atype);
9636 minus_lit0 = 0;
9637 }
9638 }
9639
9640 /* Don't introduce overflows through reassociation. */
9641 if ((lit0 && TREE_OVERFLOW_P (lit0))
9642 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9643 return NULL_TREE;
9644
9645 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
9646 con0 = associate_trees (loc, con0, lit0, code, atype);
9647 lit0 = 0;
9648 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9649 code, atype);
9650 minus_lit0 = 0;
9651
9652 /* Eliminate minus_con0. */
9653 if (minus_con0)
9654 {
9655 if (con0)
9656 con0 = associate_trees (loc, con0, minus_con0,
9657 MINUS_EXPR, atype);
9658 else if (var0)
9659 var0 = associate_trees (loc, var0, minus_con0,
9660 MINUS_EXPR, atype);
9661 else
9662 gcc_unreachable ();
9663 minus_con0 = 0;
9664 }
9665
9666 /* Eliminate minus_var0. */
9667 if (minus_var0)
9668 {
9669 if (con0)
9670 con0 = associate_trees (loc, con0, minus_var0,
9671 MINUS_EXPR, atype);
9672 else
9673 gcc_unreachable ();
9674 minus_var0 = 0;
9675 }
9676
9677 return
9678 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9679 code, atype));
9680 }
9681 }
9682
9683 return NULL_TREE;
9684
9685 case POINTER_DIFF_EXPR:
9686 case MINUS_EXPR:
9687 /* Fold &a[i] - &a[j] to i-j. */
9688 if (TREE_CODE (arg0) == ADDR_EXPR
9689 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9690 && TREE_CODE (arg1) == ADDR_EXPR
9691 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9692 {
9693 tree tem = fold_addr_of_array_ref_difference (loc, type,
9694 TREE_OPERAND (arg0, 0),
9695 TREE_OPERAND (arg1, 0),
9696 code
9697 == POINTER_DIFF_EXPR);
9698 if (tem)
9699 return tem;
9700 }
9701
9702 /* Further transformations are not for pointers. */
9703 if (code == POINTER_DIFF_EXPR)
9704 return NULL_TREE;
9705
9706 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9707 if (TREE_CODE (arg0) == NEGATE_EXPR
9708 && negate_expr_p (op1))
9709 return fold_build2_loc (loc, MINUS_EXPR, type,
9710 negate_expr (op1),
9711 fold_convert_loc (loc, type,
9712 TREE_OPERAND (arg0, 0)));
9713
9714 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9715 __complex__ ( x, -y ). This is not the same for SNaNs or if
9716 signed zeros are involved. */
9717 if (!HONOR_SNANS (element_mode (arg0))
9718 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9719 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9720 {
9721 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9722 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9723 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9724 bool arg0rz = false, arg0iz = false;
9725 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9726 || (arg0i && (arg0iz = real_zerop (arg0i))))
9727 {
9728 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9729 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9730 if (arg0rz && arg1i && real_zerop (arg1i))
9731 {
9732 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9733 arg1r ? arg1r
9734 : build1 (REALPART_EXPR, rtype, arg1));
9735 tree ip = arg0i ? arg0i
9736 : build1 (IMAGPART_EXPR, rtype, arg0);
9737 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9738 }
9739 else if (arg0iz && arg1r && real_zerop (arg1r))
9740 {
9741 tree rp = arg0r ? arg0r
9742 : build1 (REALPART_EXPR, rtype, arg0);
9743 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9744 arg1i ? arg1i
9745 : build1 (IMAGPART_EXPR, rtype, arg1));
9746 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9747 }
9748 }
9749 }
9750
9751 /* A - B -> A + (-B) if B is easily negatable. */
9752 if (negate_expr_p (op1)
9753 && ! TYPE_OVERFLOW_SANITIZED (type)
9754 && ((FLOAT_TYPE_P (type)
9755 /* Avoid this transformation if B is a positive REAL_CST. */
9756 && (TREE_CODE (op1) != REAL_CST
9757 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9758 || INTEGRAL_TYPE_P (type)))
9759 return fold_build2_loc (loc, PLUS_EXPR, type,
9760 fold_convert_loc (loc, type, arg0),
9761 negate_expr (op1));
9762
9763 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9764 one. Make sure the type is not saturating and has the signedness of
9765 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9766 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9767 if ((TREE_CODE (arg0) == MULT_EXPR
9768 || TREE_CODE (arg1) == MULT_EXPR)
9769 && !TYPE_SATURATING (type)
9770 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9771 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9772 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9773 {
9774 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9775 if (tem)
9776 return tem;
9777 }
9778
9779 goto associate;
9780
9781 case MULT_EXPR:
9782 if (! FLOAT_TYPE_P (type))
9783 {
9784 /* Transform x * -C into -x * C if x is easily negatable. */
9785 if (TREE_CODE (op1) == INTEGER_CST
9786 && tree_int_cst_sgn (op1) == -1
9787 && negate_expr_p (op0)
9788 && negate_expr_p (op1)
9789 && (tem = negate_expr (op1)) != op1
9790 && ! TREE_OVERFLOW (tem))
9791 return fold_build2_loc (loc, MULT_EXPR, type,
9792 fold_convert_loc (loc, type,
9793 negate_expr (op0)), tem);
9794
9795 strict_overflow_p = false;
9796 if (TREE_CODE (arg1) == INTEGER_CST
9797 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9798 &strict_overflow_p)))
9799 {
9800 if (strict_overflow_p)
9801 fold_overflow_warning (("assuming signed overflow does not "
9802 "occur when simplifying "
9803 "multiplication"),
9804 WARN_STRICT_OVERFLOW_MISC);
9805 return fold_convert_loc (loc, type, tem);
9806 }
9807
9808 /* Optimize z * conj(z) for integer complex numbers. */
9809 if (TREE_CODE (arg0) == CONJ_EXPR
9810 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9811 return fold_mult_zconjz (loc, type, arg1);
9812 if (TREE_CODE (arg1) == CONJ_EXPR
9813 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9814 return fold_mult_zconjz (loc, type, arg0);
9815 }
9816 else
9817 {
9818 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9819 This is not the same for NaNs or if signed zeros are
9820 involved. */
9821 if (!HONOR_NANS (arg0)
9822 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9823 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9824 && TREE_CODE (arg1) == COMPLEX_CST
9825 && real_zerop (TREE_REALPART (arg1)))
9826 {
9827 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9828 if (real_onep (TREE_IMAGPART (arg1)))
9829 return
9830 fold_build2_loc (loc, COMPLEX_EXPR, type,
9831 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9832 rtype, arg0)),
9833 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9834 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9835 return
9836 fold_build2_loc (loc, COMPLEX_EXPR, type,
9837 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9838 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9839 rtype, arg0)));
9840 }
9841
9842 /* Optimize z * conj(z) for floating point complex numbers.
9843 Guarded by flag_unsafe_math_optimizations as non-finite
9844 imaginary components don't produce scalar results. */
9845 if (flag_unsafe_math_optimizations
9846 && TREE_CODE (arg0) == CONJ_EXPR
9847 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9848 return fold_mult_zconjz (loc, type, arg1);
9849 if (flag_unsafe_math_optimizations
9850 && TREE_CODE (arg1) == CONJ_EXPR
9851 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9852 return fold_mult_zconjz (loc, type, arg0);
9853 }
9854 goto associate;
9855
9856 case BIT_IOR_EXPR:
9857 /* Canonicalize (X & C1) | C2. */
9858 if (TREE_CODE (arg0) == BIT_AND_EXPR
9859 && TREE_CODE (arg1) == INTEGER_CST
9860 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9861 {
9862 int width = TYPE_PRECISION (type), w;
9863 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
9864 wide_int c2 = wi::to_wide (arg1);
9865
9866 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9867 if ((c1 & c2) == c1)
9868 return omit_one_operand_loc (loc, type, arg1,
9869 TREE_OPERAND (arg0, 0));
9870
9871 wide_int msk = wi::mask (width, false,
9872 TYPE_PRECISION (TREE_TYPE (arg1)));
9873
9874 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9875 if (wi::bit_and_not (msk, c1 | c2) == 0)
9876 {
9877 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9878 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9879 }
9880
9881 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9882 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9883 mode which allows further optimizations. */
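/* E.g. (X & 6) | 2 becomes (X & 4) | 2, whereas for (X & 0xF0) | 0x0F
   the loop below keeps the byte-sized mask and produces
   (X & 0xFF) | 0x0F, which further passes can often simplify.  */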
9884 c1 &= msk;
9885 c2 &= msk;
9886 wide_int c3 = wi::bit_and_not (c1, c2);
9887 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9888 {
9889 wide_int mask = wi::mask (w, false,
9890 TYPE_PRECISION (type));
9891 if (((c1 | c2) & mask) == mask
9892 && wi::bit_and_not (c1, mask) == 0)
9893 {
9894 c3 = mask;
9895 break;
9896 }
9897 }
9898
9899 if (c3 != c1)
9900 {
9901 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9902 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
9903 wide_int_to_tree (type, c3));
9904 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9905 }
9906 }
9907
9908 /* See if this can be simplified into a rotate first. If that
9909 is unsuccessful, continue in the association code. */
9910 goto bit_rotate;
9911
9912 case BIT_XOR_EXPR:
9913 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9914 if (TREE_CODE (arg0) == BIT_AND_EXPR
9915 && INTEGRAL_TYPE_P (type)
9916 && integer_onep (TREE_OPERAND (arg0, 1))
9917 && integer_onep (arg1))
9918 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9919 build_zero_cst (TREE_TYPE (arg0)));
9920
9921 /* See if this can be simplified into a rotate first. If that
9922 is unsuccessful, continue in the association code. */
9923 goto bit_rotate;
9924
9925 case BIT_AND_EXPR:
9926 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9927 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9928 && INTEGRAL_TYPE_P (type)
9929 && integer_onep (TREE_OPERAND (arg0, 1))
9930 && integer_onep (arg1))
9931 {
9932 tree tem2;
9933 tem = TREE_OPERAND (arg0, 0);
9934 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9935 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9936 tem, tem2);
9937 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9938 build_zero_cst (TREE_TYPE (tem)));
9939 }
9940 /* Fold ~X & 1 as (X & 1) == 0. */
9941 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9942 && INTEGRAL_TYPE_P (type)
9943 && integer_onep (arg1))
9944 {
9945 tree tem2;
9946 tem = TREE_OPERAND (arg0, 0);
9947 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9948 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9949 tem, tem2);
9950 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9951 build_zero_cst (TREE_TYPE (tem)));
9952 }
9953 /* Fold !X & 1 as X == 0. */
9954 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9955 && integer_onep (arg1))
9956 {
9957 tem = TREE_OPERAND (arg0, 0);
9958 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9959 build_zero_cst (TREE_TYPE (tem)));
9960 }
9961
9962 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
9963 multiple of 1 << CST. */
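/* E.g. (X * 8) & -8 folds to X * 8, since the product already has
   its low three bits clear.  */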
9964 if (TREE_CODE (arg1) == INTEGER_CST)
9965 {
9966 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
9967 wide_int ncst1 = -cst1;
9968 if ((cst1 & ncst1) == ncst1
9969 && multiple_of_p (type, arg0,
9970 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
9971 return fold_convert_loc (loc, type, arg0);
9972 }
9973
9974 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
9975 bits from CST2. */
9976 if (TREE_CODE (arg1) == INTEGER_CST
9977 && TREE_CODE (arg0) == MULT_EXPR
9978 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9979 {
9980 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
9981 wide_int masked
9982 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
9983
9984 if (masked == 0)
9985 return omit_two_operands_loc (loc, type, build_zero_cst (type),
9986 arg0, arg1);
9987 else if (masked != warg1)
9988 {
9989 /* Avoid the transform if arg1 is a mask of some
9990 mode which allows further optimizations. */
9991 int pop = wi::popcount (warg1);
9992 if (!(pop >= BITS_PER_UNIT
9993 && pow2p_hwi (pop)
9994 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
9995 return fold_build2_loc (loc, code, type, op0,
9996 wide_int_to_tree (type, masked));
9997 }
9998 }
9999
10000 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10001 ((A & N) + B) & M -> (A + B) & M
10002 Similarly if (N & M) == 0,
10003 ((A | N) + B) & M -> (A + B) & M
10004 and for - instead of + (or unary - instead of +)
10005 and/or ^ instead of |.
10006 If B is constant and (B & M) == 0, fold into A & M. */
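/* E.g. ((A | 8) + B) & 7 becomes (A + B) & 7 because (8 & 7) == 0,
   and ((A & 7) + B) & 7 becomes (A + B) & 7 because the inner AND
   keeps every bit that survives the outer mask.  */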
10007 if (TREE_CODE (arg1) == INTEGER_CST)
10008 {
10009 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10010 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10011 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10012 && (TREE_CODE (arg0) == PLUS_EXPR
10013 || TREE_CODE (arg0) == MINUS_EXPR
10014 || TREE_CODE (arg0) == NEGATE_EXPR)
10015 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10016 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10017 {
10018 tree pmop[2];
10019 int which = 0;
10020 wide_int cst0;
10021
10022 /* Now we know that arg0 is (C + D) or (C - D) or
10023 -C and arg1 (M) == (1LL << cst) - 1.
10024 Store C into PMOP[0] and D into PMOP[1]. */
10025 pmop[0] = TREE_OPERAND (arg0, 0);
10026 pmop[1] = NULL;
10027 if (TREE_CODE (arg0) != NEGATE_EXPR)
10028 {
10029 pmop[1] = TREE_OPERAND (arg0, 1);
10030 which = 1;
10031 }
10032
10033 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10034 which = -1;
10035
10036 for (; which >= 0; which--)
10037 switch (TREE_CODE (pmop[which]))
10038 {
10039 case BIT_AND_EXPR:
10040 case BIT_IOR_EXPR:
10041 case BIT_XOR_EXPR:
10042 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10043 != INTEGER_CST)
10044 break;
10045 cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
10046 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10047 {
10048 if (cst0 != cst1)
10049 break;
10050 }
10051 else if (cst0 != 0)
10052 break;
10053 /* If C or D is of the form (A & N) where
10054 (N & M) == M, or of the form (A | N) or
10055 (A ^ N) where (N & M) == 0, replace it with A. */
10056 pmop[which] = TREE_OPERAND (pmop[which], 0);
10057 break;
10058 case INTEGER_CST:
10059 /* If C or D is an N where (N & M) == 0, it can be
10060 omitted (assumed 0). */
10061 if ((TREE_CODE (arg0) == PLUS_EXPR
10062 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10063 && (cst1 & wi::to_wide (pmop[which])) == 0)
10064 pmop[which] = NULL;
10065 break;
10066 default:
10067 break;
10068 }
10069
10070 /* Only build anything new if we optimized one or both arguments
10071 above. */
10072 if (pmop[0] != TREE_OPERAND (arg0, 0)
10073 || (TREE_CODE (arg0) != NEGATE_EXPR
10074 && pmop[1] != TREE_OPERAND (arg0, 1)))
10075 {
10076 tree utype = TREE_TYPE (arg0);
10077 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10078 {
10079 /* Perform the operations in a type that has defined
10080 overflow behavior. */
10081 utype = unsigned_type_for (TREE_TYPE (arg0));
10082 if (pmop[0] != NULL)
10083 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10084 if (pmop[1] != NULL)
10085 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10086 }
10087
10088 if (TREE_CODE (arg0) == NEGATE_EXPR)
10089 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10090 else if (TREE_CODE (arg0) == PLUS_EXPR)
10091 {
10092 if (pmop[0] != NULL && pmop[1] != NULL)
10093 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10094 pmop[0], pmop[1]);
10095 else if (pmop[0] != NULL)
10096 tem = pmop[0];
10097 else if (pmop[1] != NULL)
10098 tem = pmop[1];
10099 else
10100 return build_int_cst (type, 0);
10101 }
10102 else if (pmop[0] == NULL)
10103 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10104 else
10105 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10106 pmop[0], pmop[1]);
10107 /* TEM is now the new binary +, - or unary - replacement. */
10108 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10109 fold_convert_loc (loc, utype, arg1));
10110 return fold_convert_loc (loc, type, tem);
10111 }
10112 }
10113 }
10114
10115 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10116 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10117 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10118 {
10119 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10120
10121 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10122 if (mask == -1)
10123 return
10124 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10125 }
10126
10127 goto associate;
10128
10129 case RDIV_EXPR:
10130 /* Don't touch a floating-point divide by zero unless the mode
10131 of the constant can represent infinity. */
10132 if (TREE_CODE (arg1) == REAL_CST
10133 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10134 && real_zerop (arg1))
10135 return NULL_TREE;
10136
10137 /* (-A) / (-B) -> A / B */
10138 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10139 return fold_build2_loc (loc, RDIV_EXPR, type,
10140 TREE_OPERAND (arg0, 0),
10141 negate_expr (arg1));
10142 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10143 return fold_build2_loc (loc, RDIV_EXPR, type,
10144 negate_expr (arg0),
10145 TREE_OPERAND (arg1, 0));
10146 return NULL_TREE;
10147
10148 case TRUNC_DIV_EXPR:
10149 /* Fall through */
10150
10151 case FLOOR_DIV_EXPR:
10152 /* Simplify A / (B << N) where A and B are positive and B is
10153 a power of 2, to A >> (N + log2(B)). */
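/* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2).  */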
10154 strict_overflow_p = false;
10155 if (TREE_CODE (arg1) == LSHIFT_EXPR
10156 && (TYPE_UNSIGNED (type)
10157 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10158 {
10159 tree sval = TREE_OPERAND (arg1, 0);
10160 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10161 {
10162 tree sh_cnt = TREE_OPERAND (arg1, 1);
10163 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10164 wi::exact_log2 (wi::to_wide (sval)));
10165
10166 if (strict_overflow_p)
10167 fold_overflow_warning (("assuming signed overflow does not "
10168 "occur when simplifying A / (B << N)"),
10169 WARN_STRICT_OVERFLOW_MISC);
10170
10171 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10172 sh_cnt, pow2);
10173 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10174 fold_convert_loc (loc, type, arg0), sh_cnt);
10175 }
10176 }
10177
10178 /* Fall through */
10179
10180 case ROUND_DIV_EXPR:
10181 case CEIL_DIV_EXPR:
10182 case EXACT_DIV_EXPR:
10183 if (integer_zerop (arg1))
10184 return NULL_TREE;
10185
10186 /* Convert -A / -B to A / B when the type is signed and overflow is
10187 undefined. */
10188 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10189 && TREE_CODE (op0) == NEGATE_EXPR
10190 && negate_expr_p (op1))
10191 {
10192 if (INTEGRAL_TYPE_P (type))
10193 fold_overflow_warning (("assuming signed overflow does not occur "
10194 "when distributing negation across "
10195 "division"),
10196 WARN_STRICT_OVERFLOW_MISC);
10197 return fold_build2_loc (loc, code, type,
10198 fold_convert_loc (loc, type,
10199 TREE_OPERAND (arg0, 0)),
10200 negate_expr (op1));
10201 }
10202 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10203 && TREE_CODE (arg1) == NEGATE_EXPR
10204 && negate_expr_p (op0))
10205 {
10206 if (INTEGRAL_TYPE_P (type))
10207 fold_overflow_warning (("assuming signed overflow does not occur "
10208 "when distributing negation across "
10209 "division"),
10210 WARN_STRICT_OVERFLOW_MISC);
10211 return fold_build2_loc (loc, code, type,
10212 negate_expr (op0),
10213 fold_convert_loc (loc, type,
10214 TREE_OPERAND (arg1, 0)));
10215 }
10216
10217 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10218 operation, EXACT_DIV_EXPR.
10219
10220 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10221 At one time others generated faster code; it's not clear whether they
10222 do after the last round of changes to the DIV code in expmed.c. */
10223 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10224 && multiple_of_p (type, arg0, arg1))
10225 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10226 fold_convert (type, arg0),
10227 fold_convert (type, arg1));
10228
10229 strict_overflow_p = false;
10230 if (TREE_CODE (arg1) == INTEGER_CST
10231 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10232 &strict_overflow_p)))
10233 {
10234 if (strict_overflow_p)
10235 fold_overflow_warning (("assuming signed overflow does not occur "
10236 "when simplifying division"),
10237 WARN_STRICT_OVERFLOW_MISC);
10238 return fold_convert_loc (loc, type, tem);
10239 }
10240
10241 return NULL_TREE;
10242
10243 case CEIL_MOD_EXPR:
10244 case FLOOR_MOD_EXPR:
10245 case ROUND_MOD_EXPR:
10246 case TRUNC_MOD_EXPR:
10247 strict_overflow_p = false;
10248 if (TREE_CODE (arg1) == INTEGER_CST
10249 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10250 &strict_overflow_p)))
10251 {
10252 if (strict_overflow_p)
10253 fold_overflow_warning (("assuming signed overflow does not occur "
10254 "when simplifying modulus"),
10255 WARN_STRICT_OVERFLOW_MISC);
10256 return fold_convert_loc (loc, type, tem);
10257 }
10258
10259 return NULL_TREE;
10260
10261 case LROTATE_EXPR:
10262 case RROTATE_EXPR:
10263 case RSHIFT_EXPR:
10264 case LSHIFT_EXPR:
10265 /* Since a negative shift count is not well-defined,
10266 don't try to compute it in the compiler. */
10267 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10268 return NULL_TREE;
10269
10270 prec = element_precision (type);
10271
10272 /* If we have a rotate of a bit operation with the rotate count and
10273 the second operand of the bit operation both constant,
10274 permute the two operations. */
10275 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10276 && (TREE_CODE (arg0) == BIT_AND_EXPR
10277 || TREE_CODE (arg0) == BIT_IOR_EXPR
10278 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10279 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10280 {
10281 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10282 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10283 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10284 fold_build2_loc (loc, code, type,
10285 arg00, arg1),
10286 fold_build2_loc (loc, code, type,
10287 arg01, arg1));
10288 }
10289
10290 /* Two consecutive rotates adding up to some integer
10291 multiple of the precision of the type can be ignored. */
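/* E.g. rotating a 32-bit value right by 5 and then right by 27 is
   the identity.  */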
10292 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10293 && TREE_CODE (arg0) == RROTATE_EXPR
10294 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10295 && wi::umod_trunc (wi::to_wide (arg1)
10296 + wi::to_wide (TREE_OPERAND (arg0, 1)),
10297 prec) == 0)
10298 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10299
10300 return NULL_TREE;
10301
10302 case MIN_EXPR:
10303 case MAX_EXPR:
10304 goto associate;
10305
10306 case TRUTH_ANDIF_EXPR:
10307 /* Note that the operands of this must be ints
10308 and their values must be 0 or 1.
10309 ("true" is a fixed value perhaps depending on the language.) */
10310 /* If first arg is constant zero, return it. */
10311 if (integer_zerop (arg0))
10312 return fold_convert_loc (loc, type, arg0);
10313 /* FALLTHRU */
10314 case TRUTH_AND_EXPR:
10315 /* If either arg is constant true, drop it. */
10316 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10317 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10318 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10319 /* Preserve sequence points. */
10320 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10321 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10322 /* If second arg is constant zero, result is zero, but first arg
10323 must be evaluated. */
10324 if (integer_zerop (arg1))
10325 return omit_one_operand_loc (loc, type, arg1, arg0);
10326 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10327 case will be handled here. */
10328 if (integer_zerop (arg0))
10329 return omit_one_operand_loc (loc, type, arg0, arg1);
10330
10331 /* !X && X is always false. */
10332 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10333 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10334 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10335 /* X && !X is always false. */
10336 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10337 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10338 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10339
10340 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10341 means A >= Y && A != MAX, but in this case we know that
10342 A < X <= MAX. */
10343
10344 if (!TREE_SIDE_EFFECTS (arg0)
10345 && !TREE_SIDE_EFFECTS (arg1))
10346 {
10347 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10348 if (tem && !operand_equal_p (tem, arg0, 0))
10349 return fold_build2_loc (loc, code, type, tem, arg1);
10350
10351 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10352 if (tem && !operand_equal_p (tem, arg1, 0))
10353 return fold_build2_loc (loc, code, type, arg0, tem);
10354 }
10355
10356 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10357 != NULL_TREE)
10358 return tem;
10359
10360 return NULL_TREE;
10361
10362 case TRUTH_ORIF_EXPR:
10363 /* Note that the operands of this must be ints
10364 and their values must be 0 or true.
10365 ("true" is a fixed value perhaps depending on the language.) */
10366 /* If first arg is constant true, return it. */
10367 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10368 return fold_convert_loc (loc, type, arg0);
10369 /* FALLTHRU */
10370 case TRUTH_OR_EXPR:
10371 /* If either arg is constant zero, drop it. */
10372 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10373 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10374 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10375 /* Preserve sequence points. */
10376 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10377 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10378 /* If second arg is constant true, result is true, but we must
10379 evaluate first arg. */
10380 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10381 return omit_one_operand_loc (loc, type, arg1, arg0);
10382 /* Likewise for first arg, but note this only occurs here for
10383 TRUTH_OR_EXPR. */
10384 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10385 return omit_one_operand_loc (loc, type, arg0, arg1);
10386
10387 /* !X || X is always true. */
10388 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10389 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10390 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10391 /* X || !X is always true. */
10392 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10393 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10394 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10395
10396 /* (X && !Y) || (!X && Y) is X ^ Y */
10397 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10398 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10399 {
10400 tree a0, a1, l0, l1, n0, n1;
10401
10402 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10403 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10404
10405 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10406 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10407
10408 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10409 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10410
10411 if ((operand_equal_p (n0, a0, 0)
10412 && operand_equal_p (n1, a1, 0))
10413 || (operand_equal_p (n0, a1, 0)
10414 && operand_equal_p (n1, a0, 0)))
10415 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10416 }
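	/* Editorial sketch: for truth values a and b,

	     (a && !b) || (!a && b)   ==>   a ^ b

	   i.e. "exactly one of a and b holds"; the TRUTH_XOR_EXPR built
	   above (l0 ^ n1) denotes the same value.  */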
10417
10418 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10419 != NULL_TREE)
10420 return tem;
10421
10422 return NULL_TREE;
10423
10424 case TRUTH_XOR_EXPR:
10425 /* If the second arg is constant zero, drop it. */
10426 if (integer_zerop (arg1))
10427 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10428 /* If the second arg is constant true, this is a logical inversion. */
10429 if (integer_onep (arg1))
10430 {
10431 tem = invert_truthvalue_loc (loc, arg0);
10432 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10433 }
10434 /* Identical arguments cancel to zero. */
10435 if (operand_equal_p (arg0, arg1, 0))
10436 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10437
10438 /* !X ^ X is always true. */
10439 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10440 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10441 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10442
10443 /* X ^ !X is always true. */
10444 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10445 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10446 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10447
10448 return NULL_TREE;
10449
10450 case EQ_EXPR:
10451 case NE_EXPR:
10452 STRIP_NOPS (arg0);
10453 STRIP_NOPS (arg1);
10454
10455 tem = fold_comparison (loc, code, type, op0, op1);
10456 if (tem != NULL_TREE)
10457 return tem;
10458
10459 /* bool_var != 1 becomes !bool_var. */
10460 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10461 && code == NE_EXPR)
10462 return fold_convert_loc (loc, type,
10463 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10464 TREE_TYPE (arg0), arg0));
10465
10466 /* bool_var == 0 becomes !bool_var. */
10467 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10468 && code == EQ_EXPR)
10469 return fold_convert_loc (loc, type,
10470 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10471 TREE_TYPE (arg0), arg0));
10472
10473 /* !exp != 0 becomes !exp */
10474 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10475 && code == NE_EXPR)
10476 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10477
10478 /* If this is an EQ or NE comparison with zero and ARG0 is
10479 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10480 two operations, but the latter can be done in one less insn
10481 on machines that have only two-operand insns or on which a
10482 constant cannot be the first operand. */
10483 if (TREE_CODE (arg0) == BIT_AND_EXPR
10484 && integer_zerop (arg1))
10485 {
10486 tree arg00 = TREE_OPERAND (arg0, 0);
10487 tree arg01 = TREE_OPERAND (arg0, 1);
10488 if (TREE_CODE (arg00) == LSHIFT_EXPR
10489 && integer_onep (TREE_OPERAND (arg00, 0)))
10490 {
10491 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10492 arg01, TREE_OPERAND (arg00, 1));
10493 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10494 build_int_cst (TREE_TYPE (arg0), 1));
10495 return fold_build2_loc (loc, code, type,
10496 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10497 arg1);
10498 }
10499 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10500 && integer_onep (TREE_OPERAND (arg01, 0)))
10501 {
10502 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10503 arg00, TREE_OPERAND (arg01, 1));
10504 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10505 build_int_cst (TREE_TYPE (arg0), 1));
10506 return fold_build2_loc (loc, code, type,
10507 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10508 arg1);
10509 }
10510 }
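	/* Editorial sketch: both forms test a single variable bit,

	     ((1 << n) & x) != 0   ==>   ((x >> n) & 1) != 0

	   but the right-hand form never needs the constant 1 as the first
	   operand of a shift, which some machines cannot encode directly.  */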
10511
10512 /* If this is an NE or EQ comparison of zero against the result of a
10513 signed MOD operation whose second operand is a power of 2, make
10514 the MOD operation unsigned since it is simpler and equivalent. */
10515 if (integer_zerop (arg1)
10516 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10517 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10518 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10519 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10520 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10521 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10522 {
10523 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10524 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10525 fold_convert_loc (loc, newtype,
10526 TREE_OPERAND (arg0, 0)),
10527 fold_convert_loc (loc, newtype,
10528 TREE_OPERAND (arg0, 1)));
10529
10530 return fold_build2_loc (loc, code, type, newmod,
10531 fold_convert_loc (loc, newtype, arg1));
10532 }
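	/* Editorial sketch: for signed int x and a power-of-two modulus,

	     x % 4 == 0   ==>   (unsigned) x % 4u == 0

	   both reduce to (x & 3) == 0, and the unsigned form needs no
	   sign correction.  */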
10533
10534 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10535 C1 is a valid shift constant, and C2 is a power of two, i.e.
10536 a single bit. */
10537 if (TREE_CODE (arg0) == BIT_AND_EXPR
10538 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10539 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10540 == INTEGER_CST
10541 && integer_pow2p (TREE_OPERAND (arg0, 1))
10542 && integer_zerop (arg1))
10543 {
10544 tree itype = TREE_TYPE (arg0);
10545 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10546 prec = TYPE_PRECISION (itype);
10547
10548 /* Check for a valid shift count. */
10549 if (wi::ltu_p (wi::to_wide (arg001), prec))
10550 {
10551 tree arg01 = TREE_OPERAND (arg0, 1);
10552 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10553 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10554 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10555 can be rewritten as (X & (C2 << C1)) != 0. */
10556 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10557 {
10558 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10559 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10560 return fold_build2_loc (loc, code, type, tem,
10561 fold_convert_loc (loc, itype, arg1));
10562 }
10563 /* Otherwise, for signed (arithmetic) shifts,
10564 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10565 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10566 else if (!TYPE_UNSIGNED (itype))
10567 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10568 arg000, build_int_cst (itype, 0));
10569	      /* Otherwise, for unsigned (logical) shifts,
10570 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10571 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10572 else
10573 return omit_one_operand_loc (loc, type,
10574 code == EQ_EXPR ? integer_one_node
10575 : integer_zero_node,
10576 arg000);
10577 }
10578 }
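	/* Editorial sketch of the three branches above, for 32-bit x:

	     ((x >> 3) & 4) != 0    ==>   (x & (4 << 3)) != 0   [2 + 3 < 32]
	     ((x >> 31) & 2) != 0   ==>   x < 0                 [signed x]
	     ((x >> 31) & 2) != 0   ==>   false                 [unsigned x]

	   in the last two cases the shifted mask (2 << 31) would overflow,
	   so the test degenerates to a sign check (arithmetic shift) or a
	   constant (logical shift).  */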
10579
10580 /* If this is a comparison of a field, we may be able to simplify it. */
10581 if ((TREE_CODE (arg0) == COMPONENT_REF
10582 || TREE_CODE (arg0) == BIT_FIELD_REF)
10583 /* Handle the constant case even without -O
10584 to make sure the warnings are given. */
10585 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10586 {
10587 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10588 if (t1)
10589 return t1;
10590 }
10591
10592 /* Optimize comparisons of strlen vs zero to a compare of the
10593 first character of the string vs zero. To wit,
10594 strlen(ptr) == 0 => *ptr == 0
10595 strlen(ptr) != 0 => *ptr != 0
10596 Other cases should reduce to one of these two (or a constant)
10597 due to the return value of strlen being unsigned. */
10598 if (TREE_CODE (arg0) == CALL_EXPR
10599 && integer_zerop (arg1))
10600 {
10601 tree fndecl = get_callee_fndecl (arg0);
10602
10603 if (fndecl
10604 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10605 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10606 && call_expr_nargs (arg0) == 1
10607 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10608 {
10609 tree iref = build_fold_indirect_ref_loc (loc,
10610 CALL_EXPR_ARG (arg0, 0));
10611 return fold_build2_loc (loc, code, type, iref,
10612 build_int_cst (TREE_TYPE (iref), 0));
10613 }
10614 }
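	/* Editorial sketch: for const char *p,

	     strlen (p) == 0   ==>   *p == 0

	   one byte load instead of a call; comparisons against other
	   constants reduce to these forms (or to a constant) because
	   strlen's result is unsigned.  */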
10615
10616 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10617 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10618 if (TREE_CODE (arg0) == RSHIFT_EXPR
10619 && integer_zerop (arg1)
10620 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10621 {
10622 tree arg00 = TREE_OPERAND (arg0, 0);
10623 tree arg01 = TREE_OPERAND (arg0, 1);
10624 tree itype = TREE_TYPE (arg00);
10625 if (wi::to_wide (arg01) == element_precision (itype) - 1)
10626 {
10627 if (TYPE_UNSIGNED (itype))
10628 {
10629 itype = signed_type_for (itype);
10630 arg00 = fold_convert_loc (loc, itype, arg00);
10631 }
10632 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10633 type, arg00, build_zero_cst (itype));
10634 }
10635 }
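	/* Editorial sketch: for 32-bit x,

	     (x >> 31) != 0   ==>   x < 0
	     (x >> 31) == 0   ==>   x >= 0

	   only the sign bit can survive the shift; an unsigned x is first
	   converted to the signed type so the comparison reads that bit.  */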
10636
10637 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10638 (X & C) == 0 when C is a single bit. */
10639 if (TREE_CODE (arg0) == BIT_AND_EXPR
10640 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10641 && integer_zerop (arg1)
10642 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10643 {
10644 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10645 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10646 TREE_OPERAND (arg0, 1));
10647 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10648 type, tem,
10649 fold_convert_loc (loc, TREE_TYPE (arg0),
10650 arg1));
10651 }
10652
10653 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10654 constant C is a power of two, i.e. a single bit. */
10655 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10656 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10657 && integer_zerop (arg1)
10658 && integer_pow2p (TREE_OPERAND (arg0, 1))
10659 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10660 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10661 {
10662 tree arg00 = TREE_OPERAND (arg0, 0);
10663 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10664 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10665 }
10666
10667 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10668	 when C is a power of two, i.e. a single bit. */
10669 if (TREE_CODE (arg0) == BIT_AND_EXPR
10670 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10671 && integer_zerop (arg1)
10672 && integer_pow2p (TREE_OPERAND (arg0, 1))
10673 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10674 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10675 {
10676 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10677 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10678 arg000, TREE_OPERAND (arg0, 1));
10679 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10680 tem, build_int_cst (TREE_TYPE (tem), 0));
10681 }
10682
10683 if (integer_zerop (arg1)
10684 && tree_expr_nonzero_p (arg0))
10685 {
10686	 tree res = constant_boolean_node (code == NE_EXPR, type);
10687 return omit_one_operand_loc (loc, type, res, arg0);
10688 }
10689
10690	      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10691 if (TREE_CODE (arg0) == BIT_AND_EXPR
10692 && TREE_CODE (arg1) == BIT_AND_EXPR)
10693 {
10694 tree arg00 = TREE_OPERAND (arg0, 0);
10695 tree arg01 = TREE_OPERAND (arg0, 1);
10696 tree arg10 = TREE_OPERAND (arg1, 0);
10697 tree arg11 = TREE_OPERAND (arg1, 1);
10698 tree itype = TREE_TYPE (arg0);
10699
10700 if (operand_equal_p (arg01, arg11, 0))
10701 {
10702 tem = fold_convert_loc (loc, itype, arg10);
10703 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10704 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10705 return fold_build2_loc (loc, code, type, tem,
10706 build_zero_cst (itype));
10707 }
10708 if (operand_equal_p (arg01, arg10, 0))
10709 {
10710 tem = fold_convert_loc (loc, itype, arg11);
10711 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10712 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10713 return fold_build2_loc (loc, code, type, tem,
10714 build_zero_cst (itype));
10715 }
10716 if (operand_equal_p (arg00, arg11, 0))
10717 {
10718 tem = fold_convert_loc (loc, itype, arg10);
10719 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10720 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10721 return fold_build2_loc (loc, code, type, tem,
10722 build_zero_cst (itype));
10723 }
10724 if (operand_equal_p (arg00, arg10, 0))
10725 {
10726 tem = fold_convert_loc (loc, itype, arg11);
10727 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10728 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10729 return fold_build2_loc (loc, code, type, tem,
10730 build_zero_cst (itype));
10731 }
10732 }
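	/* Editorial sketch: comparing two values masked by the same
	   constant,

	     (x & c) == (y & c)   ==>   ((x ^ y) & c) == 0

	   the masked bits are equal exactly when none of them differs,
	   saving one AND.  */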
10733
10734 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10735 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10736 {
10737 tree arg00 = TREE_OPERAND (arg0, 0);
10738 tree arg01 = TREE_OPERAND (arg0, 1);
10739 tree arg10 = TREE_OPERAND (arg1, 0);
10740 tree arg11 = TREE_OPERAND (arg1, 1);
10741 tree itype = TREE_TYPE (arg0);
10742
10743 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10744 operand_equal_p guarantees no side-effects so we don't need
10745 to use omit_one_operand on Z. */
10746 if (operand_equal_p (arg01, arg11, 0))
10747 return fold_build2_loc (loc, code, type, arg00,
10748 fold_convert_loc (loc, TREE_TYPE (arg00),
10749 arg10));
10750 if (operand_equal_p (arg01, arg10, 0))
10751 return fold_build2_loc (loc, code, type, arg00,
10752 fold_convert_loc (loc, TREE_TYPE (arg00),
10753 arg11));
10754 if (operand_equal_p (arg00, arg11, 0))
10755 return fold_build2_loc (loc, code, type, arg01,
10756 fold_convert_loc (loc, TREE_TYPE (arg01),
10757 arg10));
10758 if (operand_equal_p (arg00, arg10, 0))
10759 return fold_build2_loc (loc, code, type, arg01,
10760 fold_convert_loc (loc, TREE_TYPE (arg01),
10761 arg11));
10762
10763 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10764 if (TREE_CODE (arg01) == INTEGER_CST
10765 && TREE_CODE (arg11) == INTEGER_CST)
10766 {
10767 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10768 fold_convert_loc (loc, itype, arg11));
10769 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10770 return fold_build2_loc (loc, code, type, tem,
10771 fold_convert_loc (loc, itype, arg10));
10772 }
10773 }
10774
10775 /* Attempt to simplify equality/inequality comparisons of complex
10776 values. Only lower the comparison if the result is known or
10777 can be simplified to a single scalar comparison. */
10778 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10779 || TREE_CODE (arg0) == COMPLEX_CST)
10780 && (TREE_CODE (arg1) == COMPLEX_EXPR
10781 || TREE_CODE (arg1) == COMPLEX_CST))
10782 {
10783 tree real0, imag0, real1, imag1;
10784 tree rcond, icond;
10785
10786 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10787 {
10788 real0 = TREE_OPERAND (arg0, 0);
10789 imag0 = TREE_OPERAND (arg0, 1);
10790 }
10791 else
10792 {
10793 real0 = TREE_REALPART (arg0);
10794 imag0 = TREE_IMAGPART (arg0);
10795 }
10796
10797 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10798 {
10799 real1 = TREE_OPERAND (arg1, 0);
10800 imag1 = TREE_OPERAND (arg1, 1);
10801 }
10802 else
10803 {
10804 real1 = TREE_REALPART (arg1);
10805 imag1 = TREE_IMAGPART (arg1);
10806 }
10807
10808 rcond = fold_binary_loc (loc, code, type, real0, real1);
10809 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10810 {
10811 if (integer_zerop (rcond))
10812 {
10813 if (code == EQ_EXPR)
10814 return omit_two_operands_loc (loc, type, boolean_false_node,
10815 imag0, imag1);
10816 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10817 }
10818 else
10819 {
10820 if (code == NE_EXPR)
10821 return omit_two_operands_loc (loc, type, boolean_true_node,
10822 imag0, imag1);
10823 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10824 }
10825 }
10826
10827 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10828 if (icond && TREE_CODE (icond) == INTEGER_CST)
10829 {
10830 if (integer_zerop (icond))
10831 {
10832 if (code == EQ_EXPR)
10833 return omit_two_operands_loc (loc, type, boolean_false_node,
10834 real0, real1);
10835 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10836 }
10837 else
10838 {
10839 if (code == NE_EXPR)
10840 return omit_two_operands_loc (loc, type, boolean_true_node,
10841 real0, real1);
10842 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10843 }
10844 }
10845 }
10846
10847 return NULL_TREE;
10848
10849 case LT_EXPR:
10850 case GT_EXPR:
10851 case LE_EXPR:
10852 case GE_EXPR:
10853 tem = fold_comparison (loc, code, type, op0, op1);
10854 if (tem != NULL_TREE)
10855 return tem;
10856
10857 /* Transform comparisons of the form X +- C CMP X. */
10858 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10859 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10860 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10861 && !HONOR_SNANS (arg0))
10862 {
10863 tree arg01 = TREE_OPERAND (arg0, 1);
10864 enum tree_code code0 = TREE_CODE (arg0);
10865 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10866
10867 /* (X - c) > X becomes false. */
10868 if (code == GT_EXPR
10869 && ((code0 == MINUS_EXPR && is_positive >= 0)
10870 || (code0 == PLUS_EXPR && is_positive <= 0)))
10871 return constant_boolean_node (0, type);
10872
10873 /* Likewise (X + c) < X becomes false. */
10874 if (code == LT_EXPR
10875 && ((code0 == PLUS_EXPR && is_positive >= 0)
10876 || (code0 == MINUS_EXPR && is_positive <= 0)))
10877 return constant_boolean_node (0, type);
10878
10879 /* Convert (X - c) <= X to true. */
10880 if (!HONOR_NANS (arg1)
10881 && code == LE_EXPR
10882 && ((code0 == MINUS_EXPR && is_positive >= 0)
10883 || (code0 == PLUS_EXPR && is_positive <= 0)))
10884 return constant_boolean_node (1, type);
10885
10886 /* Convert (X + c) >= X to true. */
10887 if (!HONOR_NANS (arg1)
10888 && code == GE_EXPR
10889 && ((code0 == PLUS_EXPR && is_positive >= 0)
10890 || (code0 == MINUS_EXPR && is_positive <= 0)))
10891 return constant_boolean_node (1, type);
10892 }
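	/* Editorial sketch, for double x and a positive constant c (and
	   no signaling NaNs):

	     (x - c) > x    ==>   false   [NaN-safe: NaN > NaN is false]
	     (x - c) <= x   ==>   true    [needs !HONOR_NANS: NaN <= NaN
					   is false, not true]

	   which is why only the LE/GE cases carry the extra NaN guard.  */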
10893
10894 /* If we are comparing an ABS_EXPR with a constant, we can
10895 convert all the cases into explicit comparisons, but they may
10896 well not be faster than doing the ABS and one comparison.
10897 But ABS (X) <= C is a range comparison, which becomes a subtraction
10898 and a comparison, and is probably faster. */
10899 if (code == LE_EXPR
10900 && TREE_CODE (arg1) == INTEGER_CST
10901 && TREE_CODE (arg0) == ABS_EXPR
10902 && ! TREE_SIDE_EFFECTS (arg0)
10903 && (0 != (tem = negate_expr (arg1)))
10904 && TREE_CODE (tem) == INTEGER_CST
10905 && !TREE_OVERFLOW (tem))
10906 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
10907 build2 (GE_EXPR, type,
10908 TREE_OPERAND (arg0, 0), tem),
10909 build2 (LE_EXPR, type,
10910 TREE_OPERAND (arg0, 0), arg1));
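	/* Editorial sketch: for int x,

	     abs (x) <= 5   ==>   x >= -5 && x <= 5

	   a range check with no ABS; the TREE_OVERFLOW test above rejects
	   constants whose negation would overflow.  */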
10911
10912 /* Convert ABS_EXPR<x> >= 0 to true. */
10913 strict_overflow_p = false;
10914 if (code == GE_EXPR
10915 && (integer_zerop (arg1)
10916 || (! HONOR_NANS (arg0)
10917 && real_zerop (arg1)))
10918 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
10919 {
10920 if (strict_overflow_p)
10921 fold_overflow_warning (("assuming signed overflow does not occur "
10922 "when simplifying comparison of "
10923 "absolute value and zero"),
10924 WARN_STRICT_OVERFLOW_CONDITIONAL);
10925 return omit_one_operand_loc (loc, type,
10926 constant_boolean_node (true, type),
10927 arg0);
10928 }
10929
10930 /* Convert ABS_EXPR<x> < 0 to false. */
10931 strict_overflow_p = false;
10932 if (code == LT_EXPR
10933 && (integer_zerop (arg1) || real_zerop (arg1))
10934 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
10935 {
10936 if (strict_overflow_p)
10937 fold_overflow_warning (("assuming signed overflow does not occur "
10938 "when simplifying comparison of "
10939 "absolute value and zero"),
10940 WARN_STRICT_OVERFLOW_CONDITIONAL);
10941 return omit_one_operand_loc (loc, type,
10942 constant_boolean_node (false, type),
10943 arg0);
10944 }
10945
10946 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10947 and similarly for >= into !=. */
10948 if ((code == LT_EXPR || code == GE_EXPR)
10949 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10950 && TREE_CODE (arg1) == LSHIFT_EXPR
10951 && integer_onep (TREE_OPERAND (arg1, 0)))
10952 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10953 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10954 TREE_OPERAND (arg1, 1)),
10955 build_zero_cst (TREE_TYPE (arg0)));
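	/* Editorial sketch: for unsigned x,

	     x < (1 << y)    ==>   (x >> y) == 0
	     x >= (1 << y)   ==>   (x >> y) != 0

	   x is below the y-th power of two exactly when no bit at
	   position y or above is set.  */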
10956
10957	      /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
10958 otherwise Y might be >= # of bits in X's type and thus e.g.
10959 (unsigned char) (1 << Y) for Y 15 might be 0.
10960 If the cast is widening, then 1 << Y should have unsigned type,
10961 otherwise if Y is number of bits in the signed shift type minus 1,
10962 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
10963 31 might be 0xffffffff80000000. */
10964 if ((code == LT_EXPR || code == GE_EXPR)
10965 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10966 && CONVERT_EXPR_P (arg1)
10967 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
10968 && (element_precision (TREE_TYPE (arg1))
10969 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
10970 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
10971 || (element_precision (TREE_TYPE (arg1))
10972 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
10973 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
10974 {
10975 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10976 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
10977 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10978 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
10979 build_zero_cst (TREE_TYPE (arg0)));
10980 }
10981
10982 return NULL_TREE;
10983
10984 case UNORDERED_EXPR:
10985 case ORDERED_EXPR:
10986 case UNLT_EXPR:
10987 case UNLE_EXPR:
10988 case UNGT_EXPR:
10989 case UNGE_EXPR:
10990 case UNEQ_EXPR:
10991 case LTGT_EXPR:
10992 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10993 {
10994 tree targ0 = strip_float_extensions (arg0);
10995 tree targ1 = strip_float_extensions (arg1);
10996 tree newtype = TREE_TYPE (targ0);
10997
10998 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10999 newtype = TREE_TYPE (targ1);
11000
11001 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11002 return fold_build2_loc (loc, code, type,
11003 fold_convert_loc (loc, newtype, targ0),
11004 fold_convert_loc (loc, newtype, targ1));
11005 }
11006
11007 return NULL_TREE;
11008
11009 case COMPOUND_EXPR:
11010 /* When pedantic, a compound expression can be neither an lvalue
11011 nor an integer constant expression. */
11012 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11013 return NULL_TREE;
11014	      /* Don't let (0, 0) be a null pointer constant. */
11015 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11016 : fold_convert_loc (loc, type, arg1);
11017 return pedantic_non_lvalue_loc (loc, tem);
11018
11019 case ASSERT_EXPR:
11020 /* An ASSERT_EXPR should never be passed to fold_binary. */
11021 gcc_unreachable ();
11022
11023 default:
11024 return NULL_TREE;
11025 } /* switch (code) */
11026 }
11027
11028 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11029 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11030 of GOTO_EXPR. */
11031
11032 static tree
11033 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11034 {
11035 switch (TREE_CODE (*tp))
11036 {
11037 case LABEL_EXPR:
11038 return *tp;
11039
11040 case GOTO_EXPR:
11041 *walk_subtrees = 0;
11042
11043 /* fall through */
11044
11045 default:
11046 return NULL_TREE;
11047 }
11048 }
11049
11050 /* Return whether the sub-tree ST contains a label which is accessible from
11051 outside the sub-tree. */
11052
11053 static bool
11054 contains_label_p (tree st)
11055 {
11056 return
11057	 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11058 }
11059
11060 /* Fold a ternary expression of code CODE and type TYPE with operands
11061 OP0, OP1, and OP2. Return the folded expression if folding is
11062 successful. Otherwise, return NULL_TREE. */
11063
11064 tree
11065 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11066 tree op0, tree op1, tree op2)
11067 {
11068 tree tem;
11069 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11070 enum tree_code_class kind = TREE_CODE_CLASS (code);
11071
11072 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11073 && TREE_CODE_LENGTH (code) == 3);
11074
11075 /* If this is a commutative operation, and OP0 is a constant, move it
11076 to OP1 to reduce the number of tests below. */
11077 if (commutative_ternary_tree_code (code)
11078 && tree_swap_operands_p (op0, op1))
11079 return fold_build3_loc (loc, code, type, op1, op0, op2);
11080
11081 tem = generic_simplify (loc, code, type, op0, op1, op2);
11082 if (tem)
11083 return tem;
11084
11085 /* Strip any conversions that don't change the mode. This is safe
11086 for every expression, except for a comparison expression because
11087 its signedness is derived from its operands. So, in the latter
11088 case, only strip conversions that don't change the signedness.
11089
11090 Note that this is done as an internal manipulation within the
11091 constant folder, in order to find the simplest representation of
11092	 the arguments so that their form can be studied. In any case,
11093 the appropriate type conversions should be put back in the tree
11094 that will get out of the constant folder. */
11095 if (op0)
11096 {
11097 arg0 = op0;
11098 STRIP_NOPS (arg0);
11099 }
11100
11101 if (op1)
11102 {
11103 arg1 = op1;
11104 STRIP_NOPS (arg1);
11105 }
11106
11107 if (op2)
11108 {
11109 arg2 = op2;
11110 STRIP_NOPS (arg2);
11111 }
11112
11113 switch (code)
11114 {
11115 case COMPONENT_REF:
11116 if (TREE_CODE (arg0) == CONSTRUCTOR
11117 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11118 {
11119 unsigned HOST_WIDE_INT idx;
11120 tree field, value;
11121 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11122 if (field == arg1)
11123 return value;
11124 }
11125 return NULL_TREE;
11126
11127 case COND_EXPR:
11128 case VEC_COND_EXPR:
11129 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11130 so all simple results must be passed through pedantic_non_lvalue. */
11131 if (TREE_CODE (arg0) == INTEGER_CST)
11132 {
11133 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11134 tem = integer_zerop (arg0) ? op2 : op1;
11135 /* Only optimize constant conditions when the selected branch
11136 has the same type as the COND_EXPR. This avoids optimizing
11137 away "c ? x : throw", where the throw has a void type.
11138	 Also avoid throwing away an operand that contains a label. */
11139 if ((!TREE_SIDE_EFFECTS (unused_op)
11140 || !contains_label_p (unused_op))
11141 && (! VOID_TYPE_P (TREE_TYPE (tem))
11142 || VOID_TYPE_P (type)))
11143 return pedantic_non_lvalue_loc (loc, tem);
11144 return NULL_TREE;
11145 }
11146 else if (TREE_CODE (arg0) == VECTOR_CST)
11147 {
11148 if ((TREE_CODE (arg1) == VECTOR_CST
11149 || TREE_CODE (arg1) == CONSTRUCTOR)
11150 && (TREE_CODE (arg2) == VECTOR_CST
11151 || TREE_CODE (arg2) == CONSTRUCTOR))
11152 {
11153 unsigned int nelts = VECTOR_CST_NELTS (arg0), i;
11154 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11155 auto_vec_perm_indices sel (nelts);
11156 for (i = 0; i < nelts; i++)
11157 {
11158 tree val = VECTOR_CST_ELT (arg0, i);
11159 if (integer_all_onesp (val))
11160 sel.quick_push (i);
11161 else if (integer_zerop (val))
11162 sel.quick_push (nelts + i);
11163 else /* Currently unreachable. */
11164 return NULL_TREE;
11165 }
11166 tree t = fold_vec_perm (type, arg1, arg2, sel);
11167 if (t != NULL_TREE)
11168 return t;
11169 }
11170 }
11171
11172 /* If we have A op B ? A : C, we may be able to convert this to a
11173 simpler expression, depending on the operation and the values
11174 of B and C. Signed zeros prevent all of these transformations,
11175 for reasons given above each one.
11176
11177 Also try swapping the arguments and inverting the conditional. */
11178 if (COMPARISON_CLASS_P (arg0)
11179 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11180 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11181 {
11182 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11183 if (tem)
11184 return tem;
11185 }
11186
11187 if (COMPARISON_CLASS_P (arg0)
11188 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11189 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11190 {
11191 location_t loc0 = expr_location_or (arg0, loc);
11192 tem = fold_invert_truthvalue (loc0, arg0);
11193 if (tem && COMPARISON_CLASS_P (tem))
11194 {
11195 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11196 if (tem)
11197 return tem;
11198 }
11199 }
11200
11201 /* If the second operand is simpler than the third, swap them
11202 since that produces better jump optimization results. */
11203 if (truth_value_p (TREE_CODE (arg0))
11204 && tree_swap_operands_p (op1, op2))
11205 {
11206 location_t loc0 = expr_location_or (arg0, loc);
11207 /* See if this can be inverted. If it can't, possibly because
11208 it was a floating-point inequality comparison, don't do
11209 anything. */
11210 tem = fold_invert_truthvalue (loc0, arg0);
11211 if (tem)
11212 return fold_build3_loc (loc, code, type, tem, op2, op1);
11213 }
11214
11215 /* Convert A ? 1 : 0 to simply A. */
11216 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11217 : (integer_onep (op1)
11218 && !VECTOR_TYPE_P (type)))
11219 && integer_zerop (op2)
11220 /* If we try to convert OP0 to our type, the
11221 call to fold will try to move the conversion inside
11222 a COND, which will recurse. In that case, the COND_EXPR
11223 is probably the best choice, so leave it alone. */
11224 && type == TREE_TYPE (arg0))
11225 return pedantic_non_lvalue_loc (loc, arg0);
11226
11227 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11228 over COND_EXPR in cases such as floating point comparisons. */
11229 if (integer_zerop (op1)
11230 && code == COND_EXPR
11231 && integer_onep (op2)
11232 && !VECTOR_TYPE_P (type)
11233 && truth_value_p (TREE_CODE (arg0)))
11234 return pedantic_non_lvalue_loc (loc,
11235 fold_convert_loc (loc, type,
11236 invert_truthvalue_loc (loc,
11237 arg0)));
11238
11239 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11240 if (TREE_CODE (arg0) == LT_EXPR
11241 && integer_zerop (TREE_OPERAND (arg0, 1))
11242 && integer_zerop (op2)
11243 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11244 {
11245 /* sign_bit_p looks through both zero and sign extensions,
11246 but for this optimization only sign extensions are
11247 usable. */
11248 tree tem2 = TREE_OPERAND (arg0, 0);
11249 while (tem != tem2)
11250 {
11251 if (TREE_CODE (tem2) != NOP_EXPR
11252 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11253 {
11254 tem = NULL_TREE;
11255 break;
11256 }
11257 tem2 = TREE_OPERAND (tem2, 0);
11258 }
11259 /* sign_bit_p only checks ARG1 bits within A's precision.
11260 If <sign bit of A> has wider type than A, bits outside
11261 of A's precision in <sign bit of A> need to be checked.
11262	 If they are all 0, this optimization must be done in
11263	 A's unsigned type; if they are all 1, in A's signed type;
11264	 otherwise it can't be done. */
11265 if (tem
11266 && TYPE_PRECISION (TREE_TYPE (tem))
11267 < TYPE_PRECISION (TREE_TYPE (arg1))
11268 && TYPE_PRECISION (TREE_TYPE (tem))
11269 < TYPE_PRECISION (type))
11270 {
11271 int inner_width, outer_width;
11272 tree tem_type;
11273
11274 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11275 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11276 if (outer_width > TYPE_PRECISION (type))
11277 outer_width = TYPE_PRECISION (type);
11278
11279 wide_int mask = wi::shifted_mask
11280 (inner_width, outer_width - inner_width, false,
11281 TYPE_PRECISION (TREE_TYPE (arg1)));
11282
11283 wide_int common = mask & wi::to_wide (arg1);
11284 if (common == mask)
11285 {
11286 tem_type = signed_type_for (TREE_TYPE (tem));
11287 tem = fold_convert_loc (loc, tem_type, tem);
11288 }
11289 else if (common == 0)
11290 {
11291 tem_type = unsigned_type_for (TREE_TYPE (tem));
11292 tem = fold_convert_loc (loc, tem_type, tem);
11293 }
11294 else
11295 tem = NULL;
11296 }
11297
11298 if (tem)
11299 return
11300 fold_convert_loc (loc, type,
11301 fold_build2_loc (loc, BIT_AND_EXPR,
11302 TREE_TYPE (tem), tem,
11303 fold_convert_loc (loc,
11304 TREE_TYPE (tem),
11305 arg1)));
11306 }
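	/* Editorial sketch: for 32-bit int x,

	     x < 0 ? INT_MIN : 0   ==>   x & INT_MIN

	   the condition tests exactly the sign bit that the selected value
	   contributes, so the COND_EXPR collapses to a single AND.  */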
11307
11308 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11309 already handled above. */
11310 if (TREE_CODE (arg0) == BIT_AND_EXPR
11311 && integer_onep (TREE_OPERAND (arg0, 1))
11312 && integer_zerop (op2)
11313 && integer_pow2p (arg1))
11314 {
11315 tree tem = TREE_OPERAND (arg0, 0);
11316 STRIP_NOPS (tem);
11317 if (TREE_CODE (tem) == RSHIFT_EXPR
11318 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11319 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11320 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11321 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11322 fold_convert_loc (loc, type,
11323 TREE_OPERAND (tem, 0)),
11324 op1);
11325 }
11326
11327 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11328 is probably obsolete because the first operand should be a
11329 truth value (that's why we have the two cases above), but let's
11330 leave it in until we can confirm this for all front-ends. */
11331 if (integer_zerop (op2)
11332 && TREE_CODE (arg0) == NE_EXPR
11333 && integer_zerop (TREE_OPERAND (arg0, 1))
11334 && integer_pow2p (arg1)
11335 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11336 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11337 arg1, OEP_ONLY_CONST))
11338 return pedantic_non_lvalue_loc (loc,
11339 fold_convert_loc (loc, type,
11340 TREE_OPERAND (arg0, 0)));
11341
11342 /* Disable the transformations below for vectors, since
11343 fold_binary_op_with_conditional_arg may undo them immediately,
11344 yielding an infinite loop. */
11345 if (code == VEC_COND_EXPR)
11346 return NULL_TREE;
11347
11348 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11349 if (integer_zerop (op2)
11350 && truth_value_p (TREE_CODE (arg0))
11351 && truth_value_p (TREE_CODE (arg1))
11352 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11353 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11354 : TRUTH_ANDIF_EXPR,
11355 type, fold_convert_loc (loc, type, arg0), op1);
11356
11357 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11358 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11359 && truth_value_p (TREE_CODE (arg0))
11360 && truth_value_p (TREE_CODE (arg1))
11361 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11362 {
11363 location_t loc0 = expr_location_or (arg0, loc);
11364 /* Only perform transformation if ARG0 is easily inverted. */
11365 tem = fold_invert_truthvalue (loc0, arg0);
11366 if (tem)
11367 return fold_build2_loc (loc, code == VEC_COND_EXPR
11368 ? BIT_IOR_EXPR
11369 : TRUTH_ORIF_EXPR,
11370 type, fold_convert_loc (loc, type, tem),
11371 op1);
11372 }
11373
11374 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11375 if (integer_zerop (arg1)
11376 && truth_value_p (TREE_CODE (arg0))
11377 && truth_value_p (TREE_CODE (op2))
11378 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11379 {
11380 location_t loc0 = expr_location_or (arg0, loc);
11381 /* Only perform transformation if ARG0 is easily inverted. */
11382 tem = fold_invert_truthvalue (loc0, arg0);
11383 if (tem)
11384 return fold_build2_loc (loc, code == VEC_COND_EXPR
11385 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11386 type, fold_convert_loc (loc, type, tem),
11387 op2);
11388 }
11389
11390 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11391 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11392 && truth_value_p (TREE_CODE (arg0))
11393 && truth_value_p (TREE_CODE (op2))
11394 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11395 return fold_build2_loc (loc, code == VEC_COND_EXPR
11396 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11397 type, fold_convert_loc (loc, type, arg0), op2);
11398
11399 return NULL_TREE;
11400
11401 case CALL_EXPR:
11402 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11403 of fold_ternary on them. */
11404 gcc_unreachable ();
11405
11406 case BIT_FIELD_REF:
11407 if (TREE_CODE (arg0) == VECTOR_CST
11408 && (type == TREE_TYPE (TREE_TYPE (arg0))
11409 || (TREE_CODE (type) == VECTOR_TYPE
11410 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11411 {
11412 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11413 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11414 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11415 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11416
11417 if (n != 0
11418 && (idx % width) == 0
11419 && (n % width) == 0
11420 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11421 {
11422 idx = idx / width;
11423 n = n / width;
11424
11425 if (TREE_CODE (arg0) == VECTOR_CST)
11426 {
11427 if (n == 1)
11428 return VECTOR_CST_ELT (arg0, idx);
11429
11430 tree_vector_builder vals (type, n, 1);
11431 for (unsigned i = 0; i < n; ++i)
11432 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11433 return vals.build ();
11434 }
11435 }
11436 }
11437
11438 /* On constants we can use native encode/interpret to constant
11439 fold (nearly) all BIT_FIELD_REFs. */
11440 if (CONSTANT_CLASS_P (arg0)
11441 && can_native_interpret_type_p (type)
11442 && BITS_PER_UNIT == 8)
11443 {
11444 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11445 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11446	 /* Limit ourselves to a reasonable amount of work. To relax the
11447 other limitations we need bit-shifting of the buffer
11448 and rounding up the size. */
11449 if (bitpos % BITS_PER_UNIT == 0
11450 && bitsize % BITS_PER_UNIT == 0
11451 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11452 {
11453 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11454 unsigned HOST_WIDE_INT len
11455 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11456 bitpos / BITS_PER_UNIT);
11457 if (len > 0
11458 && len * BITS_PER_UNIT >= bitsize)
11459 {
11460 tree v = native_interpret_expr (type, b,
11461 bitsize / BITS_PER_UNIT);
11462 if (v)
11463 return v;
11464 }
11465 }
11466 }
11467
11468 return NULL_TREE;
11469
11470 case FMA_EXPR:
11471 /* For integers we can decompose the FMA if possible. */
11472 if (TREE_CODE (arg0) == INTEGER_CST
11473 && TREE_CODE (arg1) == INTEGER_CST)
11474 return fold_build2_loc (loc, PLUS_EXPR, type,
11475 const_binop (MULT_EXPR, arg0, arg1), arg2);
11476 if (integer_zerop (arg2))
11477 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11478
11479 return fold_fma (loc, type, arg0, arg1, arg2);
11480
11481 case VEC_PERM_EXPR:
11482 if (TREE_CODE (arg2) == VECTOR_CST)
11483 {
11484 unsigned int nelts = VECTOR_CST_NELTS (arg2), i, mask, mask2;
11485 bool need_mask_canon = false;
11486 bool need_mask_canon2 = false;
11487 bool all_in_vec0 = true;
11488 bool all_in_vec1 = true;
11489 bool maybe_identity = true;
11490 bool single_arg = (op0 == op1);
11491 bool changed = false;
11492
11493 mask2 = 2 * nelts - 1;
11494 mask = single_arg ? (nelts - 1) : mask2;
11495 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11496 auto_vec_perm_indices sel (nelts);
11497 auto_vec_perm_indices sel2 (nelts);
11498 for (i = 0; i < nelts; i++)
11499 {
11500 tree val = VECTOR_CST_ELT (arg2, i);
11501 if (TREE_CODE (val) != INTEGER_CST)
11502 return NULL_TREE;
11503
11504 /* Make sure that the perm value is in an acceptable
11505 range. */
11506 wi::tree_to_wide_ref t = wi::to_wide (val);
11507 need_mask_canon |= wi::gtu_p (t, mask);
11508 need_mask_canon2 |= wi::gtu_p (t, mask2);
11509 unsigned int elt = t.to_uhwi () & mask;
11510 unsigned int elt2 = t.to_uhwi () & mask2;
11511
11512 if (elt < nelts)
11513 all_in_vec1 = false;
11514 else
11515 all_in_vec0 = false;
11516
11517 if ((elt & (nelts - 1)) != i)
11518 maybe_identity = false;
11519
11520 sel.quick_push (elt);
11521 sel2.quick_push (elt2);
11522 }
11523
11524 if (maybe_identity)
11525 {
11526 if (all_in_vec0)
11527 return op0;
11528 if (all_in_vec1)
11529 return op1;
11530 }
11531
11532 if (all_in_vec0)
11533 op1 = op0;
11534 else if (all_in_vec1)
11535 {
11536 op0 = op1;
11537 for (i = 0; i < nelts; i++)
11538 sel[i] -= nelts;
11539 need_mask_canon = true;
11540 }
11541
11542 if ((TREE_CODE (op0) == VECTOR_CST
11543 || TREE_CODE (op0) == CONSTRUCTOR)
11544 && (TREE_CODE (op1) == VECTOR_CST
11545 || TREE_CODE (op1) == CONSTRUCTOR))
11546 {
11547 tree t = fold_vec_perm (type, op0, op1, sel);
11548 if (t != NULL_TREE)
11549 return t;
11550 }
11551
11552 if (op0 == op1 && !single_arg)
11553 changed = true;
11554
11555 /* Some targets are deficient and fail to expand a single
11556 argument permutation while still allowing an equivalent
11557 2-argument version. */
11558 if (need_mask_canon && arg2 == op2
11559 && !can_vec_perm_p (TYPE_MODE (type), false, &sel)
11560 && can_vec_perm_p (TYPE_MODE (type), false, &sel2))
11561 {
11562 need_mask_canon = need_mask_canon2;
11563 sel = sel2;
11564 }
11565
11566 if (need_mask_canon && arg2 == op2)
11567 {
11568 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11569 tree_vector_builder tsel (TREE_TYPE (arg2), nelts, 1);
11570 for (i = 0; i < nelts; i++)
11571 tsel.quick_push (build_int_cst (eltype, sel[i]));
11572 op2 = tsel.build ();
11573 changed = true;
11574 }
11575
11576 if (changed)
11577 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11578 }
11579 return NULL_TREE;
11580
11581 case BIT_INSERT_EXPR:
11582 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
11583 if (TREE_CODE (arg0) == INTEGER_CST
11584 && TREE_CODE (arg1) == INTEGER_CST)
11585 {
11586 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11587 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11588 wide_int tem = (wi::to_wide (arg0)
11589 & wi::shifted_mask (bitpos, bitsize, true,
11590 TYPE_PRECISION (type)));
11591 wide_int tem2
11592 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11593 bitsize), bitpos);
11594 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11595 }
11596 else if (TREE_CODE (arg0) == VECTOR_CST
11597 && CONSTANT_CLASS_P (arg1)
11598 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11599 TREE_TYPE (arg1)))
11600 {
11601 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11602 unsigned HOST_WIDE_INT elsize
11603 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11604 if (bitpos % elsize == 0)
11605 {
11606 unsigned k = bitpos / elsize;
11607 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11608 return arg0;
11609 else
11610 {
11611 unsigned int nelts = VECTOR_CST_NELTS (arg0);
11612 tree_vector_builder elts (type, nelts, 1);
11613 elts.quick_grow (nelts);
11614 for (unsigned int i = 0; i < nelts; ++i)
11615 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
11616 return elts.build ();
11617 }
11618 }
11619 }
11620 return NULL_TREE;
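	/* Editorial worked example of the integer branch above: for a
	   32-bit type, inserting the 8-bit value 0xab at bit position 8
	   into 0x12345678 computes

	     tem  = 0x12345678 & ~0x0000ff00  ==  0x12340078
	     tem2 = 0xab << 8                 ==  0x0000ab00

	   and returns the INTEGER_CST 0x1234ab78.  */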
11621
11622 default:
11623 return NULL_TREE;
11624 } /* switch (code) */
11625 }
11626
11627 /* Get the element at ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11628 of an array (or vector). */
11629
11630 tree
11631 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11632 {
11633 tree index_type = NULL_TREE;
11634 offset_int low_bound = 0;
11635
11636 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11637 {
11638 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11639 if (domain_type && TYPE_MIN_VALUE (domain_type))
11640 {
11641	 /* Static constructors for variably sized objects make no sense. */
11642 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11643 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11644 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11645 }
11646 }
11647
11648 if (index_type)
11649 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11650 TYPE_SIGN (index_type));
11651
11652 offset_int index = low_bound - 1;
11653 if (index_type)
11654 index = wi::ext (index, TYPE_PRECISION (index_type),
11655 TYPE_SIGN (index_type));
11656
11657 offset_int max_index;
11658 unsigned HOST_WIDE_INT cnt;
11659 tree cfield, cval;
11660
11661 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11662 {
11663	 /* An array constructor might explicitly set the index, or specify a
11664	 range, or leave the index NULL, meaning that it is the next index
11665	 after the previous one. */
11666 if (cfield)
11667 {
11668 if (TREE_CODE (cfield) == INTEGER_CST)
11669 max_index = index = wi::to_offset (cfield);
11670 else
11671 {
11672 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11673 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11674 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11675 }
11676 }
11677 else
11678 {
11679 index += 1;
11680 if (index_type)
11681 index = wi::ext (index, TYPE_PRECISION (index_type),
11682 TYPE_SIGN (index_type));
11683 max_index = index;
11684 }
11685
11686	 /* Do we have a match? */
11687 if (wi::cmpu (access_index, index) >= 0
11688 && wi::cmpu (access_index, max_index) <= 0)
11689 return cval;
11690 }
11691 return NULL_TREE;
11692 }
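/* Editorial sketch: with the GNU C range-designator extension,

     int a[6] = { [0 ... 2] = 7, 9 };

   yields a CONSTRUCTOR whose first element carries a RANGE_EXPR index
   0..2 and whose second element carries a NULL index (meaning
   "previous + 1", here 3); get_array_ctor_element_at_index (ctor, 3)
   would return the INTEGER_CST 9.  */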
11693
11694 /* Perform constant folding and related simplification of EXPR.
11695 The related simplifications include x*1 => x, x*0 => 0, etc.,
11696 and application of the associative law.
11697 NOP_EXPR conversions may be removed freely (as long as we
11698 are careful not to change the type of the overall expression).
11699 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11700 but we can constant-fold them if they have constant operands. */
11701
11702 #ifdef ENABLE_FOLD_CHECKING
11703 # define fold(x) fold_1 (x)
11704 static tree fold_1 (tree);
11705 static
11706 #endif
11707 tree
11708 fold (tree expr)
11709 {
11710 const tree t = expr;
11711 enum tree_code code = TREE_CODE (t);
11712 enum tree_code_class kind = TREE_CODE_CLASS (code);
11713 tree tem;
11714 location_t loc = EXPR_LOCATION (expr);
11715
11716 /* Return right away if a constant. */
11717 if (kind == tcc_constant)
11718 return t;
11719
11720 /* CALL_EXPR-like objects with variable numbers of operands are
11721 treated specially. */
11722 if (kind == tcc_vl_exp)
11723 {
11724 if (code == CALL_EXPR)
11725 {
11726 tem = fold_call_expr (loc, expr, false);
11727 return tem ? tem : expr;
11728 }
11729 return expr;
11730 }
11731
11732 if (IS_EXPR_CODE_CLASS (kind))
11733 {
11734 tree type = TREE_TYPE (t);
11735 tree op0, op1, op2;
11736
11737 switch (TREE_CODE_LENGTH (code))
11738 {
11739 case 1:
11740 op0 = TREE_OPERAND (t, 0);
11741 tem = fold_unary_loc (loc, code, type, op0);
11742 return tem ? tem : expr;
11743 case 2:
11744 op0 = TREE_OPERAND (t, 0);
11745 op1 = TREE_OPERAND (t, 1);
11746 tem = fold_binary_loc (loc, code, type, op0, op1);
11747 return tem ? tem : expr;
11748 case 3:
11749 op0 = TREE_OPERAND (t, 0);
11750 op1 = TREE_OPERAND (t, 1);
11751 op2 = TREE_OPERAND (t, 2);
11752 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11753 return tem ? tem : expr;
11754 default:
11755 break;
11756 }
11757 }
11758
11759 switch (code)
11760 {
11761 case ARRAY_REF:
11762 {
11763 tree op0 = TREE_OPERAND (t, 0);
11764 tree op1 = TREE_OPERAND (t, 1);
11765
11766 if (TREE_CODE (op1) == INTEGER_CST
11767 && TREE_CODE (op0) == CONSTRUCTOR
11768 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11769 {
11770 tree val = get_array_ctor_element_at_index (op0,
11771 wi::to_offset (op1));
11772 if (val)
11773 return val;
11774 }
11775
11776 return t;
11777 }
11778
11779 /* Return a VECTOR_CST if possible. */
11780 case CONSTRUCTOR:
11781 {
11782 tree type = TREE_TYPE (t);
11783 if (TREE_CODE (type) != VECTOR_TYPE)
11784 return t;
11785
11786 unsigned i;
11787 tree val;
11788 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11789 if (! CONSTANT_CLASS_P (val))
11790 return t;
11791
11792 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
11793 }
11794
11795 case CONST_DECL:
11796 return fold (DECL_INITIAL (t));
11797
11798 default:
11799 return t;
11800 } /* switch (code) */
11801 }
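/* Editorial usage sketch (GCC-internal API, a minimal illustration
   rather than code from this file):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold (build2 (PLUS_EXPR, integer_type_node, two, three));

   SUM is then the INTEGER_CST 5.  New code normally prefers
   fold_build2 and friends, which fold as they build.  */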
11802
11803 #ifdef ENABLE_FOLD_CHECKING
11804 #undef fold
11805
11806 static void fold_checksum_tree (const_tree, struct md5_ctx *,
11807 hash_table<nofree_ptr_hash<const tree_node> > *);
11808 static void fold_check_failed (const_tree, const_tree);
11809 void print_fold_checksum (const_tree);
11810
11811 /* When --enable-checking=fold, compute a digest of expr before
11812 and after the actual fold call, to verify that fold did not
11813 accidentally change the original expr. */
11814
11815 tree
11816 fold (tree expr)
11817 {
11818 tree ret;
11819 struct md5_ctx ctx;
11820 unsigned char checksum_before[16], checksum_after[16];
11821 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11822
11823 md5_init_ctx (&ctx);
11824 fold_checksum_tree (expr, &ctx, &ht);
11825 md5_finish_ctx (&ctx, checksum_before);
11826 ht.empty ();
11827
11828 ret = fold_1 (expr);
11829
11830 md5_init_ctx (&ctx);
11831 fold_checksum_tree (expr, &ctx, &ht);
11832 md5_finish_ctx (&ctx, checksum_after);
11833
11834 if (memcmp (checksum_before, checksum_after, 16))
11835 fold_check_failed (expr, ret);
11836
11837 return ret;
11838 }
11839
11840 void
11841 print_fold_checksum (const_tree expr)
11842 {
11843 struct md5_ctx ctx;
11844 unsigned char checksum[16], cnt;
11845 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11846
11847 md5_init_ctx (&ctx);
11848 fold_checksum_tree (expr, &ctx, &ht);
11849 md5_finish_ctx (&ctx, checksum);
11850 for (cnt = 0; cnt < 16; ++cnt)
11851 fprintf (stderr, "%02x", checksum[cnt]);
11852 putc ('\n', stderr);
11853 }
11854
11855 static void
11856 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
11857 {
11858 internal_error ("fold check: original tree changed by fold");
11859 }
11860
11861 static void
11862 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
11863 hash_table<nofree_ptr_hash <const tree_node> > *ht)
11864 {
11865 const tree_node **slot;
11866 enum tree_code code;
11867 union tree_node buf;
11868 int i, len;
11869
11870 recursive_label:
11871 if (expr == NULL)
11872 return;
11873 slot = ht->find_slot (expr, INSERT);
11874 if (*slot != NULL)
11875 return;
11876 *slot = expr;
11877 code = TREE_CODE (expr);
11878 if (TREE_CODE_CLASS (code) == tcc_declaration
11879 && HAS_DECL_ASSEMBLER_NAME_P (expr))
11880 {
11881 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
11882 memcpy ((char *) &buf, expr, tree_size (expr));
11883 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
11884 buf.decl_with_vis.symtab_node = NULL;
11885 expr = (tree) &buf;
11886 }
11887 else if (TREE_CODE_CLASS (code) == tcc_type
11888 && (TYPE_POINTER_TO (expr)
11889 || TYPE_REFERENCE_TO (expr)
11890 || TYPE_CACHED_VALUES_P (expr)
11891 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
11892 || TYPE_NEXT_VARIANT (expr)
11893 || TYPE_ALIAS_SET_KNOWN_P (expr)))
11894 {
11895 /* Allow these fields to be modified. */
11896 tree tmp;
11897 memcpy ((char *) &buf, expr, tree_size (expr));
11898 expr = tmp = (tree) &buf;
11899 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
11900 TYPE_POINTER_TO (tmp) = NULL;
11901 TYPE_REFERENCE_TO (tmp) = NULL;
11902 TYPE_NEXT_VARIANT (tmp) = NULL;
11903 TYPE_ALIAS_SET (tmp) = -1;
11904 if (TYPE_CACHED_VALUES_P (tmp))
11905 {
11906 TYPE_CACHED_VALUES_P (tmp) = 0;
11907 TYPE_CACHED_VALUES (tmp) = NULL;
11908 }
11909 }
11910 md5_process_bytes (expr, tree_size (expr), ctx);
11911 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
11912 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11913 if (TREE_CODE_CLASS (code) != tcc_type
11914 && TREE_CODE_CLASS (code) != tcc_declaration
11915 && code != TREE_LIST
11916 && code != SSA_NAME
11917 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
11918 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11919 switch (TREE_CODE_CLASS (code))
11920 {
11921 case tcc_constant:
11922 switch (code)
11923 {
11924 case STRING_CST:
11925 md5_process_bytes (TREE_STRING_POINTER (expr),
11926 TREE_STRING_LENGTH (expr), ctx);
11927 break;
11928 case COMPLEX_CST:
11929 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11930 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11931 break;
11932 case VECTOR_CST:
11933 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
11934 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
11935 break;
11936 default:
11937 break;
11938 }
11939 break;
11940 case tcc_exceptional:
11941 switch (code)
11942 {
11943 case TREE_LIST:
11944 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11945 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11946 expr = TREE_CHAIN (expr);
11947 goto recursive_label;
11948 break;
11949 case TREE_VEC:
11950 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11951 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11952 break;
11953 default:
11954 break;
11955 }
11956 break;
11957 case tcc_expression:
11958 case tcc_reference:
11959 case tcc_comparison:
11960 case tcc_unary:
11961 case tcc_binary:
11962 case tcc_statement:
11963 case tcc_vl_exp:
11964 len = TREE_OPERAND_LENGTH (expr);
11965 for (i = 0; i < len; ++i)
11966 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11967 break;
11968 case tcc_declaration:
11969 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11970 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11971 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11972 {
11973 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11974 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11975 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11976 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11977 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11978 }
11979
11980 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11981 {
11982 if (TREE_CODE (expr) == FUNCTION_DECL)
11983 {
11984 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11985 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
11986 }
11987 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11988 }
11989 break;
11990 case tcc_type:
11991 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11992 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11993 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11994 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11995 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11996 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11997 if (INTEGRAL_TYPE_P (expr)
11998 || SCALAR_FLOAT_TYPE_P (expr))
11999 {
12000 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12001 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12002 }
12003 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12004 if (TREE_CODE (expr) == RECORD_TYPE
12005 || TREE_CODE (expr) == UNION_TYPE
12006 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12007 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12008 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12009 break;
12010 default:
12011 break;
12012 }
12013 }
12014
12015 /* Helper function for outputting the checksum of a tree T. When
12016 debugging with gdb, you can "define mynext" to be "next" followed
12017 by "call debug_fold_checksum (op0)", then just trace down till the
12018 outputs differ. */
12019
12020 DEBUG_FUNCTION void
12021 debug_fold_checksum (const_tree t)
12022 {
12023 int i;
12024 unsigned char checksum[16];
12025 struct md5_ctx ctx;
12026 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12027
12028 md5_init_ctx (&ctx);
12029 fold_checksum_tree (t, &ctx, &ht);
12030 md5_finish_ctx (&ctx, checksum);
12031 ht.empty ();
12032
12033 for (i = 0; i < 16; i++)
12034 fprintf (stderr, "%d ", checksum[i]);
12035
12036 fprintf (stderr, "\n");
12037 }
12038
12039 #endif
12040
12041 /* Fold a unary tree expression with code CODE of type TYPE with an
12042 operand OP0. LOC is the location of the resulting expression.
12043 Return a folded expression if successful. Otherwise, return a tree
12044 expression with code CODE of type TYPE with an operand OP0. */
12045
12046 tree
12047 fold_build1_loc (location_t loc,
12048 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12049 {
12050 tree tem;
12051 #ifdef ENABLE_FOLD_CHECKING
12052 unsigned char checksum_before[16], checksum_after[16];
12053 struct md5_ctx ctx;
12054 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12055
12056 md5_init_ctx (&ctx);
12057 fold_checksum_tree (op0, &ctx, &ht);
12058 md5_finish_ctx (&ctx, checksum_before);
12059 ht.empty ();
12060 #endif
12061
12062 tem = fold_unary_loc (loc, code, type, op0);
12063 if (!tem)
12064 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12065
12066 #ifdef ENABLE_FOLD_CHECKING
12067 md5_init_ctx (&ctx);
12068 fold_checksum_tree (op0, &ctx, &ht);
12069 md5_finish_ctx (&ctx, checksum_after);
12070
12071 if (memcmp (checksum_before, checksum_after, 16))
12072 fold_check_failed (op0, tem);
12073 #endif
12074 return tem;
12075 }
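/* Editorial example (a sketch, not part of the original sources): folding
   a unary operation on a constant collapses to a new constant instead of
   building a NEGATE_EXPR node.  Assuming LOC is some location_t:

      tree five = build_int_cst (integer_type_node, 5);
      tree neg = fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, five);

   NEG is then the INTEGER_CST -5, while FIVE itself is left untouched;
   the ENABLE_FOLD_CHECKING checksums above verify exactly that OP0 is
   never modified by folding.  */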
12076
12077 /* Fold a binary tree expression with code CODE of type TYPE with
12078 operands OP0 and OP1. LOC is the location of the resulting
12079 expression. Return a folded expression if successful. Otherwise,
12080 return a tree expression with code CODE of type TYPE with operands
12081 OP0 and OP1. */
12082
12083 tree
12084 fold_build2_loc (location_t loc,
12085 enum tree_code code, tree type, tree op0, tree op1
12086 MEM_STAT_DECL)
12087 {
12088 tree tem;
12089 #ifdef ENABLE_FOLD_CHECKING
12090 unsigned char checksum_before_op0[16],
12091 checksum_before_op1[16],
12092 checksum_after_op0[16],
12093 checksum_after_op1[16];
12094 struct md5_ctx ctx;
12095 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12096
12097 md5_init_ctx (&ctx);
12098 fold_checksum_tree (op0, &ctx, &ht);
12099 md5_finish_ctx (&ctx, checksum_before_op0);
12100 ht.empty ();
12101
12102 md5_init_ctx (&ctx);
12103 fold_checksum_tree (op1, &ctx, &ht);
12104 md5_finish_ctx (&ctx, checksum_before_op1);
12105 ht.empty ();
12106 #endif
12107
12108 tem = fold_binary_loc (loc, code, type, op0, op1);
12109 if (!tem)
12110 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12111
12112 #ifdef ENABLE_FOLD_CHECKING
12113 md5_init_ctx (&ctx);
12114 fold_checksum_tree (op0, &ctx, &ht);
12115 md5_finish_ctx (&ctx, checksum_after_op0);
12116 ht.empty ();
12117
12118 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12119 fold_check_failed (op0, tem);
12120
12121 md5_init_ctx (&ctx);
12122 fold_checksum_tree (op1, &ctx, &ht);
12123 md5_finish_ctx (&ctx, checksum_after_op1);
12124
12125 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12126 fold_check_failed (op1, tem);
12127 #endif
12128 return tem;
12129 }
12130
12131 /* Fold a ternary tree expression with code CODE of type TYPE with
12132 operands OP0, OP1, and OP2. Return a folded expression if
12133 successful. Otherwise, return a tree expression with code CODE of
12134 type TYPE with operands OP0, OP1, and OP2. */
12135
12136 tree
12137 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12138 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12139 {
12140 tree tem;
12141 #ifdef ENABLE_FOLD_CHECKING
12142 unsigned char checksum_before_op0[16],
12143 checksum_before_op1[16],
12144 checksum_before_op2[16],
12145 checksum_after_op0[16],
12146 checksum_after_op1[16],
12147 checksum_after_op2[16];
12148 struct md5_ctx ctx;
12149 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12150
12151 md5_init_ctx (&ctx);
12152 fold_checksum_tree (op0, &ctx, &ht);
12153 md5_finish_ctx (&ctx, checksum_before_op0);
12154 ht.empty ();
12155
12156 md5_init_ctx (&ctx);
12157 fold_checksum_tree (op1, &ctx, &ht);
12158 md5_finish_ctx (&ctx, checksum_before_op1);
12159 ht.empty ();
12160
12161 md5_init_ctx (&ctx);
12162 fold_checksum_tree (op2, &ctx, &ht);
12163 md5_finish_ctx (&ctx, checksum_before_op2);
12164 ht.empty ();
12165 #endif
12166
12167 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12168 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12169 if (!tem)
12170 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12171
12172 #ifdef ENABLE_FOLD_CHECKING
12173 md5_init_ctx (&ctx);
12174 fold_checksum_tree (op0, &ctx, &ht);
12175 md5_finish_ctx (&ctx, checksum_after_op0);
12176 ht.empty ();
12177
12178 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12179 fold_check_failed (op0, tem);
12180
12181 md5_init_ctx (&ctx);
12182 fold_checksum_tree (op1, &ctx, &ht);
12183 md5_finish_ctx (&ctx, checksum_after_op1);
12184 ht.empty ();
12185
12186 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12187 fold_check_failed (op1, tem);
12188
12189 md5_init_ctx (&ctx);
12190 fold_checksum_tree (op2, &ctx, &ht);
12191 md5_finish_ctx (&ctx, checksum_after_op2);
12192
12193 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12194 fold_check_failed (op2, tem);
12195 #endif
12196 return tem;
12197 }
12198
12199 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
12200 arguments in ARGARRAY, and a null static chain.
12201 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12202 of type TYPE from the given operands as constructed by build_call_array. */
12203
12204 tree
12205 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12206 int nargs, tree *argarray)
12207 {
12208 tree tem;
12209 #ifdef ENABLE_FOLD_CHECKING
12210 unsigned char checksum_before_fn[16],
12211 checksum_before_arglist[16],
12212 checksum_after_fn[16],
12213 checksum_after_arglist[16];
12214 struct md5_ctx ctx;
12215 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12216 int i;
12217
12218 md5_init_ctx (&ctx);
12219 fold_checksum_tree (fn, &ctx, &ht);
12220 md5_finish_ctx (&ctx, checksum_before_fn);
12221 ht.empty ();
12222
12223 md5_init_ctx (&ctx);
12224 for (i = 0; i < nargs; i++)
12225 fold_checksum_tree (argarray[i], &ctx, &ht);
12226 md5_finish_ctx (&ctx, checksum_before_arglist);
12227 ht.empty ();
12228 #endif
12229
12230 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12231 if (!tem)
12232 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12233
12234 #ifdef ENABLE_FOLD_CHECKING
12235 md5_init_ctx (&ctx);
12236 fold_checksum_tree (fn, &ctx, &ht);
12237 md5_finish_ctx (&ctx, checksum_after_fn);
12238 ht.empty ();
12239
12240 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12241 fold_check_failed (fn, tem);
12242
12243 md5_init_ctx (&ctx);
12244 for (i = 0; i < nargs; i++)
12245 fold_checksum_tree (argarray[i], &ctx, &ht);
12246 md5_finish_ctx (&ctx, checksum_after_arglist);
12247
12248 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12249 fold_check_failed (NULL_TREE, tem);
12250 #endif
12251 return tem;
12252 }
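/* Editorial example (a sketch, not from the original sources): with a
   constant argument, a builtin call can fold straight to a constant.
   Assuming FABS_ADDR is an ADDR_EXPR of the fabs FUNCTION_DECL:

      tree arg = build_real (double_type_node, dconstm1);
      tree call = fold_build_call_array_loc (loc, double_type_node,
                                             fabs_addr, 1, &arg);

   CALL is then the REAL_CST 1.0 rather than a CALL_EXPR.  */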
12253
12254 /* Perform constant folding and related simplification of initializer
12255 expression EXPR. These behave identically to "fold_buildN" but ignore
12256 potential run-time traps and exceptions that fold must preserve. */
12257
12258 #define START_FOLD_INIT \
12259 int saved_signaling_nans = flag_signaling_nans;\
12260 int saved_trapping_math = flag_trapping_math;\
12261 int saved_rounding_math = flag_rounding_math;\
12262 int saved_trapv = flag_trapv;\
12263 int saved_folding_initializer = folding_initializer;\
12264 flag_signaling_nans = 0;\
12265 flag_trapping_math = 0;\
12266 flag_rounding_math = 0;\
12267 flag_trapv = 0;\
12268 folding_initializer = 1;
12269
12270 #define END_FOLD_INIT \
12271 flag_signaling_nans = saved_signaling_nans;\
12272 flag_trapping_math = saved_trapping_math;\
12273 flag_rounding_math = saved_rounding_math;\
12274 flag_trapv = saved_trapv;\
12275 folding_initializer = saved_folding_initializer;
12276
12277 tree
12278 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12279 tree type, tree op)
12280 {
12281 tree result;
12282 START_FOLD_INIT;
12283
12284 result = fold_build1_loc (loc, code, type, op);
12285
12286 END_FOLD_INIT;
12287 return result;
12288 }
12289
12290 tree
12291 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12292 tree type, tree op0, tree op1)
12293 {
12294 tree result;
12295 START_FOLD_INIT;
12296
12297 result = fold_build2_loc (loc, code, type, op0, op1);
12298
12299 END_FOLD_INIT;
12300 return result;
12301 }
12302
12303 tree
12304 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12305 int nargs, tree *argarray)
12306 {
12307 tree result;
12308 START_FOLD_INIT;
12309
12310 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12311
12312 END_FOLD_INIT;
12313 return result;
12314 }
12315
12316 #undef START_FOLD_INIT
12317 #undef END_FOLD_INIT
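/* Editorial example: under -frounding-math, fold refuses to evaluate the
   inexact quotient 1.0/3.0 at compile time because the run-time rounding
   mode could differ.  A static initializer must be evaluated at
   translation time anyway, so the _initializer variants clear
   flag_rounding_math and the trap-related flags around the fold.
   Assuming ONE and THREE are the REAL_CSTs 1.0 and 3.0:

      tree q = fold_build2_initializer_loc (loc, RDIV_EXPR,
                                            double_type_node, one, three);

   folds to a constant even where plain fold_build2_loc would return the
   unsimplified RDIV_EXPR.  */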
12318
12319 /* Determine if first argument is a multiple of second argument. Return 0 if
12320 it is not, or we cannot easily determine it to be.
12321
12322 An example of the sort of thing we care about (at this point; this routine
12323 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12324 fold cases do now) is discovering that
12325
12326 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12327
12328 is a multiple of
12329
12330 SAVE_EXPR (J * 8)
12331
12332 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12333
12334 This code also handles discovering that
12335
12336 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12337
12338 is a multiple of 8 so we don't have to worry about dealing with a
12339 possible remainder.
12340
12341 Note that we *look* inside a SAVE_EXPR only to determine how it was
12342 calculated; it is not safe for fold to do much of anything else with the
12343 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12344 at run time. For example, the latter example above *cannot* be implemented
12345 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12346 evaluation time of the original SAVE_EXPR is not necessarily the same at
12347 the time the new expression is evaluated. The only optimization of this
12348 sort that would be valid is changing
12349
12350 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12351
12352 divided by 8 to
12353
12354 SAVE_EXPR (I) * SAVE_EXPR (J)
12355
12356 (where the same SAVE_EXPR (J) is used in the original and the
12357 transformed version). */
12358
12359 int
12360 multiple_of_p (tree type, const_tree top, const_tree bottom)
12361 {
12362 gimple *stmt;
12363 tree t1, op1, op2;
12364
12365 if (operand_equal_p (top, bottom, 0))
12366 return 1;
12367
12368 if (TREE_CODE (type) != INTEGER_TYPE)
12369 return 0;
12370
12371 switch (TREE_CODE (top))
12372 {
12373 case BIT_AND_EXPR:
12374 /* Bitwise and provides a power of two multiple. If the mask is
12375 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12376 if (!integer_pow2p (bottom))
12377 return 0;
12378 /* FALLTHRU */
12379
12380 case MULT_EXPR:
12381 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12382 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12383
12384 case MINUS_EXPR:
12385 /* It is impossible to prove precisely whether op0 - op1 is a multiple
12386 of bottom, so be conservative and check that both op0 and op1
12387 are multiples of bottom. Note we check the second operand first
12388 since it's usually simpler. */
12389 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12390 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12391
12392 case PLUS_EXPR:
12393 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12394 as op0 - 3 if the expression has unsigned type. For example,
12395 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
12396 op1 = TREE_OPERAND (top, 1);
12397 if (TYPE_UNSIGNED (type)
12398 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12399 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12400 return (multiple_of_p (type, op1, bottom)
12401 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12402
12403 case LSHIFT_EXPR:
12404 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12405 {
12406 op1 = TREE_OPERAND (top, 1);
12407 /* const_binop may not detect overflow correctly,
12408 so check for it explicitly here. */
12409 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12410 wi::to_wide (op1))
12411 && 0 != (t1 = fold_convert (type,
12412 const_binop (LSHIFT_EXPR,
12413 size_one_node,
12414 op1)))
12415 && !TREE_OVERFLOW (t1))
12416 return multiple_of_p (type, t1, bottom);
12417 }
12418 return 0;
12419
12420 case NOP_EXPR:
12421 /* Can't handle conversions from non-integral or wider integral type. */
12422 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12423 || (TYPE_PRECISION (type)
12424 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12425 return 0;
12426
12427 /* fall through */
12428
12429 case SAVE_EXPR:
12430 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12431
12432 case COND_EXPR:
12433 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12434 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12435
12436 case INTEGER_CST:
12437 if (TREE_CODE (bottom) != INTEGER_CST
12438 || integer_zerop (bottom)
12439 || (TYPE_UNSIGNED (type)
12440 && (tree_int_cst_sgn (top) < 0
12441 || tree_int_cst_sgn (bottom) < 0)))
12442 return 0;
12443 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12444 SIGNED);
12445
12446 case SSA_NAME:
12447 if (TREE_CODE (bottom) == INTEGER_CST
12448 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12449 && gimple_code (stmt) == GIMPLE_ASSIGN)
12450 {
12451 enum tree_code code = gimple_assign_rhs_code (stmt);
12452
12453 /* Check for special cases to see if top is defined as multiple
12454 of bottom:
12455
12456 top = (X & ~(bottom - 1)); bottom is a power of 2
12457
12458 or
12459
12460 Y = X % bottom
12461 top = X - Y. */
12462 if (code == BIT_AND_EXPR
12463 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12464 && TREE_CODE (op2) == INTEGER_CST
12465 && integer_pow2p (bottom)
12466 && wi::multiple_of_p (wi::to_widest (op2),
12467 wi::to_widest (bottom), UNSIGNED))
12468 return 1;
12469
12470 op1 = gimple_assign_rhs1 (stmt);
12471 if (code == MINUS_EXPR
12472 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12473 && TREE_CODE (op2) == SSA_NAME
12474 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12475 && gimple_code (stmt) == GIMPLE_ASSIGN
12476 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12477 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12478 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12479 return 1;
12480 }
12481
12482 /* fall through */
12483
12484 default:
12485 return 0;
12486 }
12487 }
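/* Editorial example: given the GIMPLE sequence

      y_2 = x_1 % 8;
      t_3 = x_1 - y_2;

   multiple_of_p (type, t_3, build_int_cst (type, 8)) returns 1 through
   the SSA_NAME case above, since t_3 is x_1 rounded down to a multiple
   of 8.  Likewise t_3 = x_1 & ~7 is recognized through the BIT_AND_EXPR
   pattern.  */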
12488
12489 #define tree_expr_nonnegative_warnv_p(X, Y) \
12490 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12491
12492 #define RECURSE(X) \
12493 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
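/* Editorial note: the #define of tree_expr_nonnegative_warnv_p above
   turns any direct recursive call in the helpers below into a
   compile-time error, forcing them to recurse through RECURSE instead.
   RECURSE threads STRICT_OVERFLOW_P and increments DEPTH, so the
   PARAM_MAX_SSA_NAME_QUERY_DEPTH cut-off checked in
   tree_single_nonnegative_warnv_p is honored on every path.  */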
12494
12495 /* Return true if CODE or TYPE is known to be non-negative. */
12496
12497 static bool
12498 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12499 {
12500 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12501 && truth_value_p (code))
12502 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12503 have a signed:1 type (where the values are -1 and 0). */
12504 return true;
12505 return false;
12506 }
12507
12508 /* Return true if (CODE OP0) is known to be non-negative. If the return
12509 value is based on the assumption that signed overflow is undefined,
12510 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12511 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12512
12513 bool
12514 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12515 bool *strict_overflow_p, int depth)
12516 {
12517 if (TYPE_UNSIGNED (type))
12518 return true;
12519
12520 switch (code)
12521 {
12522 case ABS_EXPR:
12523 /* We can't return 1 if flag_wrapv is set because
12524 ABS_EXPR<INT_MIN> = INT_MIN. */
12525 if (!ANY_INTEGRAL_TYPE_P (type))
12526 return true;
12527 if (TYPE_OVERFLOW_UNDEFINED (type))
12528 {
12529 *strict_overflow_p = true;
12530 return true;
12531 }
12532 break;
12533
12534 case NON_LVALUE_EXPR:
12535 case FLOAT_EXPR:
12536 case FIX_TRUNC_EXPR:
12537 return RECURSE (op0);
12538
12539 CASE_CONVERT:
12540 {
12541 tree inner_type = TREE_TYPE (op0);
12542 tree outer_type = type;
12543
12544 if (TREE_CODE (outer_type) == REAL_TYPE)
12545 {
12546 if (TREE_CODE (inner_type) == REAL_TYPE)
12547 return RECURSE (op0);
12548 if (INTEGRAL_TYPE_P (inner_type))
12549 {
12550 if (TYPE_UNSIGNED (inner_type))
12551 return true;
12552 return RECURSE (op0);
12553 }
12554 }
12555 else if (INTEGRAL_TYPE_P (outer_type))
12556 {
12557 if (TREE_CODE (inner_type) == REAL_TYPE)
12558 return RECURSE (op0);
12559 if (INTEGRAL_TYPE_P (inner_type))
12560 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12561 && TYPE_UNSIGNED (inner_type);
12562 }
12563 }
12564 break;
12565
12566 default:
12567 return tree_simple_nonnegative_warnv_p (code, type);
12568 }
12569
12570 /* We don't know the sign of the result, so be conservative and return false. */
12571 return false;
12572 }
12573
12574 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12575 value is based on the assumption that signed overflow is undefined,
12576 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12577 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12578
12579 bool
12580 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12581 tree op1, bool *strict_overflow_p,
12582 int depth)
12583 {
12584 if (TYPE_UNSIGNED (type))
12585 return true;
12586
12587 switch (code)
12588 {
12589 case POINTER_PLUS_EXPR:
12590 case PLUS_EXPR:
12591 if (FLOAT_TYPE_P (type))
12592 return RECURSE (op0) && RECURSE (op1);
12593
12594 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12595 both unsigned and at least 2 bits shorter than the result. */
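/* Editorial example: with 32-bit int and 8-bit unsigned char operands,
   (int) c1 + (int) c2 gives prec = 8 + 1 = 9 < 32; the sum is at most
   510 and therefore provably non-negative.  */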
12596 if (TREE_CODE (type) == INTEGER_TYPE
12597 && TREE_CODE (op0) == NOP_EXPR
12598 && TREE_CODE (op1) == NOP_EXPR)
12599 {
12600 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12601 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12602 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12603 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12604 {
12605 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12606 TYPE_PRECISION (inner2)) + 1;
12607 return prec < TYPE_PRECISION (type);
12608 }
12609 }
12610 break;
12611
12612 case MULT_EXPR:
12613 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12614 {
12615 /* x * x is always non-negative for floating point x,
12616 or when signed overflow is undefined. */
12617 if (operand_equal_p (op0, op1, 0)
12618 || (RECURSE (op0) && RECURSE (op1)))
12619 {
12620 if (ANY_INTEGRAL_TYPE_P (type)
12621 && TYPE_OVERFLOW_UNDEFINED (type))
12622 *strict_overflow_p = true;
12623 return true;
12624 }
12625 }
12626
12627 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12628 both unsigned and their combined precision is less than that of the result. */
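/* Editorial example: (int) c1 * (int) c2 with 8-bit unsigned chars gives
   precision0 + precision1 = 16 < 32; the product is at most
   255 * 255 = 65025, which cannot reach the sign bit of the 32-bit
   result.  */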
12629 if (TREE_CODE (type) == INTEGER_TYPE
12630 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12631 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12632 {
12633 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12634 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12635 : TREE_TYPE (op0);
12636 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12637 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12638 : TREE_TYPE (op1);
12639
12640 bool unsigned0 = TYPE_UNSIGNED (inner0);
12641 bool unsigned1 = TYPE_UNSIGNED (inner1);
12642
12643 if (TREE_CODE (op0) == INTEGER_CST)
12644 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12645
12646 if (TREE_CODE (op1) == INTEGER_CST)
12647 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12648
12649 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12650 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12651 {
12652 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12653 ? tree_int_cst_min_precision (op0, UNSIGNED)
12654 : TYPE_PRECISION (inner0);
12655
12656 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12657 ? tree_int_cst_min_precision (op1, UNSIGNED)
12658 : TYPE_PRECISION (inner1);
12659
12660 return precision0 + precision1 < TYPE_PRECISION (type);
12661 }
12662 }
12663 return false;
12664
12665 case BIT_AND_EXPR:
12666 case MAX_EXPR:
12667 return RECURSE (op0) || RECURSE (op1);
12668
12669 case BIT_IOR_EXPR:
12670 case BIT_XOR_EXPR:
12671 case MIN_EXPR:
12672 case RDIV_EXPR:
12673 case TRUNC_DIV_EXPR:
12674 case CEIL_DIV_EXPR:
12675 case FLOOR_DIV_EXPR:
12676 case ROUND_DIV_EXPR:
12677 return RECURSE (op0) && RECURSE (op1);
12678
12679 case TRUNC_MOD_EXPR:
12680 return RECURSE (op0);
12681
12682 case FLOOR_MOD_EXPR:
12683 return RECURSE (op1);
12684
12685 case CEIL_MOD_EXPR:
12686 case ROUND_MOD_EXPR:
12687 default:
12688 return tree_simple_nonnegative_warnv_p (code, type);
12689 }
12690
12691 /* We don't know the sign of the result, so be conservative and return false. */
12692 return false;
12693 }
12694
12695 /* Return true if T is known to be non-negative. If the return
12696 value is based on the assumption that signed overflow is undefined,
12697 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12698 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12699
12700 bool
12701 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12702 {
12703 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12704 return true;
12705
12706 switch (TREE_CODE (t))
12707 {
12708 case INTEGER_CST:
12709 return tree_int_cst_sgn (t) >= 0;
12710
12711 case REAL_CST:
12712 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12713
12714 case FIXED_CST:
12715 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12716
12717 case COND_EXPR:
12718 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12719
12720 case SSA_NAME:
12721 /* Limit the depth of recursion to avoid quadratic behavior.
12722 This is expected to catch almost all occurrences in practice.
12723 If this code misses important cases that unbounded recursion
12724 would not, passes that need this information could be revised
12725 to provide it through dataflow propagation. */
12726 return (!name_registered_for_update_p (t)
12727 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12728 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12729 strict_overflow_p, depth));
12730
12731 default:
12732 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12733 }
12734 }
12735
12736 /* Return true if a call to FN with arguments ARG0 and ARG1 is known to
12737 be non-negative. If the return value is based on the assumption that
12738 signed overflow is undefined, set *STRICT_OVERFLOW_P to true; otherwise,
12739 don't change *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12740
12741 bool
12742 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12743 bool *strict_overflow_p, int depth)
12744 {
12745 switch (fn)
12746 {
12747 CASE_CFN_ACOS:
12748 CASE_CFN_ACOSH:
12749 CASE_CFN_CABS:
12750 CASE_CFN_COSH:
12751 CASE_CFN_ERFC:
12752 CASE_CFN_EXP:
12753 CASE_CFN_EXP10:
12754 CASE_CFN_EXP2:
12755 CASE_CFN_FABS:
12756 CASE_CFN_FDIM:
12757 CASE_CFN_HYPOT:
12758 CASE_CFN_POW10:
12759 CASE_CFN_FFS:
12760 CASE_CFN_PARITY:
12761 CASE_CFN_POPCOUNT:
12762 CASE_CFN_CLZ:
12763 CASE_CFN_CLRSB:
12764 case CFN_BUILT_IN_BSWAP32:
12765 case CFN_BUILT_IN_BSWAP64:
12766 /* Always true. */
12767 return true;
12768
12769 CASE_CFN_SQRT:
12770 CASE_CFN_SQRT_FN:
12771 /* sqrt(-0.0) is -0.0. */
12772 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12773 return true;
12774 return RECURSE (arg0);
12775
12776 CASE_CFN_ASINH:
12777 CASE_CFN_ATAN:
12778 CASE_CFN_ATANH:
12779 CASE_CFN_CBRT:
12780 CASE_CFN_CEIL:
12781 CASE_CFN_ERF:
12782 CASE_CFN_EXPM1:
12783 CASE_CFN_FLOOR:
12784 CASE_CFN_FMOD:
12785 CASE_CFN_FREXP:
12786 CASE_CFN_ICEIL:
12787 CASE_CFN_IFLOOR:
12788 CASE_CFN_IRINT:
12789 CASE_CFN_IROUND:
12790 CASE_CFN_LCEIL:
12791 CASE_CFN_LDEXP:
12792 CASE_CFN_LFLOOR:
12793 CASE_CFN_LLCEIL:
12794 CASE_CFN_LLFLOOR:
12795 CASE_CFN_LLRINT:
12796 CASE_CFN_LLROUND:
12797 CASE_CFN_LRINT:
12798 CASE_CFN_LROUND:
12799 CASE_CFN_MODF:
12800 CASE_CFN_NEARBYINT:
12801 CASE_CFN_RINT:
12802 CASE_CFN_ROUND:
12803 CASE_CFN_SCALB:
12804 CASE_CFN_SCALBLN:
12805 CASE_CFN_SCALBN:
12806 CASE_CFN_SIGNBIT:
12807 CASE_CFN_SIGNIFICAND:
12808 CASE_CFN_SINH:
12809 CASE_CFN_TANH:
12810 CASE_CFN_TRUNC:
12811 /* True if the 1st argument is nonnegative. */
12812 return RECURSE (arg0);
12813
12814 CASE_CFN_FMAX:
12815 CASE_CFN_FMAX_FN:
12816 /* True if the 1st OR the 2nd argument is nonnegative. */
12817 return RECURSE (arg0) || RECURSE (arg1);
12818
12819 CASE_CFN_FMIN:
12820 CASE_CFN_FMIN_FN:
12821 /* True if the 1st AND 2nd arguments are nonnegative. */
12822 return RECURSE (arg0) && RECURSE (arg1);
12823
12824 CASE_CFN_COPYSIGN:
12825 CASE_CFN_COPYSIGN_FN:
12826 /* True if the 2nd argument is nonnegative. */
12827 return RECURSE (arg1);
12828
12829 CASE_CFN_POWI:
12830 /* True if the 1st argument is nonnegative or the second
12831 argument is an even integer. */
12832 if (TREE_CODE (arg1) == INTEGER_CST
12833 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
12834 return true;
12835 return RECURSE (arg0);
12836
12837 CASE_CFN_POW:
12838 /* True if the 1st argument is nonnegative or the second
12839 argument is an even integer valued real. */
12840 if (TREE_CODE (arg1) == REAL_CST)
12841 {
12842 REAL_VALUE_TYPE c;
12843 HOST_WIDE_INT n;
12844
12845 c = TREE_REAL_CST (arg1);
12846 n = real_to_integer (&c);
12847 if ((n & 1) == 0)
12848 {
12849 REAL_VALUE_TYPE cint;
12850 real_from_integer (&cint, VOIDmode, n, SIGNED);
12851 if (real_identical (&c, &cint))
12852 return true;
12853 }
12854 }
12855 return RECURSE (arg0);
12856
12857 default:
12858 break;
12859 }
12860 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
12861 }
12862
12863 /* Return true if T is known to be non-negative. If the return
12864 value is based on the assumption that signed overflow is undefined,
12865 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12866 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12867
12868 static bool
12869 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12870 {
12871 enum tree_code code = TREE_CODE (t);
12872 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12873 return true;
12874
12875 switch (code)
12876 {
12877 case TARGET_EXPR:
12878 {
12879 tree temp = TARGET_EXPR_SLOT (t);
12880 t = TARGET_EXPR_INITIAL (t);
12881
12882 /* If the initializer is non-void, then it's a normal expression
12883 that will be assigned to the slot. */
12884 if (!VOID_TYPE_P (t))
12885 return RECURSE (t);
12886
12887 /* Otherwise, the initializer sets the slot in some way. One common
12888 way is an assignment statement at the end of the initializer. */
12889 while (1)
12890 {
12891 if (TREE_CODE (t) == BIND_EXPR)
12892 t = expr_last (BIND_EXPR_BODY (t));
12893 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12894 || TREE_CODE (t) == TRY_CATCH_EXPR)
12895 t = expr_last (TREE_OPERAND (t, 0));
12896 else if (TREE_CODE (t) == STATEMENT_LIST)
12897 t = expr_last (t);
12898 else
12899 break;
12900 }
12901 if (TREE_CODE (t) == MODIFY_EXPR
12902 && TREE_OPERAND (t, 0) == temp)
12903 return RECURSE (TREE_OPERAND (t, 1));
12904
12905 return false;
12906 }
12907
12908 case CALL_EXPR:
12909 {
12910 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
12911 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
12912
12913 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
12914 get_call_combined_fn (t),
12915 arg0,
12916 arg1,
12917 strict_overflow_p, depth);
12918 }
12919 case COMPOUND_EXPR:
12920 case MODIFY_EXPR:
12921 return RECURSE (TREE_OPERAND (t, 1));
12922
12923 case BIND_EXPR:
12924 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
12925
12926 case SAVE_EXPR:
12927 return RECURSE (TREE_OPERAND (t, 0));
12928
12929 default:
12930 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12931 }
12932 }
12933
12934 #undef RECURSE
12935 #undef tree_expr_nonnegative_warnv_p
12936
12937 /* Return true if T is known to be non-negative. If the return
12938 value is based on the assumption that signed overflow is undefined,
12939 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12940 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12941
12942 bool
12943 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12944 {
12945 enum tree_code code;
12946 if (t == error_mark_node)
12947 return false;
12948
12949 code = TREE_CODE (t);
12950 switch (TREE_CODE_CLASS (code))
12951 {
12952 case tcc_binary:
12953 case tcc_comparison:
12954 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
12955 TREE_TYPE (t),
12956 TREE_OPERAND (t, 0),
12957 TREE_OPERAND (t, 1),
12958 strict_overflow_p, depth);
12959
12960 case tcc_unary:
12961 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
12962 TREE_TYPE (t),
12963 TREE_OPERAND (t, 0),
12964 strict_overflow_p, depth);
12965
12966 case tcc_constant:
12967 case tcc_declaration:
12968 case tcc_reference:
12969 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
12970
12971 default:
12972 break;
12973 }
12974
12975 switch (code)
12976 {
12977 case TRUTH_AND_EXPR:
12978 case TRUTH_OR_EXPR:
12979 case TRUTH_XOR_EXPR:
12980 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
12981 TREE_TYPE (t),
12982 TREE_OPERAND (t, 0),
12983 TREE_OPERAND (t, 1),
12984 strict_overflow_p, depth);
12985 case TRUTH_NOT_EXPR:
12986 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
12987 TREE_TYPE (t),
12988 TREE_OPERAND (t, 0),
12989 strict_overflow_p, depth);
12990
12991 case COND_EXPR:
12992 case CONSTRUCTOR:
12993 case OBJ_TYPE_REF:
12994 case ASSERT_EXPR:
12995 case ADDR_EXPR:
12996 case WITH_SIZE_EXPR:
12997 case SSA_NAME:
12998 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
12999
13000 default:
13001 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13002 }
13003 }
13004
13005 /* Return true if `t' is known to be non-negative. Handle warnings
13006 about undefined signed overflow. */
13007
13008 bool
13009 tree_expr_nonnegative_p (tree t)
13010 {
13011 bool ret, strict_overflow_p;
13012
13013 strict_overflow_p = false;
13014 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13015 if (strict_overflow_p)
13016 fold_overflow_warning (("assuming signed overflow does not occur when "
13017 "determining that expression is always "
13018 "non-negative"),
13019 WARN_STRICT_OVERFLOW_MISC);
13020 return ret;
13021 }
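/* Editorial example: for signed int x, tree_expr_nonnegative_p on the
   expression x * x returns true only by assuming signed overflow is
   undefined; the strict-overflow path above then routes through
   fold_overflow_warning, which may emit a -Wstrict-overflow diagnostic.
   For unsigned x the answer is trivially true and no warning is
   involved.  */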
13022
13023
13024 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13025 For floating point we further ensure that T is not denormal.
13026 Similar logic is present in nonzero_address in rtlanal.h.
13027
13028 If the return value is based on the assumption that signed overflow
13029 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13030 change *STRICT_OVERFLOW_P. */
13031
13032 bool
13033 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13034 bool *strict_overflow_p)
13035 {
13036 switch (code)
13037 {
13038 case ABS_EXPR:
13039 return tree_expr_nonzero_warnv_p (op0,
13040 strict_overflow_p);
13041
13042 case NOP_EXPR:
13043 {
13044 tree inner_type = TREE_TYPE (op0);
13045 tree outer_type = type;
13046
13047 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13048 && tree_expr_nonzero_warnv_p (op0,
13049 strict_overflow_p));
13050 }
13051 break;
13052
13053 case NON_LVALUE_EXPR:
13054 return tree_expr_nonzero_warnv_p (op0,
13055 strict_overflow_p);
13056
13057 default:
13058 break;
13059 }
13060
13061 return false;
13062 }
13063
13064 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13065 For floating point we further ensure that T is not denormal.
13066 Similar logic is present in nonzero_address in rtlanal.h.
13067
13068 If the return value is based on the assumption that signed overflow
13069 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13070 change *STRICT_OVERFLOW_P. */
13071
13072 bool
13073 tree_binary_nonzero_warnv_p (enum tree_code code,
13074 tree type,
13075 tree op0,
13076 tree op1, bool *strict_overflow_p)
13077 {
13078 bool sub_strict_overflow_p;
13079 switch (code)
13080 {
13081 case POINTER_PLUS_EXPR:
13082 case PLUS_EXPR:
13083 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13084 {
13085 /* In the presence of negative values it is hard
13086 to say anything definite. */
13087 sub_strict_overflow_p = false;
13088 if (!tree_expr_nonnegative_warnv_p (op0,
13089 &sub_strict_overflow_p)
13090 || !tree_expr_nonnegative_warnv_p (op1,
13091 &sub_strict_overflow_p))
13092 return false;
13093 /* One of the operands must be positive and the other non-negative. */
13094 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13095 overflows, on a twos-complement machine the sum of two
13096 nonnegative numbers can never be zero. */
13097 return (tree_expr_nonzero_warnv_p (op0,
13098 strict_overflow_p)
13099 || tree_expr_nonzero_warnv_p (op1,
13100 strict_overflow_p));
13101 }
13102 break;
13103
13104 case MULT_EXPR:
13105 if (TYPE_OVERFLOW_UNDEFINED (type))
13106 {
13107 if (tree_expr_nonzero_warnv_p (op0,
13108 strict_overflow_p)
13109 && tree_expr_nonzero_warnv_p (op1,
13110 strict_overflow_p))
13111 {
13112 *strict_overflow_p = true;
13113 return true;
13114 }
13115 }
13116 break;
13117
13118 case MIN_EXPR:
13119 sub_strict_overflow_p = false;
13120 if (tree_expr_nonzero_warnv_p (op0,
13121 &sub_strict_overflow_p)
13122 && tree_expr_nonzero_warnv_p (op1,
13123 &sub_strict_overflow_p))
13124 {
13125 if (sub_strict_overflow_p)
13126 *strict_overflow_p = true;
13127 }
13128 break;
13129
13130 case MAX_EXPR:
13131 sub_strict_overflow_p = false;
13132 if (tree_expr_nonzero_warnv_p (op0,
13133 &sub_strict_overflow_p))
13134 {
13135 if (sub_strict_overflow_p)
13136 *strict_overflow_p = true;
13137
13138 /* When both operands are nonzero, then MAX must be too. */
13139 if (tree_expr_nonzero_warnv_p (op1,
13140 strict_overflow_p))
13141 return true;
13142
13143 /* MAX where operand 0 is positive is positive. */
13144 return tree_expr_nonnegative_warnv_p (op0,
13145 strict_overflow_p);
13146 }
13147 /* MAX where operand 1 is positive is positive. */
13148 else if (tree_expr_nonzero_warnv_p (op1,
13149 &sub_strict_overflow_p)
13150 && tree_expr_nonnegative_warnv_p (op1,
13151 &sub_strict_overflow_p))
13152 {
13153 if (sub_strict_overflow_p)
13154 *strict_overflow_p = true;
13155 return true;
13156 }
13157 break;
13158
13159 case BIT_IOR_EXPR:
13160 return (tree_expr_nonzero_warnv_p (op1,
13161 strict_overflow_p)
13162 || tree_expr_nonzero_warnv_p (op0,
13163 strict_overflow_p));
13164
13165 default:
13166 break;
13167 }
13168
13169 return false;
13170 }
13171
13172 /* Return true when T is an address and is known to be nonzero.
13173 For floating point we further ensure that T is not denormal.
13174 Similar logic is present in nonzero_address in rtlanal.h.
13175
13176 If the return value is based on the assumption that signed overflow
13177 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13178 change *STRICT_OVERFLOW_P. */
13179
13180 bool
13181 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13182 {
13183 bool sub_strict_overflow_p;
13184 switch (TREE_CODE (t))
13185 {
13186 case INTEGER_CST:
13187 return !integer_zerop (t);
13188
13189 case ADDR_EXPR:
13190 {
13191 tree base = TREE_OPERAND (t, 0);
13192
13193 if (!DECL_P (base))
13194 base = get_base_address (base);
13195
13196 if (base && TREE_CODE (base) == TARGET_EXPR)
13197 base = TARGET_EXPR_SLOT (base);
13198
13199 if (!base)
13200 return false;
13201
13202 /* For objects in symbol table check if we know they are non-zero.
13203 Don't do anything for variables and functions before symtab is built;
13204 it is quite possible that they will be declared weak later. */
13205 int nonzero_addr = maybe_nonzero_address (base);
13206 if (nonzero_addr >= 0)
13207 return nonzero_addr;
13208
13209 /* Constants are never weak. */
13210 if (CONSTANT_CLASS_P (base))
13211 return true;
13212
13213 return false;
13214 }
13215
13216 case COND_EXPR:
13217 sub_strict_overflow_p = false;
13218 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13219 &sub_strict_overflow_p)
13220 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13221 &sub_strict_overflow_p))
13222 {
13223 if (sub_strict_overflow_p)
13224 *strict_overflow_p = true;
13225 return true;
13226 }
13227 break;
13228
13229 case SSA_NAME:
13230 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13231 break;
13232 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13233
13234 default:
13235 break;
13236 }
13237 return false;
13238 }
13239
13240 #define integer_valued_real_p(X) \
13241 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13242
13243 #define RECURSE(X) \
13244 ((integer_valued_real_p) (X, depth + 1))
13245
13246 /* Return true if the floating point result of (CODE OP0) has an
13247 integer value. We also allow +Inf, -Inf and NaN to be considered
13248 integer values. Return false for signaling NaN.
13249
13250 DEPTH is the current nesting depth of the query. */
13251
13252 bool
13253 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13254 {
13255 switch (code)
13256 {
13257 case FLOAT_EXPR:
13258 return true;
13259
13260 case ABS_EXPR:
13261 return RECURSE (op0);
13262
13263 CASE_CONVERT:
13264 {
13265 tree type = TREE_TYPE (op0);
13266 if (TREE_CODE (type) == INTEGER_TYPE)
13267 return true;
13268 if (TREE_CODE (type) == REAL_TYPE)
13269 return RECURSE (op0);
13270 break;
13271 }
13272
13273 default:
13274 break;
13275 }
13276 return false;
13277 }
13278
13279 /* Return true if the floating point result of (CODE OP0 OP1) has an
13280 integer value. We also allow +Inf, -Inf and NaN to be considered
13281 integer values. Return false for signaling NaN.
13282
13283 DEPTH is the current nesting depth of the query. */
13284
13285 bool
13286 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13287 {
13288 switch (code)
13289 {
13290 case PLUS_EXPR:
13291 case MINUS_EXPR:
13292 case MULT_EXPR:
13293 case MIN_EXPR:
13294 case MAX_EXPR:
13295 return RECURSE (op0) && RECURSE (op1);
13296
13297 default:
13298 break;
13299 }
13300 return false;
13301 }
13302
13303 /* Return true if the floating point result of calling FN with arguments
13304 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13305 considered integer values. Return false for signaling NaN. If FN
13306 takes fewer than 2 arguments, the remaining ARGn are null.
13307
13308 DEPTH is the current nesting depth of the query. */
13309
13310 bool
13311 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13312 {
13313 switch (fn)
13314 {
13315 CASE_CFN_CEIL:
13316 CASE_CFN_FLOOR:
13317 CASE_CFN_NEARBYINT:
13318 CASE_CFN_RINT:
13319 CASE_CFN_ROUND:
13320 CASE_CFN_TRUNC:
13321 return true;
13322
13323 CASE_CFN_FMIN:
13324 CASE_CFN_FMIN_FN:
13325 CASE_CFN_FMAX:
13326 CASE_CFN_FMAX_FN:
13327 return RECURSE (arg0) && RECURSE (arg1);
13328
13329 default:
13330 break;
13331 }
13332 return false;
13333 }
13334
13335 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13336 has an integer value. We also allow +Inf, -Inf and NaN to be
13337 considered integer values. Return false for signaling NaN.
13338
13339 DEPTH is the current nesting depth of the query. */
13340
13341 bool
13342 integer_valued_real_single_p (tree t, int depth)
13343 {
13344 switch (TREE_CODE (t))
13345 {
13346 case REAL_CST:
13347 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13348
13349 case COND_EXPR:
13350 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13351
13352 case SSA_NAME:
13353 /* Limit the depth of recursion to avoid quadratic behavior.
13354 This is expected to catch almost all occurrences in practice.
13355 If this code misses important cases that unbounded recursion
13356 would not, passes that need this information could be revised
13357 to provide it through dataflow propagation. */
13358 return (!name_registered_for_update_p (t)
13359 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13360 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13361 depth));
13362
13363 default:
13364 break;
13365 }
13366 return false;
13367 }
13368
13369 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13370 has an integer value. We also allow +Inf, -Inf and NaN to be
13371 considered integer values. Return false for signaling NaN.
13372
13373 DEPTH is the current nesting depth of the query. */
13374
13375 static bool
13376 integer_valued_real_invalid_p (tree t, int depth)
13377 {
13378 switch (TREE_CODE (t))
13379 {
13380 case COMPOUND_EXPR:
13381 case MODIFY_EXPR:
13382 case BIND_EXPR:
13383 return RECURSE (TREE_OPERAND (t, 1));
13384
13385 case SAVE_EXPR:
13386 return RECURSE (TREE_OPERAND (t, 0));
13387
13388 default:
13389 break;
13390 }
13391 return false;
13392 }
13393
13394 #undef RECURSE
13395 #undef integer_valued_real_p
13396
13397 /* Return true if the floating point expression T has an integer value.
13398 We also allow +Inf, -Inf and NaN to be considered integer values.
13399 Return false for signaling NaN.
13400
13401 DEPTH is the current nesting depth of the query. */
13402
13403 bool
13404 integer_valued_real_p (tree t, int depth)
13405 {
13406 if (t == error_mark_node)
13407 return false;
13408
13409 tree_code code = TREE_CODE (t);
13410 switch (TREE_CODE_CLASS (code))
13411 {
13412 case tcc_binary:
13413 case tcc_comparison:
13414 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13415 TREE_OPERAND (t, 1), depth);
13416
13417 case tcc_unary:
13418 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13419
13420 case tcc_constant:
13421 case tcc_declaration:
13422 case tcc_reference:
13423 return integer_valued_real_single_p (t, depth);
13424
13425 default:
13426 break;
13427 }
13428
13429 switch (code)
13430 {
13431 case COND_EXPR:
13432 case SSA_NAME:
13433 return integer_valued_real_single_p (t, depth);
13434
13435 case CALL_EXPR:
13436 {
13437 tree arg0 = (call_expr_nargs (t) > 0
13438 ? CALL_EXPR_ARG (t, 0)
13439 : NULL_TREE);
13440 tree arg1 = (call_expr_nargs (t) > 1
13441 ? CALL_EXPR_ARG (t, 1)
13442 : NULL_TREE);
13443 return integer_valued_real_call_p (get_call_combined_fn (t),
13444 arg0, arg1, depth);
13445 }
13446
13447 default:
13448 return integer_valued_real_invalid_p (t, depth);
13449 }
13450 }
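/* Editorial example: assuming FLOOR_CALL is a CALL_EXPR of floor (x)
   for double x and ONE is the REAL_CST 1.0,

      integer_valued_real_p (build2 (PLUS_EXPR, double_type_node,
                                     floor_call, one), 0)

   returns true, combining the CALL_EXPR case (CASE_CFN_FLOOR) with the
   binary PLUS_EXPR case above.  */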
13451
13452 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13453 attempt to fold the expression to a constant without modifying TYPE,
13454 OP0 or OP1.
13455
13456 If the expression could be simplified to a constant, then return
13457 the constant. If the expression would not be simplified to a
13458 constant, then return NULL_TREE. */
13459
13460 tree
13461 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13462 {
13463 tree tem = fold_binary (code, type, op0, op1);
13464 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13465 }
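/* Editorial example:

      fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                               build_int_cst (integer_type_node, 2),
                               build_int_cst (integer_type_node, 3))

   returns the INTEGER_CST 5, whereas the same call with a VAR_DECL
   operand returns NULL_TREE because the folded result would not be
   TREE_CONSTANT.  */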
13466
13467 /* Given the components of a unary expression CODE, TYPE and OP0,
13468 attempt to fold the expression to a constant without modifying
13469 TYPE or OP0.
13470
13471 If the expression could be simplified to a constant, then return
13472 the constant. If the expression would not be simplified to a
13473 constant, then return NULL_TREE. */
13474
13475 tree
13476 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13477 {
13478 tree tem = fold_unary (code, type, op0);
13479 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13480 }
13481
13482 /* If EXP represents referencing an element in a constant string
13483 (either via pointer arithmetic or array indexing), return the
13484 tree representing the value accessed, otherwise return NULL. */
13485
13486 tree
13487 fold_read_from_constant_string (tree exp)
13488 {
13489 if ((TREE_CODE (exp) == INDIRECT_REF
13490 || TREE_CODE (exp) == ARRAY_REF)
13491 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13492 {
13493 tree exp1 = TREE_OPERAND (exp, 0);
13494 tree index;
13495 tree string;
13496 location_t loc = EXPR_LOCATION (exp);
13497
13498 if (TREE_CODE (exp) == INDIRECT_REF)
13499 string = string_constant (exp1, &index);
13500 else
13501 {
13502 tree low_bound = array_ref_low_bound (exp);
13503 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13504
13505 /* Optimize the special-case of a zero lower bound.
13506
13507 We convert the low_bound to sizetype to avoid some problems
13508 with constant folding. (E.g. suppose the lower bound is 1,
13509 and its mode is QI. Without the conversion, (ARRAY
13510 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13511 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13512 if (! integer_zerop (low_bound))
13513 index = size_diffop_loc (loc, index,
13514 fold_convert_loc (loc, sizetype, low_bound));
13515
13516 string = exp1;
13517 }
13518
13519 scalar_int_mode char_mode;
13520 if (string
13521 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13522 && TREE_CODE (string) == STRING_CST
13523 && TREE_CODE (index) == INTEGER_CST
13524 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13525 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
13526 &char_mode)
13527 && GET_MODE_SIZE (char_mode) == 1)
13528 return build_int_cst_type (TREE_TYPE (exp),
13529 (TREE_STRING_POINTER (string)
13530 [TREE_INT_CST_LOW (index)]));
13531 }
13532 return NULL;
13533 }
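/* Editorial example: for the C expression "abc"[1], EXP is an ARRAY_REF
   of a STRING_CST with index 1, and the routine returns the character
   constant 'b' as an INTEGER_CST of the element type; a variable or
   out-of-bounds index makes it return NULL instead.  */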
13534
13535 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13536 an integer constant, real, or fixed-point constant.
13537
13538 TYPE is the type of the result. */
13539
13540 static tree
13541 fold_negate_const (tree arg0, tree type)
13542 {
13543 tree t = NULL_TREE;
13544
13545 switch (TREE_CODE (arg0))
13546 {
13547 case INTEGER_CST:
13548 {
13549 bool overflow;
13550 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13551 t = force_fit_type (type, val, 1,
13552 (overflow && ! TYPE_UNSIGNED (type))
13553 || TREE_OVERFLOW (arg0));
13554 break;
13555 }
13556
13557 case REAL_CST:
13558 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13559 break;
13560
13561 case FIXED_CST:
13562 {
13563 FIXED_VALUE_TYPE f;
13564 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13565 &(TREE_FIXED_CST (arg0)), NULL,
13566 TYPE_SATURATING (type));
13567 t = build_fixed (type, f);
13568 /* Propagate overflow flags. */
13569 if (overflow_p | TREE_OVERFLOW (arg0))
13570 TREE_OVERFLOW (t) = 1;
13571 break;
13572 }
13573
13574 default:
13575 gcc_unreachable ();
13576 }
13577
13578 return t;
13579 }
13580
13581 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13582 an integer constant or real constant.
13583
13584 TYPE is the type of the result. */
13585
13586 tree
13587 fold_abs_const (tree arg0, tree type)
13588 {
13589 tree t = NULL_TREE;
13590
13591 switch (TREE_CODE (arg0))
13592 {
13593 case INTEGER_CST:
13594 {
13595 /* If the value is unsigned or non-negative, then the absolute value
13596 is the same as the ordinary value. */
13597 if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type)))
13598 t = arg0;
13599
13600 /* If the value is negative, then the absolute value is
13601 its negation. */
13602 else
13603 {
13604 bool overflow;
13605 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13606 t = force_fit_type (type, val, -1,
13607 overflow | TREE_OVERFLOW (arg0));
13608 }
13609 }
13610 break;
13611
13612 case REAL_CST:
13613 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13614 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13615 else
13616 t = arg0;
13617 break;
13618
13619 default:
13620 gcc_unreachable ();
13621 }
13622
13623 return t;
13624 }
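/* Editorial note: fold_abs_const on the INTEGER_CST INT_MIN of a 32-bit
   signed type negates it; the negation overflows, so force_fit_type
   returns a constant with TREE_OVERFLOW set rather than silently
   yielding INT_MIN again.  */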
13625
13626 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13627 constant. TYPE is the type of the result. */
13628
13629 static tree
13630 fold_not_const (const_tree arg0, tree type)
13631 {
13632 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13633
13634 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
13635 }
13636
13637 /* Given CODE, a relational operator, the target type, TYPE and two
13638 constant operands OP0 and OP1, return the result of the
13639 relational operation. If the result is not a compile time
13640 constant, then return NULL_TREE. */
13641
13642 static tree
13643 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13644 {
13645 int result, invert;
13646
13647 /* From here on, the only cases we handle are when the result is
13648 known to be a constant. */
13649
13650 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13651 {
13652 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13653 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13654
13655 /* Handle the cases where either operand is a NaN. */
13656 if (real_isnan (c0) || real_isnan (c1))
13657 {
13658 switch (code)
13659 {
13660 case EQ_EXPR:
13661 case ORDERED_EXPR:
13662 result = 0;
13663 break;
13664
13665 case NE_EXPR:
13666 case UNORDERED_EXPR:
13667 case UNLT_EXPR:
13668 case UNLE_EXPR:
13669 case UNGT_EXPR:
13670 case UNGE_EXPR:
13671 case UNEQ_EXPR:
13672 result = 1;
13673 break;
13674
13675 case LT_EXPR:
13676 case LE_EXPR:
13677 case GT_EXPR:
13678 case GE_EXPR:
13679 case LTGT_EXPR:
13680 if (flag_trapping_math)
13681 return NULL_TREE;
13682 result = 0;
13683 break;
13684
13685 default:
13686 gcc_unreachable ();
13687 }
13688
13689 return constant_boolean_node (result, type);
13690 }
13691
13692 return constant_boolean_node (real_compare (code, c0, c1), type);
13693 }
13694
13695 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13696 {
13697 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13698 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13699 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13700 }
13701
13702 /* Handle equality/inequality of complex constants. */
13703 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13704 {
13705 tree rcond = fold_relational_const (code, type,
13706 TREE_REALPART (op0),
13707 TREE_REALPART (op1));
13708 tree icond = fold_relational_const (code, type,
13709 TREE_IMAGPART (op0),
13710 TREE_IMAGPART (op1));
13711 if (code == EQ_EXPR)
13712 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13713 else if (code == NE_EXPR)
13714 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13715 else
13716 return NULL_TREE;
13717 }
13718
13719 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13720 {
13721 if (!VECTOR_TYPE_P (type))
13722 {
13723 /* We have a vector comparison with a scalar boolean result. */
13724 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13725 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13726 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13727 {
13728 tree elem0 = VECTOR_CST_ELT (op0, i);
13729 tree elem1 = VECTOR_CST_ELT (op1, i);
13730 tree tmp = fold_relational_const (code, type, elem0, elem1);
13731 if (tmp == NULL_TREE)
13732 return NULL_TREE;
13733 if (integer_zerop (tmp))
13734 return constant_boolean_node (false, type);
13735 }
13736 return constant_boolean_node (true, type);
13737 }
13738 unsigned count = VECTOR_CST_NELTS (op0);
13739 gcc_assert (VECTOR_CST_NELTS (op1) == count
13740 && TYPE_VECTOR_SUBPARTS (type) == count);
13741
13742 auto_vec<tree, 32> elts (count);
13743 for (unsigned i = 0; i < count; i++)
13744 {
13745 tree elem_type = TREE_TYPE (type);
13746 tree elem0 = VECTOR_CST_ELT (op0, i);
13747 tree elem1 = VECTOR_CST_ELT (op1, i);
13748
13749 tree tem = fold_relational_const (code, elem_type,
13750 elem0, elem1);
13751
13752 if (tem == NULL_TREE)
13753 return NULL_TREE;
13754
13755 elts.quick_push (build_int_cst (elem_type,
13756 integer_zerop (tem) ? 0 : -1));
13757 }
13758
13759 return build_vector (type, elts);
13760 }
13761
13762 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13763
13764 To compute GT, swap the arguments and do LT.
13765 To compute GE, do LT and invert the result.
13766 To compute LE, swap the arguments, do LT and invert the result.
13767 To compute NE, do EQ and invert the result.
13768
13769 Therefore, the code below must handle only EQ and LT. */
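/* Editorial example: 2 >= 3 is evaluated as the inverse of 2 < 3 below:
   tree_int_cst_lt returns 1, and the invert step flips the final
   result to 0.  */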
13770
13771 if (code == LE_EXPR || code == GT_EXPR)
13772 {
13773 std::swap (op0, op1);
13774 code = swap_tree_comparison (code);
13775 }
13776
13777 /* Note that it is safe to invert for real values here because we
13778 have already handled the one case where it matters. */
13779
13780 invert = 0;
13781 if (code == NE_EXPR || code == GE_EXPR)
13782 {
13783 invert = 1;
13784 code = invert_tree_comparison (code, false);
13785 }
13786
13787 /* Compute a result for LT or EQ if args permit;
13788 otherwise return NULL_TREE. */
13789 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13790 {
13791 if (code == EQ_EXPR)
13792 result = tree_int_cst_equal (op0, op1);
13793 else
13794 result = tree_int_cst_lt (op0, op1);
13795 }
13796 else
13797 return NULL_TREE;
13798
13799 if (invert)
13800 result ^= 1;
13801 return constant_boolean_node (result, type);
13802 }
13803
13804 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13805 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13806 itself. */
13807
13808 tree
13809 fold_build_cleanup_point_expr (tree type, tree expr)
13810 {
13811 /* If the expression does not have side effects then we don't have to wrap
13812 it with a cleanup point expression. */
13813 if (!TREE_SIDE_EFFECTS (expr))
13814 return expr;
13815
13816 /* If the expression is a return, check whether the expression inside the
13817 return, or the right hand side of the modify expression inside the return,
13818 has no side effects. If either has no side effects, we don't need to wrap
13819 the expression in a cleanup point expression. Note we don't check the left
13820 hand side of the modify because it should always be the return decl. */
13821 if (TREE_CODE (expr) == RETURN_EXPR)
13822 {
13823 tree op = TREE_OPERAND (expr, 0);
13824 if (!op || !TREE_SIDE_EFFECTS (op))
13825 return expr;
13826 op = TREE_OPERAND (op, 1);
13827 if (!TREE_SIDE_EFFECTS (op))
13828 return expr;
13829 }
13830
13831 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
13832 }

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p; make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT max_offset
		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
		   * TYPE_VECTOR_SUBPARTS (op00type));
	      if (tree_int_cst_sign_bit (op01) == 0
		  && compare_tree_int (op01, max_offset) == -1)
		{
		  unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
		  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
		  tree index = bitsize_int (indexi);
		  return fold_build3_loc (loc,
					  BIT_FIELD_REF, type, op00,
					  part_width, index);
		}
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min = TYPE_MIN_VALUE (type_domain);
	      offset_int off = wi::to_offset (op01);
	      offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
	      offset_int remainder;
	      off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
	      if (remainder == 0 && TREE_CODE (min) == INTEGER_CST)
		{
		  off = off + wi::to_offset (min);
		  op01 = wide_int_to_tree (sizetype, off);
		  return build4_loc (loc, ARRAY_REF, type, op00, op01,
				     NULL_TREE, NULL_TREE);
		}
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
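
/* For example (illustrative): with its result ignored, x + f () keeps
   only f (), the operand with side effects, while c ? f () : g () is
   returned whole because both arms have side effects.  */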

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a
     constant, because in that case the check is more expensive than
     just doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = wi::to_wide (value);
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val += divisor - 1;
	  val &= (int) -divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
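
/* A standalone sketch of the power-of-two fast path above (hypothetical
   helper, ignoring the overflow tracking the real code performs): for a
   nonzero power-of-two DIVISOR, rounding up is one add and one mask,
   e.g. (13 + 7) & -8 == 16.  */

static inline unsigned HOST_WIDE_INT
round_up_pow2_sketch (unsigned HOST_WIDE_INT value,
		      unsigned HOST_WIDE_INT divisor)
{
  /* Adding DIVISOR - 1 carries VALUE past the next multiple unless it
     is already one; masking with -DIVISOR clears the low bits.  */
  return (value + divisor - 1) & -divisor;
}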

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a
     constant, because in that case the check is more expensive than
     just doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
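
/* The matching sketch for rounding down (hypothetical helper): no add
   is needed, only the mask, e.g. 13 & -8 == 8.  */

static inline unsigned HOST_WIDE_INT
round_down_pow2_sketch (unsigned HOST_WIDE_INT value,
			unsigned HOST_WIDE_INT divisor)
{
  /* DIVISOR must be a nonzero power of two, as in round_down_loc.  */
  return value & -divisor;
}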

/* Return a pointer to the base of the object addressed by EXP, and
   extract information about the offset of the access, storing it in
   *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      core = TREE_OPERAND (exp, 0);
      STRIP_NOPS (core);
      *pbitpos = 0;
      *poffset = TREE_OPERAND (exp, 1);
      if (TREE_CODE (*poffset) == INTEGER_CST)
	{
	  offset_int tem = wi::sext (wi::to_offset (*poffset),
				     TYPE_PRECISION (TREE_TYPE (*poffset)));
	  tem <<= LOG2_BITS_PER_UNIT;
	  if (wi::fits_shwi_p (tem))
	    {
	      *pbitpos = tem.to_shwi ();
	      *poffset = NULL_TREE;
	    }
	}
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */

tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, size_int (off));
}

/* Return a char pointer for a C string if it is a string constant
   or a sum of a string constant and an integer constant.  We only
   support string constants properly terminated with a '\0' character.
   If STRLEN is a valid pointer, the length of the returned string
   (including the terminating character) is stored in *STRLEN.  */

const char *
c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
{
  tree offset_node;

  if (strlen)
    *strlen = 0;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
	return NULL;
      else
	offset = tree_to_uhwi (offset_node);
    }

  unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
  const char *string = TREE_STRING_POINTER (src);

  /* Support only properly null-terminated strings.  */
  if (string_length == 0
      || string[string_length - 1] != '\0'
      || offset >= string_length)
    return NULL;

  if (strlen)
    *strlen = string_length - offset;
  return string + offset;
}
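
/* For example (illustrative): for the tree form of "hello" + 1 this
   returns a pointer to "ello" and stores 5, the remaining length
   including the terminating '\0', in *STRLEN.  */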

#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
			     tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
				 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}

/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
			       one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
			       one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
				   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
			       zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
				   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
			       zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
				   x);
}

/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
}

/* Run all of the selftests within this file.  */

void
fold_const_c_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */