/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
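
/* For example, a caller can combine two size constants with

     tree sz = size_binop (PLUS_EXPR, size_int (4), size_int (8));

   which yields the sizetype constant 12, folded at compile time.  */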

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
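
/* The encoding is chosen so that the bitwise OR and AND of two codes
   give the code of the disjunction and conjunction of the comparisons;
   for instance COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, and
   COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ.  */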

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
static tree fold_negate_expr (location_t, tree);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
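
/* For instance, with INTEGER_CST arguments 12 and 4 this returns the
   constant 3, whereas with 12 and 5 it returns NULL_TREE because the
   division leaves a remainder.  */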
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
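
/* A minimal sketch of the deferral protocol (EXPR and STMT stand in
   for the caller's data): a caller that folds speculatively wraps the
   work like so:

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     bool used = folded != NULL_TREE && TREE_CONSTANT (folded);
     fold_undefer_overflow_warnings (used, stmt,
				     WARN_STRICT_OVERFLOW_MISC);

   so a deferred -Wstrict-overflow warning is only emitted when the
   folded result is actually used.  */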
\f
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
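
/* For example, sin is odd, so -sin(x) may be rewritten as sin(-x); an
   even function such as cos falls through to the default case and is
   rejected.  */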

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* While INT_MIN/n * n doesn't overflow, negating one of its
	 operands does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && wi::popcount (wi::abs (TREE_OPERAND (t, 0))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && wi::popcount (wi::abs (TREE_OPERAND (t, 1))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
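
/* As examples: negate_expr_p is true for the INTEGER_CST 1 (the
   negation -1 fits), false for INT_MIN of a signed type (negating it
   would overflow), and true for a floating-point A - B when neither
   signed zeros nor sign-dependent rounding are honored, since
   -(A - B) can be rewritten as B - A.  */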

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
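
/* For example, when signed zeros and sign-dependent rounding are not
   honored, negate_expr applied to A - B folds to B - A; an expression
   that cannot be simplified just comes back wrapped in a
   NEGATE_EXPR.  */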
\f
/* Split a tree IN into constant, literal and variable parts that could
   be combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal pointer MINUS expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
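
/* As a worked example: splitting IN = x + 4 with CODE == PLUS_EXPR
   sets *LITP to 4 and returns x as the variable part, while splitting
   x - 4 sets *MINUS_LITP to 4 instead.  */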

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
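
/* For instance, re-associating T1 = x with T2 = -y under PLUS_EXPR is
   folded to x - y rather than x + (-y), and x + 0 collapses to x.  */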
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
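
/* E.g. int_const_binop with PLUS_EXPR on the INTEGER_CSTs 2 and 3
   yields the constant 5, while any division or modulus with a zero
   second operand yields NULL_TREE rather than folding.  */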

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform the operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform the operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;
      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for a floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make the resulting NaN value be a qNaN when
	     flag_signaling_nans is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make the resulting NaN value be a qNaN when
	     flag_signaling_nans is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
	 result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;
      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi
	      */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as an argument, put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
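
/* For example, const_unop with NEGATE_EXPR on the INTEGER_CST 5 folds
   to -5, and REDUC_PLUS_EXPR on the constant vector {1, 2, 3, 4} folds
   to the scalar constant 10.  */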

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
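
/* So size_binop (PLUS_EXPR, size_zero_node, X) simply returns X, and
   adding two sizetype constants is done with overflow tracking even
   though sizetype is unsigned.  */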

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
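
/* For instance, size_diffop on the sizetype constants 4 and 7 yields
   the ssizetype constant -3, computed as 0 - (7 - 4) so the unsigned
   subtraction never wraps.  */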
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */
1866
1867 static tree
1868 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1869 {
1870 bool overflow = false;
1871 tree t;
1872
1873 /* The following code implements the floating point to integer
1874 conversion rules required by the Java Language Specification,
1875 that IEEE NaNs are mapped to zero and values that overflow
1876 the target precision saturate, i.e. values greater than
1877 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1878 are mapped to INT_MIN. These semantics are allowed by the
1879 C and C++ standards that simply state that the behavior of
1880 FP-to-integer conversion is unspecified upon overflow. */
1881
1882 wide_int val;
1883 REAL_VALUE_TYPE r;
1884 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1885
1886 switch (code)
1887 {
1888 case FIX_TRUNC_EXPR:
1889 real_trunc (&r, VOIDmode, &x);
1890 break;
1891
1892 default:
1893 gcc_unreachable ();
1894 }
1895
1896 /* If R is NaN, return zero and show we have an overflow. */
1897 if (REAL_VALUE_ISNAN (r))
1898 {
1899 overflow = true;
1900 val = wi::zero (TYPE_PRECISION (type));
1901 }
1902
1903 /* See if R is less than the lower bound or greater than the
1904 upper bound. */
1905
1906 if (! overflow)
1907 {
1908 tree lt = TYPE_MIN_VALUE (type);
1909 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1910 if (real_less (&r, &l))
1911 {
1912 overflow = true;
1913 val = lt;
1914 }
1915 }
1916
1917 if (! overflow)
1918 {
1919 tree ut = TYPE_MAX_VALUE (type);
1920 if (ut)
1921 {
1922 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1923 if (real_less (&u, &r))
1924 {
1925 overflow = true;
1926 val = ut;
1927 }
1928 }
1929 }
1930
1931 if (! overflow)
1932 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1933
1934 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1935 return t;
1936 }
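
/* Illustrative sketch, not part of the original source: the saturating
   behavior described above, using +Inf as input.  The helper name is
   hypothetical.  */
#if 0
static tree
example_saturating_fix_trunc (void)
{
  REAL_VALUE_TYPE inf;
  real_inf (&inf);
  /* +Inf exceeds TYPE_MAX_VALUE (integer_type_node), so the result
     saturates to INT_MAX with TREE_OVERFLOW set; a NaN input would
     instead fold to 0, also with TREE_OVERFLOW set.  */
  return fold_convert_const_int_from_real (FIX_TRUNC_EXPR,
					   integer_type_node,
					   build_real (double_type_node,
						       inf));
}
#endif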
1937
1938 /* A subroutine of fold_convert_const handling conversions of a
1939 FIXED_CST to an integer type. */
1940
1941 static tree
1942 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1943 {
1944 tree t;
1945 double_int temp, temp_trunc;
1946 unsigned int mode;
1947
1948 /* Right shift FIXED_CST to temp by fbit. */
1949 temp = TREE_FIXED_CST (arg1).data;
1950 mode = TREE_FIXED_CST (arg1).mode;
1951 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1952 {
1953 temp = temp.rshift (GET_MODE_FBIT (mode),
1954 HOST_BITS_PER_DOUBLE_INT,
1955 SIGNED_FIXED_POINT_MODE_P (mode));
1956
1957 /* Left shift temp to temp_trunc by fbit. */
1958 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1959 HOST_BITS_PER_DOUBLE_INT,
1960 SIGNED_FIXED_POINT_MODE_P (mode));
1961 }
1962 else
1963 {
1964 temp = double_int_zero;
1965 temp_trunc = double_int_zero;
1966 }
1967
1968 /* If FIXED_CST is negative, we need to round the value toward 0:
1969 if the fractional bits are not all zero, add 1 to TEMP. */
1970 if (SIGNED_FIXED_POINT_MODE_P (mode)
1971 && temp_trunc.is_negative ()
1972 && TREE_FIXED_CST (arg1).data != temp_trunc)
1973 temp += double_int_one;
1974
1975 /* Given a fixed-point constant, make a new constant with the new type,
1976 appropriately sign-extended or truncated. */
1977 t = force_fit_type (type, temp, -1,
1978 (temp.is_negative ()
1979 && (TYPE_UNSIGNED (type)
1980 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1981 | TREE_OVERFLOW (arg1));
1982
1983 return t;
1984 }
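
/* Worked example, not part of the original source: for a signed value
   of -1.25 (i.e. data == -1.25 * 2^fbit), the arithmetic right shift
   gives TEMP == -2 and TEMP_TRUNC == -2 * 2^fbit != data, so 1 is
   added and the result is the integer -1, rounding toward 0 as the
   comment above describes.  */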
1985
1986 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1987 to another floating point type. */
1988
1989 static tree
1990 fold_convert_const_real_from_real (tree type, const_tree arg1)
1991 {
1992 REAL_VALUE_TYPE value;
1993 tree t;
1994
1995 /* Don't perform the operation if flag_signaling_nans is on
1996 and the operand is a signaling NaN. */
1997 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
1998 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
1999 return NULL_TREE;
2000
2001 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2002 t = build_real (type, value);
2003
2004 /* If converting an infinity or NAN to a representation that doesn't
2005 have one, set the overflow bit so that we can produce some kind of
2006 error message at the appropriate point if necessary. It's not the
2007 most user-friendly message, but it's better than nothing. */
2008 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2009 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2010 TREE_OVERFLOW (t) = 1;
2011 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2012 && !MODE_HAS_NANS (TYPE_MODE (type)))
2013 TREE_OVERFLOW (t) = 1;
2014 /* Regular overflow: the conversion produced an infinity in a mode that
2015 can't represent infinities. */
2016 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2017 && REAL_VALUE_ISINF (value)
2018 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2019 TREE_OVERFLOW (t) = 1;
2020 else
2021 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2022 return t;
2023 }
2024
2025 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2026 to a floating point type. */
2027
2028 static tree
2029 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2030 {
2031 REAL_VALUE_TYPE value;
2032 tree t;
2033
2034 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2035 t = build_real (type, value);
2036
2037 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2038 return t;
2039 }
2040
2041 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2042 to another fixed-point type. */
2043
2044 static tree
2045 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2046 {
2047 FIXED_VALUE_TYPE value;
2048 tree t;
2049 bool overflow_p;
2050
2051 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2052 TYPE_SATURATING (type));
2053 t = build_fixed (type, value);
2054
2055 /* Propagate overflow flags. */
2056 if (overflow_p | TREE_OVERFLOW (arg1))
2057 TREE_OVERFLOW (t) = 1;
2058 return t;
2059 }
2060
2061 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2062 to a fixed-point type. */
2063
2064 static tree
2065 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2066 {
2067 FIXED_VALUE_TYPE value;
2068 tree t;
2069 bool overflow_p;
2070 double_int di;
2071
2072 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2073
2074 di.low = TREE_INT_CST_ELT (arg1, 0);
2075 if (TREE_INT_CST_NUNITS (arg1) == 1)
2076 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2077 else
2078 di.high = TREE_INT_CST_ELT (arg1, 1);
2079
2080 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2081 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2082 TYPE_SATURATING (type));
2083 t = build_fixed (type, value);
2084
2085 /* Propagate overflow flags. */
2086 if (overflow_p | TREE_OVERFLOW (arg1))
2087 TREE_OVERFLOW (t) = 1;
2088 return t;
2089 }
2090
2091 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2092 to a fixed-point type. */
2093
2094 static tree
2095 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2096 {
2097 FIXED_VALUE_TYPE value;
2098 tree t;
2099 bool overflow_p;
2100
2101 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2102 &TREE_REAL_CST (arg1),
2103 TYPE_SATURATING (type));
2104 t = build_fixed (type, value);
2105
2106 /* Propagate overflow flags. */
2107 if (overflow_p | TREE_OVERFLOW (arg1))
2108 TREE_OVERFLOW (t) = 1;
2109 return t;
2110 }
2111
2112 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2113 type TYPE. If no simplification can be done return NULL_TREE. */
2114
2115 static tree
2116 fold_convert_const (enum tree_code code, tree type, tree arg1)
2117 {
2118 if (TREE_TYPE (arg1) == type)
2119 return arg1;
2120
2121 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2122 || TREE_CODE (type) == OFFSET_TYPE)
2123 {
2124 if (TREE_CODE (arg1) == INTEGER_CST)
2125 return fold_convert_const_int_from_int (type, arg1);
2126 else if (TREE_CODE (arg1) == REAL_CST)
2127 return fold_convert_const_int_from_real (code, type, arg1);
2128 else if (TREE_CODE (arg1) == FIXED_CST)
2129 return fold_convert_const_int_from_fixed (type, arg1);
2130 }
2131 else if (TREE_CODE (type) == REAL_TYPE)
2132 {
2133 if (TREE_CODE (arg1) == INTEGER_CST)
2134 return build_real_from_int_cst (type, arg1);
2135 else if (TREE_CODE (arg1) == REAL_CST)
2136 return fold_convert_const_real_from_real (type, arg1);
2137 else if (TREE_CODE (arg1) == FIXED_CST)
2138 return fold_convert_const_real_from_fixed (type, arg1);
2139 }
2140 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2141 {
2142 if (TREE_CODE (arg1) == FIXED_CST)
2143 return fold_convert_const_fixed_from_fixed (type, arg1);
2144 else if (TREE_CODE (arg1) == INTEGER_CST)
2145 return fold_convert_const_fixed_from_int (type, arg1);
2146 else if (TREE_CODE (arg1) == REAL_CST)
2147 return fold_convert_const_fixed_from_real (type, arg1);
2148 }
2149 else if (TREE_CODE (type) == VECTOR_TYPE)
2150 {
2151 if (TREE_CODE (arg1) == VECTOR_CST
2152 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2153 {
2154 int len = TYPE_VECTOR_SUBPARTS (type);
2155 tree elttype = TREE_TYPE (type);
2156 tree *v = XALLOCAVEC (tree, len);
2157 for (int i = 0; i < len; ++i)
2158 {
2159 tree elt = VECTOR_CST_ELT (arg1, i);
2160 tree cvt = fold_convert_const (code, elttype, elt);
2161 if (cvt == NULL_TREE)
2162 return NULL_TREE;
2163 v[i] = cvt;
2164 }
2165 return build_vector (type, v);
2166 }
2167 }
2168 return NULL_TREE;
2169 }
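
/* Illustrative sketch, not part of the original source: the dispatch
   above landing in the REAL_TYPE/INTEGER_CST case.  The helper name is
   hypothetical.  */
#if 0
static tree
example_int_to_real_fold (void)
{
  tree three = build_int_cst (integer_type_node, 3);
  /* TYPE is a REAL_TYPE and ARG1 an INTEGER_CST, so this goes through
     build_real_from_int_cst and yields the REAL_CST 3.0.  */
  return fold_convert_const (FLOAT_EXPR, double_type_node, three);
}
#endif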
2170
2171 /* Construct a vector of all-zero elements of vector type TYPE. */
2172
2173 static tree
2174 build_zero_vector (tree type)
2175 {
2176 tree t;
2177
2178 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2179 return build_vector_from_val (type, t);
2180 }
2181
2182 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2183
2184 bool
2185 fold_convertible_p (const_tree type, const_tree arg)
2186 {
2187 tree orig = TREE_TYPE (arg);
2188
2189 if (type == orig)
2190 return true;
2191
2192 if (TREE_CODE (arg) == ERROR_MARK
2193 || TREE_CODE (type) == ERROR_MARK
2194 || TREE_CODE (orig) == ERROR_MARK)
2195 return false;
2196
2197 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2198 return true;
2199
2200 switch (TREE_CODE (type))
2201 {
2202 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2203 case POINTER_TYPE: case REFERENCE_TYPE:
2204 case OFFSET_TYPE:
2205 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2206 || TREE_CODE (orig) == OFFSET_TYPE);
2207
2208 case REAL_TYPE:
2209 case FIXED_POINT_TYPE:
2210 case VECTOR_TYPE:
2211 case VOID_TYPE:
2212 return TREE_CODE (type) == TREE_CODE (orig);
2213
2214 default:
2215 return false;
2216 }
2217 }
2218
2219 /* Convert expression ARG to type TYPE. Used by the middle-end for
2220 simple conversions in preference to calling the front-end's convert. */
2221
2222 tree
2223 fold_convert_loc (location_t loc, tree type, tree arg)
2224 {
2225 tree orig = TREE_TYPE (arg);
2226 tree tem;
2227
2228 if (type == orig)
2229 return arg;
2230
2231 if (TREE_CODE (arg) == ERROR_MARK
2232 || TREE_CODE (type) == ERROR_MARK
2233 || TREE_CODE (orig) == ERROR_MARK)
2234 return error_mark_node;
2235
2236 switch (TREE_CODE (type))
2237 {
2238 case POINTER_TYPE:
2239 case REFERENCE_TYPE:
2240 /* Handle conversions between pointers to different address spaces. */
2241 if (POINTER_TYPE_P (orig)
2242 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2243 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2244 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2245 /* fall through */
2246
2247 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2248 case OFFSET_TYPE:
2249 if (TREE_CODE (arg) == INTEGER_CST)
2250 {
2251 tem = fold_convert_const (NOP_EXPR, type, arg);
2252 if (tem != NULL_TREE)
2253 return tem;
2254 }
2255 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2256 || TREE_CODE (orig) == OFFSET_TYPE)
2257 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2258 if (TREE_CODE (orig) == COMPLEX_TYPE)
2259 return fold_convert_loc (loc, type,
2260 fold_build1_loc (loc, REALPART_EXPR,
2261 TREE_TYPE (orig), arg));
2262 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2263 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2264 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2265
2266 case REAL_TYPE:
2267 if (TREE_CODE (arg) == INTEGER_CST)
2268 {
2269 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2270 if (tem != NULL_TREE)
2271 return tem;
2272 }
2273 else if (TREE_CODE (arg) == REAL_CST)
2274 {
2275 tem = fold_convert_const (NOP_EXPR, type, arg);
2276 if (tem != NULL_TREE)
2277 return tem;
2278 }
2279 else if (TREE_CODE (arg) == FIXED_CST)
2280 {
2281 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2282 if (tem != NULL_TREE)
2283 return tem;
2284 }
2285
2286 switch (TREE_CODE (orig))
2287 {
2288 case INTEGER_TYPE:
2289 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2290 case POINTER_TYPE: case REFERENCE_TYPE:
2291 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2292
2293 case REAL_TYPE:
2294 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2295
2296 case FIXED_POINT_TYPE:
2297 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2298
2299 case COMPLEX_TYPE:
2300 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2301 return fold_convert_loc (loc, type, tem);
2302
2303 default:
2304 gcc_unreachable ();
2305 }
2306
2307 case FIXED_POINT_TYPE:
2308 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2309 || TREE_CODE (arg) == REAL_CST)
2310 {
2311 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2312 if (tem != NULL_TREE)
2313 goto fold_convert_exit;
2314 }
2315
2316 switch (TREE_CODE (orig))
2317 {
2318 case FIXED_POINT_TYPE:
2319 case INTEGER_TYPE:
2320 case ENUMERAL_TYPE:
2321 case BOOLEAN_TYPE:
2322 case REAL_TYPE:
2323 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2324
2325 case COMPLEX_TYPE:
2326 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2327 return fold_convert_loc (loc, type, tem);
2328
2329 default:
2330 gcc_unreachable ();
2331 }
2332
2333 case COMPLEX_TYPE:
2334 switch (TREE_CODE (orig))
2335 {
2336 case INTEGER_TYPE:
2337 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2338 case POINTER_TYPE: case REFERENCE_TYPE:
2339 case REAL_TYPE:
2340 case FIXED_POINT_TYPE:
2341 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2342 fold_convert_loc (loc, TREE_TYPE (type), arg),
2343 fold_convert_loc (loc, TREE_TYPE (type),
2344 integer_zero_node));
2345 case COMPLEX_TYPE:
2346 {
2347 tree rpart, ipart;
2348
2349 if (TREE_CODE (arg) == COMPLEX_EXPR)
2350 {
2351 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2352 TREE_OPERAND (arg, 0));
2353 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2354 TREE_OPERAND (arg, 1));
2355 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2356 }
2357
2358 arg = save_expr (arg);
2359 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2360 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2361 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2362 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2363 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2364 }
2365
2366 default:
2367 gcc_unreachable ();
2368 }
2369
2370 case VECTOR_TYPE:
2371 if (integer_zerop (arg))
2372 return build_zero_vector (type);
2373 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2374 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2375 || TREE_CODE (orig) == VECTOR_TYPE);
2376 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2377
2378 case VOID_TYPE:
2379 tem = fold_ignored_result (arg);
2380 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2381
2382 default:
2383 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2384 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2385 gcc_unreachable ();
2386 }
2387 fold_convert_exit:
2388 protected_set_expr_location_unshare (tem, loc);
2389 return tem;
2390 }
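
/* Illustrative sketch, not part of the original source: converting a
   complex constant to a real type via the COMPLEX_TYPE arm of the
   REAL_TYPE case above.  The helper name is hypothetical.  */
#if 0
static tree
example_complex_to_real (void)
{
  tree z = build_complex (complex_double_type_node,
			  build_real (double_type_node, dconst1),
			  build_real (double_type_node, dconst2));
  /* The real part is extracted with REALPART_EXPR first and then
     converted, so this folds to the REAL_CST 1.0.  */
  return fold_convert_loc (UNKNOWN_LOCATION, double_type_node, z);
}
#endif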
2391 \f
2392 /* Return false if expr can be assumed not to be an lvalue, true
2393 otherwise. */
2394
2395 static bool
2396 maybe_lvalue_p (const_tree x)
2397 {
2398 /* We only need to wrap lvalue tree codes. */
2399 switch (TREE_CODE (x))
2400 {
2401 case VAR_DECL:
2402 case PARM_DECL:
2403 case RESULT_DECL:
2404 case LABEL_DECL:
2405 case FUNCTION_DECL:
2406 case SSA_NAME:
2407
2408 case COMPONENT_REF:
2409 case MEM_REF:
2410 case INDIRECT_REF:
2411 case ARRAY_REF:
2412 case ARRAY_RANGE_REF:
2413 case BIT_FIELD_REF:
2414 case OBJ_TYPE_REF:
2415
2416 case REALPART_EXPR:
2417 case IMAGPART_EXPR:
2418 case PREINCREMENT_EXPR:
2419 case PREDECREMENT_EXPR:
2420 case SAVE_EXPR:
2421 case TRY_CATCH_EXPR:
2422 case WITH_CLEANUP_EXPR:
2423 case COMPOUND_EXPR:
2424 case MODIFY_EXPR:
2425 case TARGET_EXPR:
2426 case COND_EXPR:
2427 case BIND_EXPR:
2428 break;
2429
2430 default:
2431 /* Assume the worst for front-end tree codes. */
2432 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2433 break;
2434 return false;
2435 }
2436
2437 return true;
2438 }
2439
2440 /* Return an expr equal to X but certainly not valid as an lvalue. */
2441
2442 tree
2443 non_lvalue_loc (location_t loc, tree x)
2444 {
2445 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2446 us. */
2447 if (in_gimple_form)
2448 return x;
2449
2450 if (! maybe_lvalue_p (x))
2451 return x;
2452 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2453 }
2454
2455 /* Return X with its source location set to LOC. (Historically, when
2456 pedantic, this also made X invalid as a pedantic lvalue.) */
2457
2458 static tree
2459 pedantic_non_lvalue_loc (location_t loc, tree x)
2460 {
2461 return protected_set_expr_location_unshare (x, loc);
2462 }
2463 \f
2464 /* Given a tree comparison code, return the code that is the logical inverse.
2465 It is generally not safe to do this for floating-point comparisons, except
2466 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2467 ERROR_MARK in this case. */
2468
2469 enum tree_code
2470 invert_tree_comparison (enum tree_code code, bool honor_nans)
2471 {
2472 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2473 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2474 return ERROR_MARK;
2475
2476 switch (code)
2477 {
2478 case EQ_EXPR:
2479 return NE_EXPR;
2480 case NE_EXPR:
2481 return EQ_EXPR;
2482 case GT_EXPR:
2483 return honor_nans ? UNLE_EXPR : LE_EXPR;
2484 case GE_EXPR:
2485 return honor_nans ? UNLT_EXPR : LT_EXPR;
2486 case LT_EXPR:
2487 return honor_nans ? UNGE_EXPR : GE_EXPR;
2488 case LE_EXPR:
2489 return honor_nans ? UNGT_EXPR : GT_EXPR;
2490 case LTGT_EXPR:
2491 return UNEQ_EXPR;
2492 case UNEQ_EXPR:
2493 return LTGT_EXPR;
2494 case UNGT_EXPR:
2495 return LE_EXPR;
2496 case UNGE_EXPR:
2497 return LT_EXPR;
2498 case UNLT_EXPR:
2499 return GE_EXPR;
2500 case UNLE_EXPR:
2501 return GT_EXPR;
2502 case ORDERED_EXPR:
2503 return UNORDERED_EXPR;
2504 case UNORDERED_EXPR:
2505 return ORDERED_EXPR;
2506 default:
2507 gcc_unreachable ();
2508 }
2509 }
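
/* Illustrative sketch, not part of the original source: why the inverse
   of LT_EXPR depends on HONOR_NANS.  The helper name is hypothetical,
   and -fno-trapping-math is assumed for the honor_nans case (with
   trapping math the function returns ERROR_MARK instead).  */
#if 0
static void
example_invert_lt (void)
{
  /* When either operand may be a NaN, both x < y and x >= y are false,
     so GE_EXPR would not be a correct inverse; UNGE_EXPR is.  */
  enum tree_code with_nans = invert_tree_comparison (LT_EXPR, true);
  enum tree_code no_nans = invert_tree_comparison (LT_EXPR, false);
  gcc_assert (with_nans == UNGE_EXPR && no_nans == GE_EXPR);
}
#endif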
2510
2511 /* Similar, but return the comparison that results if the operands are
2512 swapped. This is safe for floating-point. */
2513
2514 enum tree_code
2515 swap_tree_comparison (enum tree_code code)
2516 {
2517 switch (code)
2518 {
2519 case EQ_EXPR:
2520 case NE_EXPR:
2521 case ORDERED_EXPR:
2522 case UNORDERED_EXPR:
2523 case LTGT_EXPR:
2524 case UNEQ_EXPR:
2525 return code;
2526 case GT_EXPR:
2527 return LT_EXPR;
2528 case GE_EXPR:
2529 return LE_EXPR;
2530 case LT_EXPR:
2531 return GT_EXPR;
2532 case LE_EXPR:
2533 return GE_EXPR;
2534 case UNGT_EXPR:
2535 return UNLT_EXPR;
2536 case UNGE_EXPR:
2537 return UNLE_EXPR;
2538 case UNLT_EXPR:
2539 return UNGT_EXPR;
2540 case UNLE_EXPR:
2541 return UNGE_EXPR;
2542 default:
2543 gcc_unreachable ();
2544 }
2545 }
2546
2547
2548 /* Convert a comparison tree code from an enum tree_code representation
2549 into a compcode bit-based encoding. This function is the inverse of
2550 compcode_to_comparison. */
2551
2552 static enum comparison_code
2553 comparison_to_compcode (enum tree_code code)
2554 {
2555 switch (code)
2556 {
2557 case LT_EXPR:
2558 return COMPCODE_LT;
2559 case EQ_EXPR:
2560 return COMPCODE_EQ;
2561 case LE_EXPR:
2562 return COMPCODE_LE;
2563 case GT_EXPR:
2564 return COMPCODE_GT;
2565 case NE_EXPR:
2566 return COMPCODE_NE;
2567 case GE_EXPR:
2568 return COMPCODE_GE;
2569 case ORDERED_EXPR:
2570 return COMPCODE_ORD;
2571 case UNORDERED_EXPR:
2572 return COMPCODE_UNORD;
2573 case UNLT_EXPR:
2574 return COMPCODE_UNLT;
2575 case UNEQ_EXPR:
2576 return COMPCODE_UNEQ;
2577 case UNLE_EXPR:
2578 return COMPCODE_UNLE;
2579 case UNGT_EXPR:
2580 return COMPCODE_UNGT;
2581 case LTGT_EXPR:
2582 return COMPCODE_LTGT;
2583 case UNGE_EXPR:
2584 return COMPCODE_UNGE;
2585 default:
2586 gcc_unreachable ();
2587 }
2588 }
2589
2590 /* Convert a compcode bit-based encoding of a comparison operator back
2591 to GCC's enum tree_code representation. This function is the
2592 inverse of comparison_to_compcode. */
2593
2594 static enum tree_code
2595 compcode_to_comparison (enum comparison_code code)
2596 {
2597 switch (code)
2598 {
2599 case COMPCODE_LT:
2600 return LT_EXPR;
2601 case COMPCODE_EQ:
2602 return EQ_EXPR;
2603 case COMPCODE_LE:
2604 return LE_EXPR;
2605 case COMPCODE_GT:
2606 return GT_EXPR;
2607 case COMPCODE_NE:
2608 return NE_EXPR;
2609 case COMPCODE_GE:
2610 return GE_EXPR;
2611 case COMPCODE_ORD:
2612 return ORDERED_EXPR;
2613 case COMPCODE_UNORD:
2614 return UNORDERED_EXPR;
2615 case COMPCODE_UNLT:
2616 return UNLT_EXPR;
2617 case COMPCODE_UNEQ:
2618 return UNEQ_EXPR;
2619 case COMPCODE_UNLE:
2620 return UNLE_EXPR;
2621 case COMPCODE_UNGT:
2622 return UNGT_EXPR;
2623 case COMPCODE_LTGT:
2624 return LTGT_EXPR;
2625 case COMPCODE_UNGE:
2626 return UNGE_EXPR;
2627 default:
2628 gcc_unreachable ();
2629 }
2630 }
2631
2632 /* Return a tree for the comparison which is the combination of
2633 doing the AND or OR (depending on CODE) of the two operations LCODE
2634 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2635 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2636 if this makes the transformation invalid. */
2637
2638 tree
2639 combine_comparisons (location_t loc,
2640 enum tree_code code, enum tree_code lcode,
2641 enum tree_code rcode, tree truth_type,
2642 tree ll_arg, tree lr_arg)
2643 {
2644 bool honor_nans = HONOR_NANS (ll_arg);
2645 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2646 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2647 int compcode;
2648
2649 switch (code)
2650 {
2651 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2652 compcode = lcompcode & rcompcode;
2653 break;
2654
2655 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2656 compcode = lcompcode | rcompcode;
2657 break;
2658
2659 default:
2660 return NULL_TREE;
2661 }
2662
2663 if (!honor_nans)
2664 {
2665 /* Eliminate unordered comparisons, as well as LTGT and ORD
2666 which are not used unless the mode has NaNs. */
2667 compcode &= ~COMPCODE_UNORD;
2668 if (compcode == COMPCODE_LTGT)
2669 compcode = COMPCODE_NE;
2670 else if (compcode == COMPCODE_ORD)
2671 compcode = COMPCODE_TRUE;
2672 }
2673 else if (flag_trapping_math)
2674 {
2675 /* Check that the original operation and the optimized ones will trap
2676 under the same condition. */
2677 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2678 && (lcompcode != COMPCODE_EQ)
2679 && (lcompcode != COMPCODE_ORD);
2680 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2681 && (rcompcode != COMPCODE_EQ)
2682 && (rcompcode != COMPCODE_ORD);
2683 bool trap = (compcode & COMPCODE_UNORD) == 0
2684 && (compcode != COMPCODE_EQ)
2685 && (compcode != COMPCODE_ORD);
2686
2687 /* In a short-circuited boolean expression the LHS might be
2688 such that the RHS, if evaluated, will never trap. For
2689 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2690 if neither x nor y is NaN. (This is a mixed blessing: for
2691 example, the expression above will never trap, hence
2692 optimizing it to x < y would be invalid). */
2693 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2694 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2695 rtrap = false;
2696
2697 /* If the comparison was short-circuited, and only the RHS
2698 trapped, we may now generate a spurious trap. */
2699 if (rtrap && !ltrap
2700 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2701 return NULL_TREE;
2702
2703 /* If we changed the conditions that cause a trap, we lose. */
2704 if ((ltrap || rtrap) != trap)
2705 return NULL_TREE;
2706 }
2707
2708 if (compcode == COMPCODE_TRUE)
2709 return constant_boolean_node (true, truth_type);
2710 else if (compcode == COMPCODE_FALSE)
2711 return constant_boolean_node (false, truth_type);
2712 else
2713 {
2714 enum tree_code tcode;
2715
2716 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2717 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2718 }
2719 }
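
/* Illustrative sketch, not part of the original source: because each
   comparison outcome owns one bit of the encoding, ANDing or ORing the
   compcodes combines the predicates directly.  X and Y are hypothetical
   operands of the same type, as is the helper name.  */
#if 0
static tree
example_combine_lt_eq (location_t loc, tree x, tree y)
{
  /* COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, so this folds to x <= y.
     A TRUTH_ANDIF_EXPR of the same pair would combine to
     COMPCODE_FALSE and yield a constant false node instead.  */
  return combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
			      boolean_type_node, x, y);
}
#endif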
2720 \f
2721 /* Return nonzero if two operands (typically of the same tree node)
2722 are necessarily equal. FLAGS modifies behavior as follows:
2723
2724 If OEP_ONLY_CONST is set, only return nonzero for constants.
2725 This function tests whether the operands are indistinguishable;
2726 it does not test whether they are equal using C's == operation.
2727 The distinction is important for IEEE floating point, because
2728 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2729 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2730
2731 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2732 even though it may hold multiple values during a function.
2733 This is because a GCC tree node guarantees that nothing else is
2734 executed between the evaluation of its "operands" (which may often
2735 be evaluated in arbitrary order). Hence if the operands themselves
2736 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2737 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2738 unset means assuming isochronic (or instantaneous) tree equivalence.
2739 Unless comparing arbitrary expression trees, such as from different
2740 statements, this flag can usually be left unset.
2741
2742 If OEP_PURE_SAME is set, then pure functions with identical arguments
2743 are considered the same. It is used when the caller has other ways
2744 to ensure that global memory is unchanged in between.
2745
2746 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2747 not values of expressions.
2748
2749 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2750 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2751
2752 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2753 any operand with side effects. This is unnecessarily conservative when
2754 we know that ARG0 and ARG1 are in disjoint code paths (such as the two
2755 arms of a ?: operator). In addition, OEP_MATCH_SIDE_EFFECTS is used when comparing
2756 addresses with TREE_CONSTANT flag set so we know that &var == &var
2757 even if var is volatile. */
2758
2759 int
2760 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2761 {
2762 /* When checking, verify at the outermost operand_equal_p call that
2763 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2764 hash value. */
2765 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2766 {
2767 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2768 {
2769 if (arg0 != arg1)
2770 {
2771 inchash::hash hstate0 (0), hstate1 (0);
2772 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2773 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2774 hashval_t h0 = hstate0.end ();
2775 hashval_t h1 = hstate1.end ();
2776 gcc_assert (h0 == h1);
2777 }
2778 return 1;
2779 }
2780 else
2781 return 0;
2782 }
2783
2784 /* If either is ERROR_MARK, they aren't equal. */
2785 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2786 || TREE_TYPE (arg0) == error_mark_node
2787 || TREE_TYPE (arg1) == error_mark_node)
2788 return 0;
2789
2790 /* Similarly, if either does not have a type (like a released SSA name),
2791 they aren't equal. */
2792 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2793 return 0;
2794
2795 /* We cannot consider pointers to different address spaces equal. */
2796 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2797 && POINTER_TYPE_P (TREE_TYPE (arg1))
2798 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2799 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2800 return 0;
2801
2802 /* Check equality of integer constants before bailing out due to
2803 precision differences. */
2804 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2805 {
2806 /* Address of INTEGER_CST is not defined; check that we did not forget
2807 to drop the OEP_ADDRESS_OF flags. */
2808 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2809 return tree_int_cst_equal (arg0, arg1);
2810 }
2811
2812 if (!(flags & OEP_ADDRESS_OF))
2813 {
2814 /* If the two types don't have the same signedness, then we can't consider
2815 them equal. We must check this before the STRIP_NOPS calls
2816 because they may change the signedness of the arguments. As pointers
2817 strictly don't have a signedness, require either two pointers or
2818 two non-pointers as well. */
2819 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2820 || POINTER_TYPE_P (TREE_TYPE (arg0))
2821 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2822 return 0;
2823
2824 /* If the two types don't have the same precision, then it is not safe
2825 to strip NOPs. */
2826 if (element_precision (TREE_TYPE (arg0))
2827 != element_precision (TREE_TYPE (arg1)))
2828 return 0;
2829
2830 STRIP_NOPS (arg0);
2831 STRIP_NOPS (arg1);
2832 }
2833 #if 0
2834 /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2835 sanity check once the issue is solved. */
2836 else
2837 /* Addresses of conversions and SSA_NAMEs (and many other things)
2838 are not defined. Check that we did not forget to drop the
2839 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2840 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2841 && TREE_CODE (arg0) != SSA_NAME);
2842 #endif
2843
2844 /* In case both args are comparisons but with different comparison
2845 code, try to swap the comparison operands of one arg to produce
2846 a match and compare that variant. */
2847 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2848 && COMPARISON_CLASS_P (arg0)
2849 && COMPARISON_CLASS_P (arg1))
2850 {
2851 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2852
2853 if (TREE_CODE (arg0) == swap_code)
2854 return operand_equal_p (TREE_OPERAND (arg0, 0),
2855 TREE_OPERAND (arg1, 1), flags)
2856 && operand_equal_p (TREE_OPERAND (arg0, 1),
2857 TREE_OPERAND (arg1, 0), flags);
2858 }
2859
2860 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2861 {
2862 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2863 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2864 ;
2865 else if (flags & OEP_ADDRESS_OF)
2866 {
2867 /* If we are interested in comparing addresses ignore
2868 MEM_REF wrappings of the base that can appear just for
2869 TBAA reasons. */
2870 if (TREE_CODE (arg0) == MEM_REF
2871 && DECL_P (arg1)
2872 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2873 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2874 && integer_zerop (TREE_OPERAND (arg0, 1)))
2875 return 1;
2876 else if (TREE_CODE (arg1) == MEM_REF
2877 && DECL_P (arg0)
2878 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2879 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2880 && integer_zerop (TREE_OPERAND (arg1, 1)))
2881 return 1;
2882 return 0;
2883 }
2884 else
2885 return 0;
2886 }
2887
2888 /* When not checking addresses, this is needed for conversions and for
2889 COMPONENT_REF. Might as well play it safe and always test this. */
2890 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2891 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2892 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2893 && !(flags & OEP_ADDRESS_OF)))
2894 return 0;
2895
2896 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2897 We don't care about side effects in that case because the SAVE_EXPR
2898 takes care of that for us. In all other cases, two expressions are
2899 equal if they have no side effects. If we have two identical
2900 expressions with side effects that should be treated the same due
2901 to the only side effects being identical SAVE_EXPR's, that will
2902 be detected in the recursive calls below.
2903 If we are taking an invariant address of two identical objects
2904 they are necessarily equal as well. */
2905 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2906 && (TREE_CODE (arg0) == SAVE_EXPR
2907 || (flags & OEP_MATCH_SIDE_EFFECTS)
2908 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2909 return 1;
2910
2911 /* Next handle constant cases, those for which we can return 1 even
2912 if ONLY_CONST is set. */
2913 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2914 switch (TREE_CODE (arg0))
2915 {
2916 case INTEGER_CST:
2917 return tree_int_cst_equal (arg0, arg1);
2918
2919 case FIXED_CST:
2920 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2921 TREE_FIXED_CST (arg1));
2922
2923 case REAL_CST:
2924 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2925 return 1;
2926
2927
2928 if (!HONOR_SIGNED_ZEROS (arg0))
2929 {
2930 /* If we do not distinguish between signed and unsigned zero,
2931 consider them equal. */
2932 if (real_zerop (arg0) && real_zerop (arg1))
2933 return 1;
2934 }
2935 return 0;
2936
2937 case VECTOR_CST:
2938 {
2939 unsigned i;
2940
2941 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2942 return 0;
2943
2944 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2945 {
2946 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2947 VECTOR_CST_ELT (arg1, i), flags))
2948 return 0;
2949 }
2950 return 1;
2951 }
2952
2953 case COMPLEX_CST:
2954 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2955 flags)
2956 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2957 flags));
2958
2959 case STRING_CST:
2960 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2961 && ! memcmp (TREE_STRING_POINTER (arg0),
2962 TREE_STRING_POINTER (arg1),
2963 TREE_STRING_LENGTH (arg0)));
2964
2965 case ADDR_EXPR:
2966 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2967 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2968 flags | OEP_ADDRESS_OF
2969 | OEP_MATCH_SIDE_EFFECTS);
2970 case CONSTRUCTOR:
2971 /* In GIMPLE empty constructors are allowed in initializers of
2972 aggregates. */
2973 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
2974 default:
2975 break;
2976 }
2977
2978 if (flags & OEP_ONLY_CONST)
2979 return 0;
2980
2981 /* Define macros to test an operand from arg0 and arg1 for equality and a
2982 variant that allows null and views null as being different from any
2983 non-null value. In the latter case, if either is null, then both
2984 must be; otherwise, do the normal comparison. */
2985 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2986 TREE_OPERAND (arg1, N), flags)
2987
2988 #define OP_SAME_WITH_NULL(N) \
2989 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2990 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2991
2992 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2993 {
2994 case tcc_unary:
2995 /* Two conversions are equal only if signedness and modes match. */
2996 switch (TREE_CODE (arg0))
2997 {
2998 CASE_CONVERT:
2999 case FIX_TRUNC_EXPR:
3000 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3001 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3002 return 0;
3003 break;
3004 default:
3005 break;
3006 }
3007
3008 return OP_SAME (0);
3009
3010
3011 case tcc_comparison:
3012 case tcc_binary:
3013 if (OP_SAME (0) && OP_SAME (1))
3014 return 1;
3015
3016 /* For commutative ops, allow the other order. */
3017 return (commutative_tree_code (TREE_CODE (arg0))
3018 && operand_equal_p (TREE_OPERAND (arg0, 0),
3019 TREE_OPERAND (arg1, 1), flags)
3020 && operand_equal_p (TREE_OPERAND (arg0, 1),
3021 TREE_OPERAND (arg1, 0), flags));
3022
3023 case tcc_reference:
3024 /* If either of the pointer (or reference) expressions we are
3025 dereferencing contains a side effect, these cannot be equal,
3026 but their addresses can be. */
3027 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3028 && (TREE_SIDE_EFFECTS (arg0)
3029 || TREE_SIDE_EFFECTS (arg1)))
3030 return 0;
3031
3032 switch (TREE_CODE (arg0))
3033 {
3034 case INDIRECT_REF:
3035 if (!(flags & OEP_ADDRESS_OF)
3036 && (TYPE_ALIGN (TREE_TYPE (arg0))
3037 != TYPE_ALIGN (TREE_TYPE (arg1))))
3038 return 0;
3039 flags &= ~OEP_ADDRESS_OF;
3040 return OP_SAME (0);
3041
3042 case IMAGPART_EXPR:
3043 /* Require the same offset. */
3044 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3045 TYPE_SIZE (TREE_TYPE (arg1)),
3046 flags & ~OEP_ADDRESS_OF))
3047 return 0;
3048
3049 /* Fallthru. */
3050 case REALPART_EXPR:
3051 case VIEW_CONVERT_EXPR:
3052 return OP_SAME (0);
3053
3054 case TARGET_MEM_REF:
3055 case MEM_REF:
3056 if (!(flags & OEP_ADDRESS_OF))
3057 {
3058 /* Require equal access sizes. */
3059 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3060 && (!TYPE_SIZE (TREE_TYPE (arg0))
3061 || !TYPE_SIZE (TREE_TYPE (arg1))
3062 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3063 TYPE_SIZE (TREE_TYPE (arg1)),
3064 flags)))
3065 return 0;
3066 /* Verify that access happens in similar types. */
3067 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3068 return 0;
3069 /* Verify that accesses are TBAA compatible. */
3070 if (!alias_ptr_types_compatible_p
3071 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3072 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3073 || (MR_DEPENDENCE_CLIQUE (arg0)
3074 != MR_DEPENDENCE_CLIQUE (arg1))
3075 || (MR_DEPENDENCE_BASE (arg0)
3076 != MR_DEPENDENCE_BASE (arg1)))
3077 return 0;
3078 /* Verify that alignment is compatible. */
3079 if (TYPE_ALIGN (TREE_TYPE (arg0))
3080 != TYPE_ALIGN (TREE_TYPE (arg1)))
3081 return 0;
3082 }
3083 flags &= ~OEP_ADDRESS_OF;
3084 return (OP_SAME (0) && OP_SAME (1)
3085 /* TARGET_MEM_REF requires equal extra operands. */
3086 && (TREE_CODE (arg0) != TARGET_MEM_REF
3087 || (OP_SAME_WITH_NULL (2)
3088 && OP_SAME_WITH_NULL (3)
3089 && OP_SAME_WITH_NULL (4))));
3090
3091 case ARRAY_REF:
3092 case ARRAY_RANGE_REF:
3093 if (!OP_SAME (0))
3094 return 0;
3095 flags &= ~OEP_ADDRESS_OF;
3096 /* First compare the array index by value, if it is constant, as the
3097 indices may have different types but the same value here. */
3098 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3099 TREE_OPERAND (arg1, 1))
3100 || OP_SAME (1))
3101 && OP_SAME_WITH_NULL (2)
3102 && OP_SAME_WITH_NULL (3)
3103 /* Compare low bound and element size as with OEP_ADDRESS_OF
3104 we have to account for the offset of the ref. */
3105 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3106 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3107 || (operand_equal_p (array_ref_low_bound
3108 (CONST_CAST_TREE (arg0)),
3109 array_ref_low_bound
3110 (CONST_CAST_TREE (arg1)), flags)
3111 && operand_equal_p (array_ref_element_size
3112 (CONST_CAST_TREE (arg0)),
3113 array_ref_element_size
3114 (CONST_CAST_TREE (arg1)),
3115 flags))));
3116
3117 case COMPONENT_REF:
3118 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3119 may be NULL when we're called to compare MEM_EXPRs. */
3120 if (!OP_SAME_WITH_NULL (0)
3121 || !OP_SAME (1))
3122 return 0;
3123 flags &= ~OEP_ADDRESS_OF;
3124 return OP_SAME_WITH_NULL (2);
3125
3126 case BIT_FIELD_REF:
3127 if (!OP_SAME (0))
3128 return 0;
3129 flags &= ~OEP_ADDRESS_OF;
3130 return OP_SAME (1) && OP_SAME (2);
3131
3132 default:
3133 return 0;
3134 }
3135
3136 case tcc_expression:
3137 switch (TREE_CODE (arg0))
3138 {
3139 case ADDR_EXPR:
3140 /* Be sure we pass the right ADDRESS_OF flag. */
3141 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3142 return operand_equal_p (TREE_OPERAND (arg0, 0),
3143 TREE_OPERAND (arg1, 0),
3144 flags | OEP_ADDRESS_OF);
3145
3146 case TRUTH_NOT_EXPR:
3147 return OP_SAME (0);
3148
3149 case TRUTH_ANDIF_EXPR:
3150 case TRUTH_ORIF_EXPR:
3151 return OP_SAME (0) && OP_SAME (1);
3152
3153 case FMA_EXPR:
3154 case WIDEN_MULT_PLUS_EXPR:
3155 case WIDEN_MULT_MINUS_EXPR:
3156 if (!OP_SAME (2))
3157 return 0;
3158 /* The multiplication operands are commutative. */
3159 /* FALLTHRU */
3160
3161 case TRUTH_AND_EXPR:
3162 case TRUTH_OR_EXPR:
3163 case TRUTH_XOR_EXPR:
3164 if (OP_SAME (0) && OP_SAME (1))
3165 return 1;
3166
3167 /* Otherwise take into account this is a commutative operation. */
3168 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3169 TREE_OPERAND (arg1, 1), flags)
3170 && operand_equal_p (TREE_OPERAND (arg0, 1),
3171 TREE_OPERAND (arg1, 0), flags));
3172
3173 case COND_EXPR:
3174 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3175 return 0;
3176 flags &= ~OEP_ADDRESS_OF;
3177 return OP_SAME (0);
3178
3179 case BIT_INSERT_EXPR:
3180 /* BIT_INSERT_EXPR has an implicit operand in the type precision
3181 of op1; we need to check that they are the same. */
3182 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3183 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3184 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3185 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3186 return false;
3187 /* FALLTHRU */
3188
3189 case VEC_COND_EXPR:
3190 case DOT_PROD_EXPR:
3191 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3192
3193 case MODIFY_EXPR:
3194 case INIT_EXPR:
3195 case COMPOUND_EXPR:
3196 case PREDECREMENT_EXPR:
3197 case PREINCREMENT_EXPR:
3198 case POSTDECREMENT_EXPR:
3199 case POSTINCREMENT_EXPR:
3200 if (flags & OEP_LEXICOGRAPHIC)
3201 return OP_SAME (0) && OP_SAME (1);
3202 return 0;
3203
3204 case CLEANUP_POINT_EXPR:
3205 case EXPR_STMT:
3206 if (flags & OEP_LEXICOGRAPHIC)
3207 return OP_SAME (0);
3208 return 0;
3209
3210 default:
3211 return 0;
3212 }
3213
3214 case tcc_vl_exp:
3215 switch (TREE_CODE (arg0))
3216 {
3217 case CALL_EXPR:
3218 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3219 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3220 /* If one CALL_EXPR is an internal function call and the other a
3221 normal function call, then they are not equal. */
3222 return 0;
3223 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3224 {
3225 /* If the CALL_EXPRs call different internal functions, then they
3226 are not equal. */
3227 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3228 return 0;
3229 }
3230 else
3231 {
3232 /* If the CALL_EXPRs call different functions, then they are not
3233 equal. */
3234 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3235 flags))
3236 return 0;
3237 }
3238
3239 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3240 {
3241 unsigned int cef = call_expr_flags (arg0);
3242 if (flags & OEP_PURE_SAME)
3243 cef &= ECF_CONST | ECF_PURE;
3244 else
3245 cef &= ECF_CONST;
3246 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3247 return 0;
3248 }
3249
3250 /* Now see if all the arguments are the same. */
3251 {
3252 const_call_expr_arg_iterator iter0, iter1;
3253 const_tree a0, a1;
3254 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3255 a1 = first_const_call_expr_arg (arg1, &iter1);
3256 a0 && a1;
3257 a0 = next_const_call_expr_arg (&iter0),
3258 a1 = next_const_call_expr_arg (&iter1))
3259 if (! operand_equal_p (a0, a1, flags))
3260 return 0;
3261
3262 /* If we get here and both argument lists are exhausted
3263 then the CALL_EXPRs are equal. */
3264 return ! (a0 || a1);
3265 }
3266 default:
3267 return 0;
3268 }
3269
3270 case tcc_declaration:
3271 /* Consider __builtin_sqrt equal to sqrt. */
3272 return (TREE_CODE (arg0) == FUNCTION_DECL
3273 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3274 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3275 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3276
3277 case tcc_exceptional:
3278 if (TREE_CODE (arg0) == CONSTRUCTOR)
3279 {
3280 /* In GIMPLE constructors are used only to build vectors from
3281 elements. Individual elements in the constructor must be
3282 indexed in increasing order and form an initial sequence.
3283
3284 We make no effort to compare constructors in GENERIC.
3285 (see sem_variable::equals in ipa-icf which can do so for
3286 constants). */
3287 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3288 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3289 return 0;
3290
3291 /* Be sure that the constructed vectors have the same representation.
3292 So far we have only tested that element precisions and modes match.
3293 Vectors may be BLKmode, so also check that the numbers of
3294 parts match. */
3295 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3296 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3297 return 0;
3298
3299 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3300 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3301 unsigned int len = vec_safe_length (v0);
3302
3303 if (len != vec_safe_length (v1))
3304 return 0;
3305
3306 for (unsigned int i = 0; i < len; i++)
3307 {
3308 constructor_elt *c0 = &(*v0)[i];
3309 constructor_elt *c1 = &(*v1)[i];
3310
3311 if (!operand_equal_p (c0->value, c1->value, flags)
3312 /* In GIMPLE the indexes can be either NULL or matching i.
3313 Double check this so we won't get false
3314 positives for GENERIC. */
3315 || (c0->index
3316 && (TREE_CODE (c0->index) != INTEGER_CST
3317 || !compare_tree_int (c0->index, i)))
3318 || (c1->index
3319 && (TREE_CODE (c1->index) != INTEGER_CST
3320 || !compare_tree_int (c1->index, i))))
3321 return 0;
3322 }
3323 return 1;
3324 }
3325 else if (TREE_CODE (arg0) == STATEMENT_LIST
3326 && (flags & OEP_LEXICOGRAPHIC))
3327 {
3328 /* Compare the STATEMENT_LISTs. */
3329 tree_stmt_iterator tsi1, tsi2;
3330 tree body1 = CONST_CAST_TREE (arg0);
3331 tree body2 = CONST_CAST_TREE (arg1);
3332 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3333 tsi_next (&tsi1), tsi_next (&tsi2))
3334 {
3335 /* The lists don't have the same number of statements. */
3336 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3337 return 0;
3338 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3339 return 1;
3340 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3341 OEP_LEXICOGRAPHIC))
3342 return 0;
3343 }
3344 }
3345 return 0;
3346
3347 case tcc_statement:
3348 switch (TREE_CODE (arg0))
3349 {
3350 case RETURN_EXPR:
3351 if (flags & OEP_LEXICOGRAPHIC)
3352 return OP_SAME_WITH_NULL (0);
3353 return 0;
3354 default:
3355 return 0;
3356 }
3357
3358 default:
3359 return 0;
3360 }
3361
3362 #undef OP_SAME
3363 #undef OP_SAME_WITH_NULL
3364 }
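
/* Illustrative sketch, not part of the original source: the constant
   cases above in action.  The helper name is hypothetical.  */
#if 0
static int
example_operand_equality (void)
{
  tree a = build_int_cst (integer_type_node, 7);
  tree b = build_int_cst (integer_type_node, 7);
  /* Distinct INTEGER_CST nodes with equal values compare equal.  The
     REAL_CST case is stricter: -0.0 and 0.0 only compare equal when
     signed zeros are not honored.  */
  return operand_equal_p (a, b, OEP_ONLY_CONST);
}
#endif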
3365 \f
3366 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3367 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3368
3369 When in doubt, return 0. */
3370
3371 static int
3372 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3373 {
3374 int unsignedp1, unsignedpo;
3375 tree primarg0, primarg1, primother;
3376 unsigned int correct_width;
3377
3378 if (operand_equal_p (arg0, arg1, 0))
3379 return 1;
3380
3381 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3382 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3383 return 0;
3384
3385 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3386 and see if the inner values are the same. This removes any
3387 signedness comparison, which doesn't matter here. */
3388 primarg0 = arg0, primarg1 = arg1;
3389 STRIP_NOPS (primarg0);
3390 STRIP_NOPS (primarg1);
3391 if (operand_equal_p (primarg0, primarg1, 0))
3392 return 1;
3393
3394 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3395 actual comparison operand, ARG0.
3396
3397 First throw away any conversions to wider types
3398 already present in the operands. */
3399
3400 primarg1 = get_narrower (arg1, &unsignedp1);
3401 primother = get_narrower (other, &unsignedpo);
3402
3403 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3404 if (unsignedp1 == unsignedpo
3405 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3406 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3407 {
3408 tree type = TREE_TYPE (arg0);
3409
3410 /* Make sure the shorter operand is extended the right way
3411 to match the longer operand. */
3412 primarg1 = fold_convert (signed_or_unsigned_type_for
3413 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3414
3415 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3416 return 1;
3417 }
3418
3419 return 0;
3420 }
3421 \f
3422 /* See if ARG is an expression that is either a comparison or is performing
3423 arithmetic on comparisons. The comparisons must only be comparing
3424 two different values, which will be stored in *CVAL1 and *CVAL2; if
3425 they are nonzero it means that some operands have already been found.
3426 No variables may be used anywhere else in the expression except in the
3427 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3428 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3429
3430 If this is true, return 1. Otherwise, return zero. */
3431
3432 static int
3433 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3434 {
3435 enum tree_code code = TREE_CODE (arg);
3436 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3437
3438 /* We can handle some of the tcc_expression cases here. */
3439 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3440 tclass = tcc_unary;
3441 else if (tclass == tcc_expression
3442 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3443 || code == COMPOUND_EXPR))
3444 tclass = tcc_binary;
3445
3446 else if (tclass == tcc_expression && code == SAVE_EXPR
3447 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3448 {
3449 /* If we've already found a CVAL1 or CVAL2, this expression is
3450 too complex to handle. */
3451 if (*cval1 || *cval2)
3452 return 0;
3453
3454 tclass = tcc_unary;
3455 *save_p = 1;
3456 }
3457
3458 switch (tclass)
3459 {
3460 case tcc_unary:
3461 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3462
3463 case tcc_binary:
3464 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3465 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3466 cval1, cval2, save_p));
3467
3468 case tcc_constant:
3469 return 1;
3470
3471 case tcc_expression:
3472 if (code == COND_EXPR)
3473 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3474 cval1, cval2, save_p)
3475 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3476 cval1, cval2, save_p)
3477 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3478 cval1, cval2, save_p));
3479 return 0;
3480
3481 case tcc_comparison:
3482 /* First see if we can handle the first operand, then the second. For
3483 the second operand, we know *CVAL1 can't be zero. It must be that
3484 one side of the comparison is each of the values; test for the
3485 case where this isn't true by failing if the two operands
3486 are the same. */
3487
3488 if (operand_equal_p (TREE_OPERAND (arg, 0),
3489 TREE_OPERAND (arg, 1), 0))
3490 return 0;
3491
3492 if (*cval1 == 0)
3493 *cval1 = TREE_OPERAND (arg, 0);
3494 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3495 ;
3496 else if (*cval2 == 0)
3497 *cval2 = TREE_OPERAND (arg, 0);
3498 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3499 ;
3500 else
3501 return 0;
3502
3503 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3504 ;
3505 else if (*cval2 == 0)
3506 *cval2 = TREE_OPERAND (arg, 1);
3507 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3508 ;
3509 else
3510 return 0;
3511
3512 return 1;
3513
3514 default:
3515 return 0;
3516 }
3517 }
3518 \f
3519 /* ARG is a tree that is known to contain just arithmetic operations and
3520 comparisons. Evaluate the operations in the tree substituting NEW0 for
3521 any occurrence of OLD0 as an operand of a comparison and likewise for
3522 NEW1 and OLD1. */
3523
3524 static tree
3525 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3526 tree old1, tree new1)
3527 {
3528 tree type = TREE_TYPE (arg);
3529 enum tree_code code = TREE_CODE (arg);
3530 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3531
3532 /* We can handle some of the tcc_expression cases here. */
3533 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3534 tclass = tcc_unary;
3535 else if (tclass == tcc_expression
3536 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3537 tclass = tcc_binary;
3538
3539 switch (tclass)
3540 {
3541 case tcc_unary:
3542 return fold_build1_loc (loc, code, type,
3543 eval_subst (loc, TREE_OPERAND (arg, 0),
3544 old0, new0, old1, new1));
3545
3546 case tcc_binary:
3547 return fold_build2_loc (loc, code, type,
3548 eval_subst (loc, TREE_OPERAND (arg, 0),
3549 old0, new0, old1, new1),
3550 eval_subst (loc, TREE_OPERAND (arg, 1),
3551 old0, new0, old1, new1));
3552
3553 case tcc_expression:
3554 switch (code)
3555 {
3556 case SAVE_EXPR:
3557 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3558 old1, new1);
3559
3560 case COMPOUND_EXPR:
3561 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3562 old1, new1);
3563
3564 case COND_EXPR:
3565 return fold_build3_loc (loc, code, type,
3566 eval_subst (loc, TREE_OPERAND (arg, 0),
3567 old0, new0, old1, new1),
3568 eval_subst (loc, TREE_OPERAND (arg, 1),
3569 old0, new0, old1, new1),
3570 eval_subst (loc, TREE_OPERAND (arg, 2),
3571 old0, new0, old1, new1));
3572 default:
3573 break;
3574 }
3575 /* Fall through - ??? */
3576
3577 case tcc_comparison:
3578 {
3579 tree arg0 = TREE_OPERAND (arg, 0);
3580 tree arg1 = TREE_OPERAND (arg, 1);
3581
3582 /* We need to check both for exact equality and tree equality. The
3583 former will be true if the operand has a side-effect. In that
3584 case, we know the operand occurred exactly once. */
3585
3586 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3587 arg0 = new0;
3588 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3589 arg0 = new1;
3590
3591 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3592 arg1 = new0;
3593 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3594 arg1 = new1;
3595
3596 return fold_build2_loc (loc, code, type, arg0, arg1);
3597 }
3598
3599 default:
3600 return arg;
3601 }
3602 }
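
/* Worked example, not part of the original source: with
   ARG = (a < b) && (b < a), OLD0 = a, NEW0 = x, OLD1 = b and NEW1 = y,
   eval_subst rebuilds the tree as (x < y) && (y < x).  Only operands of
   the comparisons themselves are replaced, matched either by pointer
   identity or by operand_equal_p.  */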
3603 \f
3604 /* Return a tree for the case when the result of an expression is RESULT
3605 converted to TYPE and OMITTED was previously an operand of the expression
3606 but is now not needed (e.g., we folded OMITTED * 0).
3607
3608 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3609 the conversion of RESULT to TYPE. */
3610
3611 tree
3612 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3613 {
3614 tree t = fold_convert_loc (loc, type, result);
3615
3616 /* If the resulting operand is an empty statement, just return the omitted
3617 statement cast to void. */
3618 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3619 return build1_loc (loc, NOP_EXPR, void_type_node,
3620 fold_ignored_result (omitted));
3621
3622 if (TREE_SIDE_EFFECTS (omitted))
3623 return build2_loc (loc, COMPOUND_EXPR, type,
3624 fold_ignored_result (omitted), t);
3625
3626 return non_lvalue_loc (loc, t);
3627 }
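
/* Illustrative sketch, not part of the original source: folding
   f () * 0 where the call has side effects.  CALL and the helper name
   are hypothetical.  */
#if 0
static tree
example_omit_multiplicand (location_t loc, tree call)
{
  tree zero = build_int_cst (integer_type_node, 0);
  /* CALL must still be evaluated, so the result is the COMPOUND_EXPR
     (f (), 0) rather than a bare 0.  */
  return omit_one_operand_loc (loc, integer_type_node, zero, call);
}
#endif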
3628
3629 /* Return a tree for the case when the result of an expression is RESULT
3630 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3631 of the expression but are now not needed.
3632
3633 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3634 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3635 evaluated before OMITTED2. Otherwise, if neither has side effects,
3636 just do the conversion of RESULT to TYPE. */
3637
3638 tree
3639 omit_two_operands_loc (location_t loc, tree type, tree result,
3640 tree omitted1, tree omitted2)
3641 {
3642 tree t = fold_convert_loc (loc, type, result);
3643
3644 if (TREE_SIDE_EFFECTS (omitted2))
3645 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3646 if (TREE_SIDE_EFFECTS (omitted1))
3647 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3648
3649 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3650 }
3651
3652 \f
3653 /* Return a simplified tree node for the truth-negation of ARG. This
3654 never alters ARG itself. We assume that ARG is an operation that
3655 returns a truth value (0 or 1).
3656
3657 FIXME: one would think we would fold the result, but it causes
3658 problems with the dominator optimizer. */
3659
3660 static tree
3661 fold_truth_not_expr (location_t loc, tree arg)
3662 {
3663 tree type = TREE_TYPE (arg);
3664 enum tree_code code = TREE_CODE (arg);
3665 location_t loc1, loc2;
3666
3667 /* If this is a comparison, we can simply invert it, except for
3668 floating-point non-equality comparisons, in which case we just
3669 enclose a TRUTH_NOT_EXPR around what we have. */
3670
3671 if (TREE_CODE_CLASS (code) == tcc_comparison)
3672 {
3673 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3674 if (FLOAT_TYPE_P (op_type)
3675 && flag_trapping_math
3676 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3677 && code != NE_EXPR && code != EQ_EXPR)
3678 return NULL_TREE;
3679
3680 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3681 if (code == ERROR_MARK)
3682 return NULL_TREE;
3683
3684 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3685 TREE_OPERAND (arg, 1));
3686 if (TREE_NO_WARNING (arg))
3687 TREE_NO_WARNING (ret) = 1;
3688 return ret;
3689 }
3690
3691 switch (code)
3692 {
3693 case INTEGER_CST:
3694 return constant_boolean_node (integer_zerop (arg), type);
3695
3696 case TRUTH_AND_EXPR:
3697 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3698 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3699 return build2_loc (loc, TRUTH_OR_EXPR, type,
3700 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3701 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3702
3703 case TRUTH_OR_EXPR:
3704 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3705 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3706 return build2_loc (loc, TRUTH_AND_EXPR, type,
3707 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3708 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3709
3710 case TRUTH_XOR_EXPR:
3711 /* Here we can invert either operand. We invert the first operand
3712 unless the second operand is a TRUTH_NOT_EXPR, in which case our
3713 result is the XOR of the first operand with the inside of the
3714 negation of the second operand. */
3715
3716 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3717 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3718 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3719 else
3720 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3721 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3722 TREE_OPERAND (arg, 1));
3723
3724 case TRUTH_ANDIF_EXPR:
3725 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3726 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3727 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3728 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3729 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3730
3731 case TRUTH_ORIF_EXPR:
3732 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3733 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3734 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3735 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3736 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3737
3738 case TRUTH_NOT_EXPR:
3739 return TREE_OPERAND (arg, 0);
3740
3741 case COND_EXPR:
3742 {
3743 tree arg1 = TREE_OPERAND (arg, 1);
3744 tree arg2 = TREE_OPERAND (arg, 2);
3745
3746 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3747 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3748
3749 /* A COND_EXPR may have a throw as one operand, which
3750 then has void type. Just leave void operands
3751 as they are. */
3752 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3753 VOID_TYPE_P (TREE_TYPE (arg1))
3754 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3755 VOID_TYPE_P (TREE_TYPE (arg2))
3756 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3757 }
3758
3759 case COMPOUND_EXPR:
3760 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3761 return build2_loc (loc, COMPOUND_EXPR, type,
3762 TREE_OPERAND (arg, 0),
3763 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3764
3765 case NON_LVALUE_EXPR:
3766 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3767 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3768
3769 CASE_CONVERT:
3770 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3771 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3772
3773 /* fall through */
3774
3775 case FLOAT_EXPR:
3776 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3777 return build1_loc (loc, TREE_CODE (arg), type,
3778 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3779
3780 case BIT_AND_EXPR:
3781 if (!integer_onep (TREE_OPERAND (arg, 1)))
3782 return NULL_TREE;
3783 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3784
3785 case SAVE_EXPR:
3786 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3787
3788 case CLEANUP_POINT_EXPR:
3789 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3790 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3791 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3792
3793 default:
3794 return NULL_TREE;
3795 }
3796 }
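
/* For illustration, the cases above implement the familiar source-level
   rewrites (hypothetical expressions):

     !(a && b)     ->  !a || !b
     !(a || b)     ->  !a && !b
     !(a < b)      ->  a >= b	(refused for floats under
				 -ftrapping-math, since inverting may
				 change behavior for NaN operands)
     !(a ? b : c)  ->  a ? !b : !c

   each recursing through invert_truthvalue_loc on the operands. */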
3797
3798 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3799 assume that ARG is an operation that returns a truth value (0 or 1
3800 for scalars, 0 or -1 for vectors). Return the folded expression if
3801 folding is successful. Otherwise, return NULL_TREE. */
3802
3803 static tree
3804 fold_invert_truthvalue (location_t loc, tree arg)
3805 {
3806 tree type = TREE_TYPE (arg);
3807 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3808 ? BIT_NOT_EXPR
3809 : TRUTH_NOT_EXPR,
3810 type, arg);
3811 }
3812
3813 /* Return a simplified tree node for the truth-negation of ARG. This
3814 never alters ARG itself. We assume that ARG is an operation that
3815 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3816
3817 tree
3818 invert_truthvalue_loc (location_t loc, tree arg)
3819 {
3820 if (TREE_CODE (arg) == ERROR_MARK)
3821 return arg;
3822
3823 tree type = TREE_TYPE (arg);
3824 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3825 ? BIT_NOT_EXPR
3826 : TRUTH_NOT_EXPR,
3827 type, arg);
3828 }
3829
3830 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3831 with code CODE. This optimization is unsafe. */
3832 static tree
3833 distribute_real_division (location_t loc, enum tree_code code, tree type,
3834 tree arg0, tree arg1)
3835 {
3836 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3837 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3838
3839 /* (A / C) +- (B / C) -> (A +- B) / C. */
3840 if (mul0 == mul1
3841 && operand_equal_p (TREE_OPERAND (arg0, 1),
3842 TREE_OPERAND (arg1, 1), 0))
3843 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3844 fold_build2_loc (loc, code, type,
3845 TREE_OPERAND (arg0, 0),
3846 TREE_OPERAND (arg1, 0)),
3847 TREE_OPERAND (arg0, 1));
3848
3849 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3850 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3851 TREE_OPERAND (arg1, 0), 0)
3852 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3853 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3854 {
3855 REAL_VALUE_TYPE r0, r1;
3856 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3857 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3858 if (!mul0)
3859 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3860 if (!mul1)
3861 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3862 real_arithmetic (&r0, code, &r0, &r1);
3863 return fold_build2_loc (loc, MULT_EXPR, type,
3864 TREE_OPERAND (arg0, 0),
3865 build_real (type, r0));
3866 }
3867
3868 return NULL_TREE;
3869 }
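
/* Illustrative effect (hypothetical values; unsafe because the result
   can differ in rounding from the original):

     a / c + b / c      ->  (a + b) / c
     x / 2.0 - x / 4.0  ->  x * (0.5 - 0.25)  =  x * 0.25

   trading two divisions for a single multiplication. */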
3870 \f
3871 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3872 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3873 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3874 is the original memory reference used to preserve the alias set of
3875 the access. */
3876
3877 static tree
3878 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3879 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3880 int unsignedp, int reversep)
3881 {
3882 tree result, bftype;
3883
3884 /* Attempt to preserve the access path if possible. */
3885 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3886 {
3887 tree ninner = TREE_OPERAND (orig_inner, 0);
3888 machine_mode nmode;
3889 HOST_WIDE_INT nbitsize, nbitpos;
3890 tree noffset;
3891 int nunsignedp, nreversep, nvolatilep = 0;
3892 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3893 &noffset, &nmode, &nunsignedp,
3894 &nreversep, &nvolatilep);
3895 if (base == inner
3896 && noffset == NULL_TREE
3897 && nbitsize >= bitsize
3898 && nbitpos <= bitpos
3899 && bitpos + bitsize <= nbitpos + nbitsize
3900 && !reversep
3901 && !nreversep
3902 && !nvolatilep)
3903 {
3904 inner = ninner;
3905 bitpos -= nbitpos;
3906 }
3907 }
3908
3909 alias_set_type iset = get_alias_set (orig_inner);
3910 if (iset == 0 && get_alias_set (inner) != iset)
3911 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3912 build_fold_addr_expr (inner),
3913 build_int_cst (ptr_type_node, 0));
3914
3915 if (bitpos == 0 && !reversep)
3916 {
3917 tree size = TYPE_SIZE (TREE_TYPE (inner));
3918 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3919 || POINTER_TYPE_P (TREE_TYPE (inner)))
3920 && tree_fits_shwi_p (size)
3921 && tree_to_shwi (size) == bitsize)
3922 return fold_convert_loc (loc, type, inner);
3923 }
3924
3925 bftype = type;
3926 if (TYPE_PRECISION (bftype) != bitsize
3927 || TYPE_UNSIGNED (bftype) == !unsignedp)
3928 bftype = build_nonstandard_integer_type (bitsize, 0);
3929
3930 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3931 bitsize_int (bitsize), bitsize_int (bitpos));
3932 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3933
3934 if (bftype != type)
3935 result = fold_convert_loc (loc, type, result);
3936
3937 return result;
3938 }
3939
3940 /* Optimize a bit-field compare.
3941
3942 There are two cases: the first is a compare against a constant, and the
3943 second is a comparison of two items where the fields are at the same
3944 bit position relative to the start of a chunk (byte, halfword, word)
3945 large enough to contain it. In these cases we can avoid the shift
3946 implicit in bitfield extractions.
3947
3948 For constants, we emit a compare of the shifted constant with the
3949 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3950 compared. For two fields at the same position, we do the ANDs with the
3951 similar mask and compare the result of the ANDs.
3952
3953 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3954 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3955 are the left and right operands of the comparison, respectively.
3956
3957 If the optimization described above can be done, we return the resulting
3958 tree. Otherwise we return zero. */
3959
3960 static tree
3961 optimize_bit_field_compare (location_t loc, enum tree_code code,
3962 tree compare_type, tree lhs, tree rhs)
3963 {
3964 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3965 tree type = TREE_TYPE (lhs);
3966 tree unsigned_type;
3967 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3968 machine_mode lmode, rmode, nmode;
3969 int lunsignedp, runsignedp;
3970 int lreversep, rreversep;
3971 int lvolatilep = 0, rvolatilep = 0;
3972 tree linner, rinner = NULL_TREE;
3973 tree mask;
3974 tree offset;
3975
3976 /* Get all the information about the extractions being done. If the bit size
3977 is the same as the size of the underlying object, we aren't doing an
3978 extraction at all and so can do nothing. We also don't want to
3979 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3980 then will no longer be able to replace it. */
3981 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3982 &lunsignedp, &lreversep, &lvolatilep);
3983 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3984 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3985 return 0;
3986
3987 if (const_p)
3988 rreversep = lreversep;
3989 else
3990 {
3991 /* If this is not a constant, we can only do something if bit positions,
3992 sizes, signedness and storage order are the same. */
3993 rinner
3994 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3995 &runsignedp, &rreversep, &rvolatilep);
3996
3997 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3998 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3999 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
4000 return 0;
4001 }
4002
4003 /* Honor the C++ memory model and mimic what RTL expansion does. */
4004 unsigned HOST_WIDE_INT bitstart = 0;
4005 unsigned HOST_WIDE_INT bitend = 0;
4006 if (TREE_CODE (lhs) == COMPONENT_REF)
4007 {
4008 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
4009 if (offset != NULL_TREE)
4010 return 0;
4011 }
4012
4013 /* See if we can find a mode to refer to this field. We should be able to,
4014 but fail if we can't. */
4015 nmode = get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4016 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4017 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4018 TYPE_ALIGN (TREE_TYPE (rinner))),
4019 word_mode, false);
4020 if (nmode == VOIDmode)
4021 return 0;
4022
4023 /* Set signed and unsigned types of the precision of this mode for the
4024 shifts below. */
4025 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4026
4027 /* Compute the bit position and size for the new reference and our offset
4028 within it. If the new reference is the same size as the original, we
4029 won't optimize anything, so return zero. */
4030 nbitsize = GET_MODE_BITSIZE (nmode);
4031 nbitpos = lbitpos & ~ (nbitsize - 1);
4032 lbitpos -= nbitpos;
4033 if (nbitsize == lbitsize)
4034 return 0;
4035
4036 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4037 lbitpos = nbitsize - lbitsize - lbitpos;
4038
4039 /* Make the mask to be used against the extracted field. */
4040 mask = build_int_cst_type (unsigned_type, -1);
4041 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4042 mask = const_binop (RSHIFT_EXPR, mask,
4043 size_int (nbitsize - lbitsize - lbitpos));
4044
4045 if (! const_p)
4046 /* If not comparing with a constant, just rework the comparison
4047 and return. */
4048 return fold_build2_loc (loc, code, compare_type,
4049 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4050 make_bit_field_ref (loc, linner, lhs,
4051 unsigned_type,
4052 nbitsize, nbitpos,
4053 1, lreversep),
4054 mask),
4055 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4056 make_bit_field_ref (loc, rinner, rhs,
4057 unsigned_type,
4058 nbitsize, nbitpos,
4059 1, rreversep),
4060 mask));
4061
4062 /* Otherwise, we are handling the constant case. See if the constant is too
4063 big for the field. Warn and return a tree for 0 (false) if so. We do
4064 this not only for its own sake, but to avoid having to test for this
4065 error case below. If we didn't, we might generate wrong code.
4066
4067 For unsigned fields, the constant shifted right by the field length should
4068 be all zero. For signed fields, the high-order bits should agree with
4069 the sign bit. */
4070
4071 if (lunsignedp)
4072 {
4073 if (wi::lrshift (rhs, lbitsize) != 0)
4074 {
4075 warning (0, "comparison is always %d due to width of bit-field",
4076 code == NE_EXPR);
4077 return constant_boolean_node (code == NE_EXPR, compare_type);
4078 }
4079 }
4080 else
4081 {
4082 wide_int tem = wi::arshift (rhs, lbitsize - 1);
4083 if (tem != 0 && tem != -1)
4084 {
4085 warning (0, "comparison is always %d due to width of bit-field",
4086 code == NE_EXPR);
4087 return constant_boolean_node (code == NE_EXPR, compare_type);
4088 }
4089 }
4090
4091 /* Single-bit compares should always be against zero. */
4092 if (lbitsize == 1 && ! integer_zerop (rhs))
4093 {
4094 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4095 rhs = build_int_cst (type, 0);
4096 }
4097
4098 /* Make a new bitfield reference, shift the constant over the
4099 appropriate number of bits, and mask it with the computed mask
4100 (in case this was a signed field). */
4101 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4102 nbitsize, nbitpos, 1, lreversep);
4103
4104 rhs = const_binop (BIT_AND_EXPR,
4105 const_binop (LSHIFT_EXPR,
4106 fold_convert_loc (loc, unsigned_type, rhs),
4107 size_int (lbitpos)),
4108 mask);
4109
4110 lhs = build2_loc (loc, code, compare_type,
4111 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4112 return lhs;
4113 }
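
/* Sketch of the payoff (hypothetical struct, not part of this file):

     struct s { unsigned a : 3; unsigned b : 5; } v;
     ... v.b == 7 ...

   Instead of extracting V.b with a shift-and-mask sequence, the code
   above loads a whole byte/halfword/word containing the field, ANDs
   it with a mask covering the field's bits, and compares against the
   constant 7 shifted into the field's position within that unit. */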
4114 \f
4115 /* Subroutine for fold_truth_andor_1: decode a field reference.
4116
4117 If EXP is a comparison reference, we return the innermost reference.
4118
4119 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4120 set to the starting bit number.
4121
4122 If the innermost field can be completely contained in a mode-sized
4123 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4124
4125 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4126 otherwise it is not changed.
4127
4128 *PUNSIGNEDP is set to the signedness of the field.
4129
4130 *PREVERSEP is set to the storage order of the field.
4131
4132 *PMASK is set to the mask used. This is either contained in a
4133 BIT_AND_EXPR or derived from the width of the field.
4134
4135 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4136
4137 Return 0 if this is not a component reference or is one that we can't
4138 do anything with. */
4139
4140 static tree
4141 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4142 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4143 int *punsignedp, int *preversep, int *pvolatilep,
4144 tree *pmask, tree *pand_mask)
4145 {
4146 tree exp = *exp_;
4147 tree outer_type = 0;
4148 tree and_mask = 0;
4149 tree mask, inner, offset;
4150 tree unsigned_type;
4151 unsigned int precision;
4152
4153 /* All the optimizations using this function assume integer fields.
4154 There are problems with FP fields since the type_for_size call
4155 below can fail for, e.g., XFmode. */
4156 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4157 return 0;
4158
4159 /* We are interested in the bare arrangement of bits, so strip everything
4160 that doesn't affect the machine mode. However, record the type of the
4161 outermost expression if it may matter below. */
4162 if (CONVERT_EXPR_P (exp)
4163 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4164 outer_type = TREE_TYPE (exp);
4165 STRIP_NOPS (exp);
4166
4167 if (TREE_CODE (exp) == BIT_AND_EXPR)
4168 {
4169 and_mask = TREE_OPERAND (exp, 1);
4170 exp = TREE_OPERAND (exp, 0);
4171 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4172 if (TREE_CODE (and_mask) != INTEGER_CST)
4173 return 0;
4174 }
4175
4176 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4177 punsignedp, preversep, pvolatilep);
4178 if ((inner == exp && and_mask == 0)
4179 || *pbitsize < 0 || offset != 0
4180 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4181 /* Reject out-of-bound accesses (PR79731). */
4182 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4183 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4184 *pbitpos + *pbitsize) < 0))
4185 return 0;
4186
4187 *exp_ = exp;
4188
4189 /* If the number of bits in the reference is the same as the bitsize of
4190 the outer type, then the outer type gives the signedness. Otherwise
4191 (in case of a small bitfield) the signedness is unchanged. */
4192 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4193 *punsignedp = TYPE_UNSIGNED (outer_type);
4194
4195 /* Compute the mask to access the bitfield. */
4196 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4197 precision = TYPE_PRECISION (unsigned_type);
4198
4199 mask = build_int_cst_type (unsigned_type, -1);
4200
4201 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4202 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4203
4204 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4205 if (and_mask != 0)
4206 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4207 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4208
4209 *pmask = mask;
4210 *pand_mask = and_mask;
4211 return inner;
4212 }
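
/* Hypothetical illustration of a decoded reference: for an expression
   such as

     (unsigned char) (v.b & 0x13)

   the function strips the conversion, records 0x13 in *PAND_MASK,
   returns the object containing the field V.b, and sets *PMASK to the
   field-width mask ANDed with 0x13, along with the bit position, size,
   signedness and storage order of the field. */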
4213
4214 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4215 bit positions and MASK is SIGNED. */
4216
4217 static int
4218 all_ones_mask_p (const_tree mask, unsigned int size)
4219 {
4220 tree type = TREE_TYPE (mask);
4221 unsigned int precision = TYPE_PRECISION (type);
4222
4223 /* If this function returns true when the type of the mask is
4224 UNSIGNED, then there will be errors. In particular see
4225 gcc.c-torture/execute/990326-1.c. There does not appear to be
4226 any documentation paper trail as to why this is so. But the pre
4227 wide-int worked with that restriction and it has been preserved
4228 here. */
4229 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4230 return false;
4231
4232 return wi::mask (size, false, precision) == mask;
4233 }
4234
4235 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4236 represents the sign bit of EXP's type. If EXP represents a sign
4237 or zero extension, also test VAL against the unextended type.
4238 The return value is the (sub)expression whose sign bit is VAL,
4239 or NULL_TREE otherwise. */
4240
4241 tree
4242 sign_bit_p (tree exp, const_tree val)
4243 {
4244 int width;
4245 tree t;
4246
4247 /* Tree EXP must have an integral type. */
4248 t = TREE_TYPE (exp);
4249 if (! INTEGRAL_TYPE_P (t))
4250 return NULL_TREE;
4251
4252 /* Tree VAL must be an integer constant. */
4253 if (TREE_CODE (val) != INTEGER_CST
4254 || TREE_OVERFLOW (val))
4255 return NULL_TREE;
4256
4257 width = TYPE_PRECISION (t);
4258 if (wi::only_sign_bit_p (val, width))
4259 return exp;
4260
4261 /* Handle extension from a narrower type. */
4262 if (TREE_CODE (exp) == NOP_EXPR
4263 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4264 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4265
4266 return NULL_TREE;
4267 }
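
/* Illustration (hypothetical): sign_bit_p underlies folds such as
   "(x & 0x80000000) != 0" -> "x < 0" for a 32-bit int X: with
   EXP = x and VAL = 0x80000000 it returns x. For a widening
   conversion, EXP = (int) c with an 8-bit C and VAL = 0x80, it
   recurses into the narrower operand and returns c. */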
4268
4269 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4270 to be evaluated unconditionally. */
4271
4272 static int
4273 simple_operand_p (const_tree exp)
4274 {
4275 /* Strip any conversions that don't change the machine mode. */
4276 STRIP_NOPS (exp);
4277
4278 return (CONSTANT_CLASS_P (exp)
4279 || TREE_CODE (exp) == SSA_NAME
4280 || (DECL_P (exp)
4281 && ! TREE_ADDRESSABLE (exp)
4282 && ! TREE_THIS_VOLATILE (exp)
4283 && ! DECL_NONLOCAL (exp)
4284 /* Don't regard global variables as simple. They may be
4285 allocated in ways unknown to the compiler (shared memory,
4286 #pragma weak, etc). */
4287 && ! TREE_PUBLIC (exp)
4288 && ! DECL_EXTERNAL (exp)
4289 /* Weakrefs are not safe to read, since they can be NULL.
4290 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4291 have DECL_WEAK flag set. */
4292 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4293 /* Loading a static variable is unduly expensive, but global
4294 registers aren't expensive. */
4295 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4296 }
4297
4298 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4299 to be evaluated unconditionally.
4300 In addition to simple_operand_p, we assume that comparisons, conversions,
4301 and logic-not operations are simple, if their operands are simple, too. */
4302
4303 static bool
4304 simple_operand_p_2 (tree exp)
4305 {
4306 enum tree_code code;
4307
4308 if (TREE_SIDE_EFFECTS (exp)
4309 || tree_could_trap_p (exp))
4310 return false;
4311
4312 while (CONVERT_EXPR_P (exp))
4313 exp = TREE_OPERAND (exp, 0);
4314
4315 code = TREE_CODE (exp);
4316
4317 if (TREE_CODE_CLASS (code) == tcc_comparison)
4318 return (simple_operand_p (TREE_OPERAND (exp, 0))
4319 && simple_operand_p (TREE_OPERAND (exp, 1)));
4320
4321 if (code == TRUTH_NOT_EXPR)
4322 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4323
4324 return simple_operand_p (exp);
4325 }
4326
4327 \f
4328 /* The following functions are subroutines to fold_range_test and allow it to
4329 try to change a logical combination of comparisons into a range test.
4330
4331 For example, both
4332 X == 2 || X == 3 || X == 4 || X == 5
4333 and
4334 X >= 2 && X <= 5
4335 are converted to
4336 (unsigned) (X - 2) <= 3
4337
4338 We describe each set of comparisons as being either inside or outside
4339 a range, using a variable named like IN_P, and then describe the
4340 range with a lower and upper bound. If one of the bounds is omitted,
4341 it represents either the highest or lowest value of the type.
4342
4343 In the comments below, we represent a range by two numbers in brackets
4344 preceded by a "+" to designate being inside that range, or a "-" to
4345 designate being outside that range, so the condition can be inverted by
4346 flipping the prefix. An omitted bound is represented by a "-". For
4347 example, "- [-, 10]" means being outside the range starting at the lowest
4348 possible value and ending at 10, in other words, being greater than 10.
4349 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4350 always false.
4351
4352 We set things up so that the missing bounds are handled in a consistent
4353 manner so neither a missing bound nor "true" and "false" need to be
4354 handled using a special case. */
4355
4356 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4357 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4358 and UPPER1_P are nonzero if the respective argument is an upper bound
4359 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4360 must be specified for a comparison. ARG1 will be converted to ARG0's
4361 type if both are specified. */
4362
4363 static tree
4364 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4365 tree arg1, int upper1_p)
4366 {
4367 tree tem;
4368 int result;
4369 int sgn0, sgn1;
4370
4371 /* If neither arg represents infinity, do the normal operation.
4372 Else, if not a comparison, return infinity. Else handle the special
4373 comparison rules. Note that most of the cases below won't occur, but
4374 are handled for consistency. */
4375
4376 if (arg0 != 0 && arg1 != 0)
4377 {
4378 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4379 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4380 STRIP_NOPS (tem);
4381 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4382 }
4383
4384 if (TREE_CODE_CLASS (code) != tcc_comparison)
4385 return 0;
4386
4387 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4388 for neither. In real maths, we cannot assume open-ended ranges are
4389 the same. But this is computer arithmetic, where numbers are finite.
4390 We can therefore substitute a value Z for any unbounded bound, Z being
4391 greater than any representable number. This permits us to treat
4392 unbounded ranges as equal. */
4393 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4394 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4395 switch (code)
4396 {
4397 case EQ_EXPR:
4398 result = sgn0 == sgn1;
4399 break;
4400 case NE_EXPR:
4401 result = sgn0 != sgn1;
4402 break;
4403 case LT_EXPR:
4404 result = sgn0 < sgn1;
4405 break;
4406 case LE_EXPR:
4407 result = sgn0 <= sgn1;
4408 break;
4409 case GT_EXPR:
4410 result = sgn0 > sgn1;
4411 break;
4412 case GE_EXPR:
4413 result = sgn0 >= sgn1;
4414 break;
4415 default:
4416 gcc_unreachable ();
4417 }
4418
4419 return constant_boolean_node (result, type);
4420 }
4421 \f
4422 /* Helper routine for make_range. Perform one step for it; return the
4423 new expression if the loop should continue or NULL_TREE if it
4424 should stop. */
4425
4426 tree
4427 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4428 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4429 bool *strict_overflow_p)
4430 {
4431 tree arg0_type = TREE_TYPE (arg0);
4432 tree n_low, n_high, low = *p_low, high = *p_high;
4433 int in_p = *p_in_p, n_in_p;
4434
4435 switch (code)
4436 {
4437 case TRUTH_NOT_EXPR:
4438 /* We can only do something if the range is testing for zero. */
4439 if (low == NULL_TREE || high == NULL_TREE
4440 || ! integer_zerop (low) || ! integer_zerop (high))
4441 return NULL_TREE;
4442 *p_in_p = ! in_p;
4443 return arg0;
4444
4445 case EQ_EXPR: case NE_EXPR:
4446 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4447 /* We can only do something if the range is testing for zero
4448 and if the second operand is an integer constant. Note that
4449 saying something is "in" the range we make is done by
4450 complementing IN_P, since it will be set in the initial case of
4451 being not equal to zero; "out" is leaving it alone. */
4452 if (low == NULL_TREE || high == NULL_TREE
4453 || ! integer_zerop (low) || ! integer_zerop (high)
4454 || TREE_CODE (arg1) != INTEGER_CST)
4455 return NULL_TREE;
4456
4457 switch (code)
4458 {
4459 case NE_EXPR: /* - [c, c] */
4460 low = high = arg1;
4461 break;
4462 case EQ_EXPR: /* + [c, c] */
4463 in_p = ! in_p, low = high = arg1;
4464 break;
4465 case GT_EXPR: /* - [-, c] */
4466 low = 0, high = arg1;
4467 break;
4468 case GE_EXPR: /* + [c, -] */
4469 in_p = ! in_p, low = arg1, high = 0;
4470 break;
4471 case LT_EXPR: /* - [c, -] */
4472 low = arg1, high = 0;
4473 break;
4474 case LE_EXPR: /* + [-, c] */
4475 in_p = ! in_p, low = 0, high = arg1;
4476 break;
4477 default:
4478 gcc_unreachable ();
4479 }
4480
4481 /* If this is an unsigned comparison, we also know that EXP is
4482 greater than or equal to zero. We base the range tests we make
4483 on that fact, so we record it here so we can parse existing
4484 range tests. We test arg0_type since often the return type
4485 of, e.g. EQ_EXPR, is boolean. */
4486 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4487 {
4488 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4489 in_p, low, high, 1,
4490 build_int_cst (arg0_type, 0),
4491 NULL_TREE))
4492 return NULL_TREE;
4493
4494 in_p = n_in_p, low = n_low, high = n_high;
4495
4496 /* If the high bound is missing, but we have a nonzero low
4497 bound, reverse the range so it goes from zero to the low bound
4498 minus 1. */
4499 if (high == 0 && low && ! integer_zerop (low))
4500 {
4501 in_p = ! in_p;
4502 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4503 build_int_cst (TREE_TYPE (low), 1), 0);
4504 low = build_int_cst (arg0_type, 0);
4505 }
4506 }
4507
4508 *p_low = low;
4509 *p_high = high;
4510 *p_in_p = in_p;
4511 return arg0;
4512
4513 case NEGATE_EXPR:
4514 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4515 low and high are non-NULL, then normalize will DTRT. */
4516 if (!TYPE_UNSIGNED (arg0_type)
4517 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4518 {
4519 if (low == NULL_TREE)
4520 low = TYPE_MIN_VALUE (arg0_type);
4521 if (high == NULL_TREE)
4522 high = TYPE_MAX_VALUE (arg0_type);
4523 }
4524
4525 /* (-x) IN [a,b] -> x in [-b, -a] */
4526 n_low = range_binop (MINUS_EXPR, exp_type,
4527 build_int_cst (exp_type, 0),
4528 0, high, 1);
4529 n_high = range_binop (MINUS_EXPR, exp_type,
4530 build_int_cst (exp_type, 0),
4531 0, low, 0);
4532 if (n_high != 0 && TREE_OVERFLOW (n_high))
4533 return NULL_TREE;
4534 goto normalize;
4535
4536 case BIT_NOT_EXPR:
4537 /* ~ X -> -X - 1 */
4538 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4539 build_int_cst (exp_type, 1));
4540
4541 case PLUS_EXPR:
4542 case MINUS_EXPR:
4543 if (TREE_CODE (arg1) != INTEGER_CST)
4544 return NULL_TREE;
4545
4546 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4547 move a constant to the other side. */
4548 if (!TYPE_UNSIGNED (arg0_type)
4549 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4550 return NULL_TREE;
4551
4552 /* If EXP is signed, any overflow in the computation is undefined,
4553 so we don't worry about it so long as our computations on
4554 the bounds don't overflow. For unsigned, overflow is defined
4555 and this is exactly the right thing. */
4556 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4557 arg0_type, low, 0, arg1, 0);
4558 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4559 arg0_type, high, 1, arg1, 0);
4560 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4561 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4562 return NULL_TREE;
4563
4564 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4565 *strict_overflow_p = true;
4566
4567 normalize:
4568 /* Check for an unsigned range which has wrapped around the maximum
4569 value, thus making n_high < n_low, and normalize it. */
4570 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4571 {
4572 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4573 build_int_cst (TREE_TYPE (n_high), 1), 0);
4574 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4575 build_int_cst (TREE_TYPE (n_low), 1), 0);
4576
4577 /* If the range is of the form +/- [ x+1, x ], we won't
4578 be able to normalize it. But then, it represents the
4579 whole range or the empty set, so make it
4580 +/- [ -, - ]. */
4581 if (tree_int_cst_equal (n_low, low)
4582 && tree_int_cst_equal (n_high, high))
4583 low = high = 0;
4584 else
4585 in_p = ! in_p;
4586 }
4587 else
4588 low = n_low, high = n_high;
4589
4590 *p_low = low;
4591 *p_high = high;
4592 *p_in_p = in_p;
4593 return arg0;
4594
4595 CASE_CONVERT:
4596 case NON_LVALUE_EXPR:
4597 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4598 return NULL_TREE;
4599
4600 if (! INTEGRAL_TYPE_P (arg0_type)
4601 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4602 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4603 return NULL_TREE;
4604
4605 n_low = low, n_high = high;
4606
4607 if (n_low != 0)
4608 n_low = fold_convert_loc (loc, arg0_type, n_low);
4609
4610 if (n_high != 0)
4611 n_high = fold_convert_loc (loc, arg0_type, n_high);
4612
4613 /* If we're converting arg0 from an unsigned type to exp,
4614 a signed type, we will be doing the comparison as unsigned.
4615 The tests above have already verified that LOW and HIGH
4616 are both positive.
4617
4618 So we have to ensure that we will handle large unsigned
4619 values the same way that the current signed bounds treat
4620 negative values. */
4621
4622 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4623 {
4624 tree high_positive;
4625 tree equiv_type;
4626 /* For fixed-point modes, we need to pass the saturating flag
4627 as the 2nd parameter. */
4628 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4629 equiv_type
4630 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4631 TYPE_SATURATING (arg0_type));
4632 else
4633 equiv_type
4634 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4635
4636 /* A range without an upper bound is, naturally, unbounded.
4637 Since convert would have cropped a very large value, use
4638 the max value for the destination type. */
4639 high_positive
4640 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4641 : TYPE_MAX_VALUE (arg0_type);
4642
4643 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4644 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4645 fold_convert_loc (loc, arg0_type,
4646 high_positive),
4647 build_int_cst (arg0_type, 1));
4648
4649 /* If the low bound is specified, "and" the range with the
4650 range for which the original unsigned value will be
4651 positive. */
4652 if (low != 0)
4653 {
4654 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4655 1, fold_convert_loc (loc, arg0_type,
4656 integer_zero_node),
4657 high_positive))
4658 return NULL_TREE;
4659
4660 in_p = (n_in_p == in_p);
4661 }
4662 else
4663 {
4664 /* Otherwise, "or" the range with the range of the input
4665 that will be interpreted as negative. */
4666 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4667 1, fold_convert_loc (loc, arg0_type,
4668 integer_zero_node),
4669 high_positive))
4670 return NULL_TREE;
4671
4672 in_p = (in_p != n_in_p);
4673 }
4674 }
4675
4676 *p_low = n_low;
4677 *p_high = n_high;
4678 *p_in_p = in_p;
4679 return arg0;
4680
4681 default:
4682 return NULL_TREE;
4683 }
4684 }
4685
4686 /* Given EXP, a logical expression, set the range it is testing into
4687 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4688 actually being tested. *PLOW and *PHIGH will be made of the same
4689 type as the returned expression. If EXP is not a comparison, we
4690 will most likely not be returning a useful value and range. Set
4691 *STRICT_OVERFLOW_P to true if the return value is only valid
4692 because signed overflow is undefined; otherwise, do not change
4693 *STRICT_OVERFLOW_P. */
4694
4695 tree
4696 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4697 bool *strict_overflow_p)
4698 {
4699 enum tree_code code;
4700 tree arg0, arg1 = NULL_TREE;
4701 tree exp_type, nexp;
4702 int in_p;
4703 tree low, high;
4704 location_t loc = EXPR_LOCATION (exp);
4705
4706 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4707 and see if we can refine the range. Some of the cases below may not
4708 happen, but it doesn't seem worth worrying about this. We loop
4709 as long as make_range_step can refine the range further and stop
4710 as soon as it cannot. */
4711
4712 in_p = 0;
4713 low = high = build_int_cst (TREE_TYPE (exp), 0);
4714
4715 while (1)
4716 {
4717 code = TREE_CODE (exp);
4718 exp_type = TREE_TYPE (exp);
4719 arg0 = NULL_TREE;
4720
4721 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4722 {
4723 if (TREE_OPERAND_LENGTH (exp) > 0)
4724 arg0 = TREE_OPERAND (exp, 0);
4725 if (TREE_CODE_CLASS (code) == tcc_binary
4726 || TREE_CODE_CLASS (code) == tcc_comparison
4727 || (TREE_CODE_CLASS (code) == tcc_expression
4728 && TREE_OPERAND_LENGTH (exp) > 1))
4729 arg1 = TREE_OPERAND (exp, 1);
4730 }
4731 if (arg0 == NULL_TREE)
4732 break;
4733
4734 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4735 &high, &in_p, strict_overflow_p);
4736 if (nexp == NULL_TREE)
4737 break;
4738 exp = nexp;
4739 }
4740
4741 /* If EXP is a constant, we can evaluate whether this is true or false. */
4742 if (TREE_CODE (exp) == INTEGER_CST)
4743 {
4744 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4745 exp, 0, low, 0))
4746 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4747 exp, 1, high, 1)));
4748 low = high = 0;
4749 exp = 0;
4750 }
4751
4752 *pin_p = in_p, *plow = low, *phigh = high;
4753 return exp;
4754 }
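
/* Worked example (hypothetical): decomposing EXP = "x > 4" performs one
   make_range_step for the comparison and yields IN_P = 0, *PLOW
   unbounded, *PHIGH = 4 -- "- [-, 4]" in the notation above, i.e. x
   outside [minimum, 4]. Decomposing "!(x > 4)" first flips IN_P in the
   TRUTH_NOT_EXPR step, giving "+ [-, 4]", i.e. x <= 4. */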
4755
4756 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
4757 a bitwise check, i.e. when
4758 LOW == 0xXX...X00...0
4759 HIGH == 0xXX...X11...1
4760 Return the corresponding mask in MASK and the stem in VALUE. */
4761
4762 static bool
4763 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4764 tree *value)
4765 {
4766 if (TREE_CODE (low) != INTEGER_CST
4767 || TREE_CODE (high) != INTEGER_CST)
4768 return false;
4769
4770 unsigned prec = TYPE_PRECISION (type);
4771 wide_int lo = wi::to_wide (low, prec);
4772 wide_int hi = wi::to_wide (high, prec);
4773
4774 wide_int end_mask = lo ^ hi;
4775 if ((end_mask & (end_mask + 1)) != 0
4776 || (lo & end_mask) != 0)
4777 return false;
4778
4779 wide_int stem_mask = ~end_mask;
4780 wide_int stem = lo & stem_mask;
4781 if (stem != (hi & stem_mask))
4782 return false;
4783
4784 *mask = wide_int_to_tree (type, stem_mask);
4785 *value = wide_int_to_tree (type, stem);
4786
4787 return true;
4788 }
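
/* Example (illustrative): for LOW = 0x20 and HIGH = 0x3F,
   END_MASK = LOW ^ HIGH = 0x1F is a contiguous run of low-order ones
   and LOW has none of those bits set, so the check

     x >= 0x20 && x <= 0x3F

   can be done bitwise as

     (x & ~0x1F) == 0x20

   with *MASK = ~0x1F (the stem mask) and *VALUE = 0x20 (the stem). */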
4789 \f
4790 /* Helper routine for build_range_check and match.pd. Return the type to
4791 perform the check or NULL if it shouldn't be optimized. */
4792
4793 tree
4794 range_check_type (tree etype)
4795 {
4796 /* First make sure that arithmetic in this type is valid, then make sure
4797 that it wraps around. */
4798 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4799 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4800 TYPE_UNSIGNED (etype));
4801
4802 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4803 {
4804 tree utype, minv, maxv;
4805
4806 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4807 for the type in question, as we rely on this here. */
4808 utype = unsigned_type_for (etype);
4809 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4810 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4811 build_int_cst (TREE_TYPE (maxv), 1), 1);
4812 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4813
4814 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4815 minv, 1, maxv, 1)))
4816 etype = utype;
4817 else
4818 return NULL_TREE;
4819 }
4820 return etype;
4821 }
4822
4823 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4824 type, TYPE, return an expression to test if EXP is in (or out of, depending
4825 on IN_P) the range. Return 0 if the test couldn't be created. */
4826
4827 tree
4828 build_range_check (location_t loc, tree type, tree exp, int in_p,
4829 tree low, tree high)
4830 {
4831 tree etype = TREE_TYPE (exp), mask, value;
4832
4833 /* Disable this optimization for function pointer expressions
4834 on targets that require function pointer canonicalization. */
4835 if (targetm.have_canonicalize_funcptr_for_compare ()
4836 && TREE_CODE (etype) == POINTER_TYPE
4837 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4838 return NULL_TREE;
4839
4840 if (! in_p)
4841 {
4842 value = build_range_check (loc, type, exp, 1, low, high);
4843 if (value != 0)
4844 return invert_truthvalue_loc (loc, value);
4845
4846 return 0;
4847 }
4848
4849 if (low == 0 && high == 0)
4850 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4851
4852 if (low == 0)
4853 return fold_build2_loc (loc, LE_EXPR, type, exp,
4854 fold_convert_loc (loc, etype, high));
4855
4856 if (high == 0)
4857 return fold_build2_loc (loc, GE_EXPR, type, exp,
4858 fold_convert_loc (loc, etype, low));
4859
4860 if (operand_equal_p (low, high, 0))
4861 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4862 fold_convert_loc (loc, etype, low));
4863
4864 if (TREE_CODE (exp) == BIT_AND_EXPR
4865 && maskable_range_p (low, high, etype, &mask, &value))
4866 return fold_build2_loc (loc, EQ_EXPR, type,
4867 fold_build2_loc (loc, BIT_AND_EXPR, etype,
4868 exp, mask),
4869 value);
4870
4871 if (integer_zerop (low))
4872 {
4873 if (! TYPE_UNSIGNED (etype))
4874 {
4875 etype = unsigned_type_for (etype);
4876 high = fold_convert_loc (loc, etype, high);
4877 exp = fold_convert_loc (loc, etype, exp);
4878 }
4879 return build_range_check (loc, type, exp, 1, 0, high);
4880 }
4881
4882 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4883 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4884 {
4885 int prec = TYPE_PRECISION (etype);
4886
4887 if (wi::mask (prec - 1, false, prec) == high)
4888 {
4889 if (TYPE_UNSIGNED (etype))
4890 {
4891 tree signed_etype = signed_type_for (etype);
4892 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4893 etype
4894 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4895 else
4896 etype = signed_etype;
4897 exp = fold_convert_loc (loc, etype, exp);
4898 }
4899 return fold_build2_loc (loc, GT_EXPR, type, exp,
4900 build_int_cst (etype, 0));
4901 }
4902 }
4903
4904 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4905 This requires wrap-around arithmetic for the type of the expression. */
4906 etype = range_check_type (etype);
4907 if (etype == NULL_TREE)
4908 return NULL_TREE;
4909
4910 if (POINTER_TYPE_P (etype))
4911 etype = unsigned_type_for (etype);
4912
4913 high = fold_convert_loc (loc, etype, high);
4914 low = fold_convert_loc (loc, etype, low);
4915 exp = fold_convert_loc (loc, etype, exp);
4916
4917 value = const_binop (MINUS_EXPR, high, low);
4918
4919 if (value != 0 && !TREE_OVERFLOW (value))
4920 return build_range_check (loc, type,
4921 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4922 1, build_int_cst (etype, 0), value);
4923
4924 return 0;
4925 }
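
/* Source-level sketch of the range checks built here (hypothetical):

     c >= 'a' && c <= 'z'  ->  (unsigned) (c - 'a') <= 25
     c >= 1 && c <= 127    ->  (signed char) c > 0	(8-bit c)

   Rebasing the range at zero lets one unsigned comparison test both
   bounds at once, which is why wrap-around arithmetic is required. */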
4926 \f
4927 /* Return the predecessor of VAL in its type, handling the infinite case. */
4928
4929 static tree
4930 range_predecessor (tree val)
4931 {
4932 tree type = TREE_TYPE (val);
4933
4934 if (INTEGRAL_TYPE_P (type)
4935 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4936 return 0;
4937 else
4938 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4939 build_int_cst (TREE_TYPE (val), 1), 0);
4940 }
4941
4942 /* Return the successor of VAL in its type, handling the infinite case. */
4943
4944 static tree
4945 range_successor (tree val)
4946 {
4947 tree type = TREE_TYPE (val);
4948
4949 if (INTEGRAL_TYPE_P (type)
4950 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4951 return 0;
4952 else
4953 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4954 build_int_cst (TREE_TYPE (val), 1), 0);
4955 }
4956
4957 /* Given two ranges, see if we can merge them into one. Return 1 if we
4958 can, 0 if we can't. Set the output range into the specified parameters. */
4959
4960 bool
4961 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4962 tree high0, int in1_p, tree low1, tree high1)
4963 {
4964 int no_overlap;
4965 int subset;
4966 int temp;
4967 tree tem;
4968 int in_p;
4969 tree low, high;
4970 int lowequal = ((low0 == 0 && low1 == 0)
4971 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4972 low0, 0, low1, 0)));
4973 int highequal = ((high0 == 0 && high1 == 0)
4974 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4975 high0, 1, high1, 1)));
4976
4977 /* Make range 0 be the range that starts first, or ends last if they
4978 start at the same value. Swap them if that is not the case. */
4979 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4980 low0, 0, low1, 0))
4981 || (lowequal
4982 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4983 high1, 1, high0, 1))))
4984 {
4985 temp = in0_p, in0_p = in1_p, in1_p = temp;
4986 tem = low0, low0 = low1, low1 = tem;
4987 tem = high0, high0 = high1, high1 = tem;
4988 }
4989
4990 /* Now flag two cases, whether the ranges are disjoint or whether the
4991 second range is totally subsumed in the first. Note that the tests
4992 below are simplified by the ones above. */
4993 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4994 high0, 1, low1, 0));
4995 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4996 high1, 1, high0, 1));
4997
4998 /* We now have four cases, depending on whether we are including or
4999 excluding the two ranges. */
5000 if (in0_p && in1_p)
5001 {
5002 /* If they don't overlap, the result is false. If the second range
5003 is a subset, it is the result. Otherwise, the range is from the start
5004 of the second to the end of the first. */
5005 if (no_overlap)
5006 in_p = 0, low = high = 0;
5007 else if (subset)
5008 in_p = 1, low = low1, high = high1;
5009 else
5010 in_p = 1, low = low1, high = high0;
5011 }
5012
5013 else if (in0_p && ! in1_p)
5014 {
5015 /* If they don't overlap, the result is the first range. If they are
5016 equal, the result is false. If the second range is a subset of the
5017 first, and the ranges begin at the same place, we go from just after
5018 the end of the second range to the end of the first. If the second
5019 range is not a subset of the first, or if it is a subset and both
5020 ranges end at the same place, the range starts at the start of the
5021 first range and ends just before the second range.
5022 Otherwise, we can't describe this as a single range. */
5023 if (no_overlap)
5024 in_p = 1, low = low0, high = high0;
5025 else if (lowequal && highequal)
5026 in_p = 0, low = high = 0;
5027 else if (subset && lowequal)
5028 {
5029 low = range_successor (high1);
5030 high = high0;
5031 in_p = 1;
5032 if (low == 0)
5033 {
5034 /* We are in the weird situation where high0 > high1 but
5035 high1 has no successor. Punt. */
5036 return 0;
5037 }
5038 }
5039 else if (! subset || highequal)
5040 {
5041 low = low0;
5042 high = range_predecessor (low1);
5043 in_p = 1;
5044 if (high == 0)
5045 {
5046 /* low0 < low1 but low1 has no predecessor. Punt. */
5047 return 0;
5048 }
5049 }
5050 else
5051 return 0;
5052 }
5053
5054 else if (! in0_p && in1_p)
5055 {
5056 /* If they don't overlap, the result is the second range. If the second
5057 is a subset of the first, the result is false. Otherwise,
5058 the range starts just after the first range and ends at the
5059 end of the second. */
5060 if (no_overlap)
5061 in_p = 1, low = low1, high = high1;
5062 else if (subset || highequal)
5063 in_p = 0, low = high = 0;
5064 else
5065 {
5066 low = range_successor (high0);
5067 high = high1;
5068 in_p = 1;
5069 if (low == 0)
5070 {
5071 /* high1 > high0 but high0 has no successor. Punt. */
5072 return 0;
5073 }
5074 }
5075 }
5076
5077 else
5078 {
5079 /* The case where we are excluding both ranges. Here the complex case
5080 is if they don't overlap. In that case, the only time we have a
5081 range is if they are adjacent. If the second is a subset of the
5082 first, the result is the first. Otherwise, the range to exclude
5083 starts at the beginning of the first range and ends at the end of the
5084 second. */
5085 if (no_overlap)
5086 {
5087 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5088 range_successor (high0),
5089 1, low1, 0)))
5090 in_p = 0, low = low0, high = high1;
5091 else
5092 {
5093 /* Canonicalize - [min, x] into - [-, x]. */
5094 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5095 switch (TREE_CODE (TREE_TYPE (low0)))
5096 {
5097 case ENUMERAL_TYPE:
5098 if (TYPE_PRECISION (TREE_TYPE (low0))
5099 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5100 break;
5101 /* FALLTHROUGH */
5102 case INTEGER_TYPE:
5103 if (tree_int_cst_equal (low0,
5104 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5105 low0 = 0;
5106 break;
5107 case POINTER_TYPE:
5108 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5109 && integer_zerop (low0))
5110 low0 = 0;
5111 break;
5112 default:
5113 break;
5114 }
5115
5116 /* Canonicalize - [x, max] into - [x, -]. */
5117 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5118 switch (TREE_CODE (TREE_TYPE (high1)))
5119 {
5120 case ENUMERAL_TYPE:
5121 if (TYPE_PRECISION (TREE_TYPE (high1))
5122 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5123 break;
5124 /* FALLTHROUGH */
5125 case INTEGER_TYPE:
5126 if (tree_int_cst_equal (high1,
5127 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5128 high1 = 0;
5129 break;
5130 case POINTER_TYPE:
5131 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5132 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5133 high1, 1,
5134 build_int_cst (TREE_TYPE (high1), 1),
5135 1)))
5136 high1 = 0;
5137 break;
5138 default:
5139 break;
5140 }
5141
5142 /* The ranges might also be adjacent between the maximum and
5143 minimum values of the given type. For
5144 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5145 return + [x + 1, y - 1]. */
5146 if (low0 == 0 && high1 == 0)
5147 {
5148 low = range_successor (high0);
5149 high = range_predecessor (low1);
5150 if (low == 0 || high == 0)
5151 return 0;
5152
5153 in_p = 1;
5154 }
5155 else
5156 return 0;
5157 }
5158 }
5159 else if (subset)
5160 in_p = 0, low = low0, high = high0;
5161 else
5162 in_p = 0, low = low0, high = high1;
5163 }
5164
5165 *pin_p = in_p, *plow = low, *phigh = high;
5166 return 1;
5167 }
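
/* Worked example (illustrative): merging "+ [2, 5]" and "+ [4, 8]",
   both included: the ranges overlap and neither subsumes the other,
   so the result runs from the start of the second to the end of the
   first -- "+ [4, 5]", exactly the intersection of [2,5] with [4,8]. */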
5168 \f
5169
5170 /* Subroutine of fold, looking inside expressions of the form
5171 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5172 of the COND_EXPR. This function is being used also to optimize
5173 A op B ? C : A, by reversing the comparison first.
5174
5175 Return a folded expression whose code is not a COND_EXPR
5176 anymore, or NULL_TREE if no folding opportunity is found. */
5177
5178 static tree
5179 fold_cond_expr_with_comparison (location_t loc, tree type,
5180 tree arg0, tree arg1, tree arg2)
5181 {
5182 enum tree_code comp_code = TREE_CODE (arg0);
5183 tree arg00 = TREE_OPERAND (arg0, 0);
5184 tree arg01 = TREE_OPERAND (arg0, 1);
5185 tree arg1_type = TREE_TYPE (arg1);
5186 tree tem;
5187
5188 STRIP_NOPS (arg1);
5189 STRIP_NOPS (arg2);
5190
5191 /* If we have A op 0 ? A : -A, consider applying the following
5192 transformations:
5193
5194 A == 0? A : -A same as -A
5195 A != 0? A : -A same as A
5196 A >= 0? A : -A same as abs (A)
5197 A > 0? A : -A same as abs (A)
5198 A <= 0? A : -A same as -abs (A)
5199 A < 0? A : -A same as -abs (A)
5200
5201 None of these transformations work for modes with signed
5202 zeros. If A is +/-0, the first two transformations will
5203 change the sign of the result (from +0 to -0, or vice
5204 versa). The last four will fix the sign of the result,
5205 even though the original expressions could be positive or
5206 negative, depending on the sign of A.
5207
5208 Note that all these transformations are correct if A is
5209 NaN, since the two alternatives (A and -A) are also NaNs. */
5210 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5211 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5212 ? real_zerop (arg01)
5213 : integer_zerop (arg01))
5214 && ((TREE_CODE (arg2) == NEGATE_EXPR
5215 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5216 /* In the case that A is of the form X-Y, '-A' (arg2) may
5217 have already been folded to Y-X, check for that. */
5218 || (TREE_CODE (arg1) == MINUS_EXPR
5219 && TREE_CODE (arg2) == MINUS_EXPR
5220 && operand_equal_p (TREE_OPERAND (arg1, 0),
5221 TREE_OPERAND (arg2, 1), 0)
5222 && operand_equal_p (TREE_OPERAND (arg1, 1),
5223 TREE_OPERAND (arg2, 0), 0))))
5224 switch (comp_code)
5225 {
5226 case EQ_EXPR:
5227 case UNEQ_EXPR:
5228 tem = fold_convert_loc (loc, arg1_type, arg1);
5229 return fold_convert_loc (loc, type, negate_expr (tem));
5230 case NE_EXPR:
5231 case LTGT_EXPR:
5232 return fold_convert_loc (loc, type, arg1);
5233 case UNGE_EXPR:
5234 case UNGT_EXPR:
5235 if (flag_trapping_math)
5236 break;
5237 /* Fall through. */
5238 case GE_EXPR:
5239 case GT_EXPR:
5240 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5241 break;
5242 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5243 return fold_convert_loc (loc, type, tem);
5244 case UNLE_EXPR:
5245 case UNLT_EXPR:
5246 if (flag_trapping_math)
5247 break;
5248 /* FALLTHRU */
5249 case LE_EXPR:
5250 case LT_EXPR:
5251 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5252 break;
5253 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5254 return negate_expr (fold_convert_loc (loc, type, tem));
5255 default:
5256 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5257 break;
5258 }
5259
5260 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5261 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5262 both transformations are correct when A is NaN: A != 0
5263 is then true, and A == 0 is false. */
5264
5265 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5266 && integer_zerop (arg01) && integer_zerop (arg2))
5267 {
5268 if (comp_code == NE_EXPR)
5269 return fold_convert_loc (loc, type, arg1);
5270 else if (comp_code == EQ_EXPR)
5271 return build_zero_cst (type);
5272 }
5273
5274 /* Try some transformations of A op B ? A : B.
5275
5276 A == B? A : B same as B
5277 A != B? A : B same as A
5278 A >= B? A : B same as max (A, B)
5279 A > B? A : B same as max (B, A)
5280 A <= B? A : B same as min (A, B)
5281 A < B? A : B same as min (B, A)
5282
5283 As above, these transformations don't work in the presence
5284 of signed zeros. For example, if A and B are zeros of
5285 opposite sign, the first two transformations will change
5286 the sign of the result. In the last four, the original
5287 expressions give different results for (A=+0, B=-0) and
5288 (A=-0, B=+0), but the transformed expressions do not.
5289
5290 The first two transformations are correct if either A or B
5291 is a NaN. In the first transformation, the condition will
5292 be false, and B will indeed be chosen. In the case of the
5293 second transformation, the condition A != B will be true,
5294 and A will be chosen.
5295
5296 The conversions to max() and min() are not correct if B is
5297 a number and A is not. The conditions in the original
5298 expressions will be false, so all four give B. The min()
5299 and max() versions would give a NaN instead. */
5300 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5301 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5302 /* Avoid these transformations if the COND_EXPR may be used
5303 as an lvalue in the C++ front-end. PR c++/19199. */
5304 && (in_gimple_form
5305 || VECTOR_TYPE_P (type)
5306 || (! lang_GNU_CXX ()
5307 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5308 || ! maybe_lvalue_p (arg1)
5309 || ! maybe_lvalue_p (arg2)))
5310 {
5311 tree comp_op0 = arg00;
5312 tree comp_op1 = arg01;
5313 tree comp_type = TREE_TYPE (comp_op0);
5314
5315 switch (comp_code)
5316 {
5317 case EQ_EXPR:
5318 return fold_convert_loc (loc, type, arg2);
5319 case NE_EXPR:
5320 return fold_convert_loc (loc, type, arg1);
5321 case LE_EXPR:
5322 case LT_EXPR:
5323 case UNLE_EXPR:
5324 case UNLT_EXPR:
5325 /* In C++ a ?: expression can be an lvalue, so put the
5326 operand which will be used if they are equal first
5327 so that we can convert this back to the
5328 corresponding COND_EXPR. */
5329 if (!HONOR_NANS (arg1))
5330 {
5331 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5332 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5333 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5334 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5335 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5336 comp_op1, comp_op0);
5337 return fold_convert_loc (loc, type, tem);
5338 }
5339 break;
5340 case GE_EXPR:
5341 case GT_EXPR:
5342 case UNGE_EXPR:
5343 case UNGT_EXPR:
5344 if (!HONOR_NANS (arg1))
5345 {
5346 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5347 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5348 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5349 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5350 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5351 comp_op1, comp_op0);
5352 return fold_convert_loc (loc, type, tem);
5353 }
5354 break;
5355 case UNEQ_EXPR:
5356 if (!HONOR_NANS (arg1))
5357 return fold_convert_loc (loc, type, arg2);
5358 break;
5359 case LTGT_EXPR:
5360 if (!HONOR_NANS (arg1))
5361 return fold_convert_loc (loc, type, arg1);
5362 break;
5363 default:
5364 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5365 break;
5366 }
5367 }
5368
5369 return NULL_TREE;
5370 }
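
/* Illustrative folds done above (hypothetical sources, valid only when
   signed zeros and, where noted, NaNs need not be honored):

     x < 0 ? x : -x   ->  -abs (x)
     x <= y ? x : y   ->  min (x, y)
     x > y ? x : y    ->  max (y, x)
     x == y ? x : y   ->  y

   The lvalue guard keeps these folds away from C++/Objective-C++
   contexts where the ?: expression itself may be used as an lvalue
   (PR c++/19199). */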
5371
5372
5373 \f
5374 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5375 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5376 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5377 false) >= 2)
5378 #endif
5379
5380 /* EXP is some logical combination of boolean tests. See if we can
5381 merge it into some range test. Return the new tree if so. */
5382
5383 static tree
5384 fold_range_test (location_t loc, enum tree_code code, tree type,
5385 tree op0, tree op1)
5386 {
5387 int or_op = (code == TRUTH_ORIF_EXPR
5388 || code == TRUTH_OR_EXPR);
5389 int in0_p, in1_p, in_p;
5390 tree low0, low1, low, high0, high1, high;
5391 bool strict_overflow_p = false;
5392 tree tem, lhs, rhs;
5393 const char * const warnmsg = G_("assuming signed overflow does not occur "
5394 "when simplifying range test");
5395
5396 if (!INTEGRAL_TYPE_P (type))
5397 return 0;
5398
5399 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5400 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5401
5402 /* If this is an OR operation, invert both sides; we will invert
5403 again at the end. */
5404 if (or_op)
5405 in0_p = ! in0_p, in1_p = ! in1_p;
5406
5407 /* If both expressions are the same, if we can merge the ranges, and we
5408 can build the range test, return it or its inversion. If one of the
5409 ranges is always true or always false, consider it to be the same
5410 expression as the other. */
5411 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5412 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5413 in1_p, low1, high1)
5414 && 0 != (tem = (build_range_check (loc, type,
5415 lhs != 0 ? lhs
5416 : rhs != 0 ? rhs : integer_zero_node,
5417 in_p, low, high))))
5418 {
5419 if (strict_overflow_p)
5420 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5421 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5422 }
5423
5424 /* On machines where the branch cost is expensive, if this is a
5425 short-circuited branch and the underlying object on both sides
5426 is the same, make a non-short-circuit operation. */
5427 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5428 && lhs != 0 && rhs != 0
5429 && (code == TRUTH_ANDIF_EXPR
5430 || code == TRUTH_ORIF_EXPR)
5431 && operand_equal_p (lhs, rhs, 0))
5432 {
5433 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5434 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5435 which cases we can't do this. */
5436 if (simple_operand_p (lhs))
5437 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5438 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5439 type, op0, op1);
5440
5441 else if (!lang_hooks.decls.global_bindings_p ()
5442 && !CONTAINS_PLACEHOLDER_P (lhs))
5443 {
5444 tree common = save_expr (lhs);
5445
5446 if (0 != (lhs = build_range_check (loc, type, common,
5447 or_op ? ! in0_p : in0_p,
5448 low0, high0))
5449 && (0 != (rhs = build_range_check (loc, type, common,
5450 or_op ? ! in1_p : in1_p,
5451 low1, high1))))
5452 {
5453 if (strict_overflow_p)
5454 fold_overflow_warning (warnmsg,
5455 WARN_STRICT_OVERFLOW_COMPARISON);
5456 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5457 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5458 type, lhs, rhs);
5459 }
5460 }
5461 }
5462
5463 return 0;
5464 }
5465 \f
5466 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5467 bit value. Arrange things so the extra bits will be set to zero if and
5468 only if C is sign-extended to its full width. If MASK is nonzero,
5469 it is an INTEGER_CST that should be AND'ed with the extra bits. */
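/* A worked illustration with assumed values (MODESIZE == 8, P == 4):
   C == 0xfd is the 4-bit value -3 sign-extended to 8 bits. The sign
   bit of the 4-bit field is set, so the shifts below produce the
   pattern 0xf0, and 0xfd ^ 0xf0 == 0x0d has all extra bits clear.
   For a C whose upper bits are not a sign-extension of bit P-1, the
   XOR leaves some extra bits set instead. */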
5470
5471 static tree
5472 unextend (tree c, int p, int unsignedp, tree mask)
5473 {
5474 tree type = TREE_TYPE (c);
5475 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5476 tree temp;
5477
5478 if (p == modesize || unsignedp)
5479 return c;
5480
5481 /* We work by getting just the sign bit into the low-order bit, then
5482 into the high-order bit, then sign-extend. We then XOR that value
5483 with C. */
5484 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5485
5486 /* We must use a signed type in order to get an arithmetic right shift.
5487 However, we must also avoid introducing accidental overflows, so that
5488 a subsequent call to integer_zerop will work. Hence we must
5489 do the type conversion here. At this point, the constant is either
5490 zero or one, and the conversion to a signed type can never overflow.
5491 We could get an overflow if this conversion is done anywhere else. */
5492 if (TYPE_UNSIGNED (type))
5493 temp = fold_convert (signed_type_for (type), temp);
5494
5495 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5496 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5497 if (mask != 0)
5498 temp = const_binop (BIT_AND_EXPR, temp,
5499 fold_convert (TREE_TYPE (c), mask));
5500 /* If necessary, convert the type back to match the type of C. */
5501 if (TYPE_UNSIGNED (type))
5502 temp = fold_convert (type, temp);
5503
5504 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5505 }
5506 \f
5507 /* For an expression that has the form
5508 (A && B) || ~B
5509 or
5510 (A || B) && ~B,
5511 we can drop one of the inner expressions and simplify to
5512 A || ~B
5513 or
5514 A && ~B
5515 LOC is the location of the resulting expression. OP is the inner
5516 logical operation; the left-hand side in the examples above, while CMPOP
5517 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5518 removing a condition that guards another, as in
5519 (A != NULL && A->...) || A == NULL
5520 which we must not transform. If RHS_ONLY is true, only eliminate the
5521 right-most operand of the inner logical operation. */
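/* Illustrative (assumed expressions): "(a > 0 && b > 0) || b <= 0"
   can drop the redundant inner test and become "a > 0 || b <= 0",
   whereas in the guarded case quoted above the "A != NULL" operand
   must survive, which is what RHS_ONLY enforces. */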
5522
5523 static tree
5524 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5525 bool rhs_only)
5526 {
5527 tree type = TREE_TYPE (cmpop);
5528 enum tree_code code = TREE_CODE (cmpop);
5529 enum tree_code truthop_code = TREE_CODE (op);
5530 tree lhs = TREE_OPERAND (op, 0);
5531 tree rhs = TREE_OPERAND (op, 1);
5532 tree orig_lhs = lhs, orig_rhs = rhs;
5533 enum tree_code rhs_code = TREE_CODE (rhs);
5534 enum tree_code lhs_code = TREE_CODE (lhs);
5535 enum tree_code inv_code;
5536
5537 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5538 return NULL_TREE;
5539
5540 if (TREE_CODE_CLASS (code) != tcc_comparison)
5541 return NULL_TREE;
5542
5543 if (rhs_code == truthop_code)
5544 {
5545 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5546 if (newrhs != NULL_TREE)
5547 {
5548 rhs = newrhs;
5549 rhs_code = TREE_CODE (rhs);
5550 }
5551 }
5552 if (lhs_code == truthop_code && !rhs_only)
5553 {
5554 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5555 if (newlhs != NULL_TREE)
5556 {
5557 lhs = newlhs;
5558 lhs_code = TREE_CODE (lhs);
5559 }
5560 }
5561
5562 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5563 if (inv_code == rhs_code
5564 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5565 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5566 return lhs;
5567 if (!rhs_only && inv_code == lhs_code
5568 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5569 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5570 return rhs;
5571 if (rhs != orig_rhs || lhs != orig_lhs)
5572 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5573 lhs, rhs);
5574 return NULL_TREE;
5575 }
5576
5577 /* Find ways of folding logical expressions of LHS and RHS:
5578 Try to merge two comparisons to the same innermost item.
5579 Look for range tests like "ch >= '0' && ch <= '9'".
5580 Look for combinations of simple terms on machines with expensive branches
5581 and evaluate the RHS unconditionally.
5582
5583 For example, if we have p->a == 2 && p->b == 4 and we can make an
5584 object large enough to span both A and B, we can do this with a comparison
5585 against the object ANDed with a mask.
5586
5587 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5588 operations to do this with one comparison.
5589
5590 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5591 function and the one above.
5592
5593 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5594 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5595
5596 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5597 two operands.
5598
5599 We return the simplified tree or 0 if no optimization is possible. */
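/* A hedged source-level illustration (hypothetical layout, not from
   the sources): given

     struct s { unsigned char a; unsigned char b; };

   the test "p->a == 2 && p->b == 4" can, when both fields fit in one
   mode, be merged into a single masked comparison of the containing
   word against the combined constant: one load, one mask, one compare
   instead of two of each. */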
5600
5601 static tree
5602 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5603 tree lhs, tree rhs)
5604 {
5605 /* If this is the "or" of two comparisons, we can do something if
5606 the comparisons are NE_EXPR. If this is the "and", we can do something
5607 if the comparisons are EQ_EXPR. I.e.,
5608 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5609
5610 WANTED_CODE is this operation code. For single bit fields, we can
5611 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5612 comparison for one-bit fields. */
5613
5614 enum tree_code wanted_code;
5615 enum tree_code lcode, rcode;
5616 tree ll_arg, lr_arg, rl_arg, rr_arg;
5617 tree ll_inner, lr_inner, rl_inner, rr_inner;
5618 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5619 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5620 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5621 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5622 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5623 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5624 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5625 machine_mode lnmode, rnmode;
5626 tree ll_mask, lr_mask, rl_mask, rr_mask;
5627 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5628 tree l_const, r_const;
5629 tree lntype, rntype, result;
5630 HOST_WIDE_INT first_bit, end_bit;
5631 int volatilep;
5632
5633 /* Start by getting the comparison codes. Fail if anything is volatile.
5634 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5635 it were surrounded with a NE_EXPR. */
5636
5637 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5638 return 0;
5639
5640 lcode = TREE_CODE (lhs);
5641 rcode = TREE_CODE (rhs);
5642
5643 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5644 {
5645 lhs = build2 (NE_EXPR, truth_type, lhs,
5646 build_int_cst (TREE_TYPE (lhs), 0));
5647 lcode = NE_EXPR;
5648 }
5649
5650 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5651 {
5652 rhs = build2 (NE_EXPR, truth_type, rhs,
5653 build_int_cst (TREE_TYPE (rhs), 0));
5654 rcode = NE_EXPR;
5655 }
5656
5657 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5658 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5659 return 0;
5660
5661 ll_arg = TREE_OPERAND (lhs, 0);
5662 lr_arg = TREE_OPERAND (lhs, 1);
5663 rl_arg = TREE_OPERAND (rhs, 0);
5664 rr_arg = TREE_OPERAND (rhs, 1);
5665
5666 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5667 if (simple_operand_p (ll_arg)
5668 && simple_operand_p (lr_arg))
5669 {
5670 if (operand_equal_p (ll_arg, rl_arg, 0)
5671 && operand_equal_p (lr_arg, rr_arg, 0))
5672 {
5673 result = combine_comparisons (loc, code, lcode, rcode,
5674 truth_type, ll_arg, lr_arg);
5675 if (result)
5676 return result;
5677 }
5678 else if (operand_equal_p (ll_arg, rr_arg, 0)
5679 && operand_equal_p (lr_arg, rl_arg, 0))
5680 {
5681 result = combine_comparisons (loc, code, lcode,
5682 swap_tree_comparison (rcode),
5683 truth_type, ll_arg, lr_arg);
5684 if (result)
5685 return result;
5686 }
5687 }
5688
5689 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5690 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5691
5692 /* If the RHS can be evaluated unconditionally and its operands are
5693 simple, it wins to evaluate the RHS unconditionally on machines
5694 with expensive branches. In this case, this isn't a comparison
5695 that can be merged. */
5696
5697 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5698 false) >= 2
5699 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5700 && simple_operand_p (rl_arg)
5701 && simple_operand_p (rr_arg))
5702 {
5703 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5704 if (code == TRUTH_OR_EXPR
5705 && lcode == NE_EXPR && integer_zerop (lr_arg)
5706 && rcode == NE_EXPR && integer_zerop (rr_arg)
5707 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5708 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5709 return build2_loc (loc, NE_EXPR, truth_type,
5710 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5711 ll_arg, rl_arg),
5712 build_int_cst (TREE_TYPE (ll_arg), 0));
5713
5714 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5715 if (code == TRUTH_AND_EXPR
5716 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5717 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5718 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5719 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5720 return build2_loc (loc, EQ_EXPR, truth_type,
5721 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5722 ll_arg, rl_arg),
5723 build_int_cst (TREE_TYPE (ll_arg), 0));
5724 }
5725
5726 /* See if the comparisons can be merged. Then get all the parameters for
5727 each side. */
5728
5729 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5730 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5731 return 0;
5732
5733 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5734 volatilep = 0;
5735 ll_inner = decode_field_reference (loc, &ll_arg,
5736 &ll_bitsize, &ll_bitpos, &ll_mode,
5737 &ll_unsignedp, &ll_reversep, &volatilep,
5738 &ll_mask, &ll_and_mask);
5739 lr_inner = decode_field_reference (loc, &lr_arg,
5740 &lr_bitsize, &lr_bitpos, &lr_mode,
5741 &lr_unsignedp, &lr_reversep, &volatilep,
5742 &lr_mask, &lr_and_mask);
5743 rl_inner = decode_field_reference (loc, &rl_arg,
5744 &rl_bitsize, &rl_bitpos, &rl_mode,
5745 &rl_unsignedp, &rl_reversep, &volatilep,
5746 &rl_mask, &rl_and_mask);
5747 rr_inner = decode_field_reference (loc, &rr_arg,
5748 &rr_bitsize, &rr_bitpos, &rr_mode,
5749 &rr_unsignedp, &rr_reversep, &volatilep,
5750 &rr_mask, &rr_and_mask);
5751
5752 /* The inner operation on the lhs of each comparison must be the same
5753 if we are to be able to do anything.
5754 Then see if we have constants. If not, the same must be true for
5755 the rhs's. */
5756 if (volatilep
5757 || ll_reversep != rl_reversep
5758 || ll_inner == 0 || rl_inner == 0
5759 || ! operand_equal_p (ll_inner, rl_inner, 0))
5760 return 0;
5761
5762 if (TREE_CODE (lr_arg) == INTEGER_CST
5763 && TREE_CODE (rr_arg) == INTEGER_CST)
5764 {
5765 l_const = lr_arg, r_const = rr_arg;
5766 lr_reversep = ll_reversep;
5767 }
5768 else if (lr_reversep != rr_reversep
5769 || lr_inner == 0 || rr_inner == 0
5770 || ! operand_equal_p (lr_inner, rr_inner, 0))
5771 return 0;
5772 else
5773 l_const = r_const = 0;
5774
5775 /* If either comparison code is not correct for our logical operation,
5776 fail. However, we can convert a one-bit comparison against zero into
5777 the opposite comparison against that bit being set in the field. */
5778
5779 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5780 if (lcode != wanted_code)
5781 {
5782 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5783 {
5784 /* Make the left operand unsigned, since we are only interested
5785 in the value of one bit. Otherwise we are doing the wrong
5786 thing below. */
5787 ll_unsignedp = 1;
5788 l_const = ll_mask;
5789 }
5790 else
5791 return 0;
5792 }
5793
5794 /* This is analogous to the code for l_const above. */
5795 if (rcode != wanted_code)
5796 {
5797 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5798 {
5799 rl_unsignedp = 1;
5800 r_const = rl_mask;
5801 }
5802 else
5803 return 0;
5804 }
5805
5806 /* See if we can find a mode that contains both fields being compared on
5807 the left. If we can't, fail. Otherwise, update all constants and masks
5808 to be relative to a field of that size. */
5809 first_bit = MIN (ll_bitpos, rl_bitpos);
5810 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5811 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5812 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5813 volatilep);
5814 if (lnmode == VOIDmode)
5815 return 0;
5816
5817 lnbitsize = GET_MODE_BITSIZE (lnmode);
5818 lnbitpos = first_bit & ~ (lnbitsize - 1);
5819 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5820 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5821
5822 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5823 {
5824 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5825 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5826 }
5827
5828 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5829 size_int (xll_bitpos));
5830 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5831 size_int (xrl_bitpos));
5832
5833 if (l_const)
5834 {
5835 l_const = fold_convert_loc (loc, lntype, l_const);
5836 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5837 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5838 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5839 fold_build1_loc (loc, BIT_NOT_EXPR,
5840 lntype, ll_mask))))
5841 {
5842 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5843
5844 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5845 }
5846 }
5847 if (r_const)
5848 {
5849 r_const = fold_convert_loc (loc, lntype, r_const);
5850 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5851 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5852 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5853 fold_build1_loc (loc, BIT_NOT_EXPR,
5854 lntype, rl_mask))))
5855 {
5856 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5857
5858 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5859 }
5860 }
5861
5862 /* If the right sides are not constant, do the same for them. Also,
5863 disallow this optimization if a size or signedness mismatch occurs
5864 between the left and right sides. */
5865 if (l_const == 0)
5866 {
5867 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5868 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5869 /* Make sure the two fields on the right
5870 correspond to the left without being swapped. */
5871 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5872 return 0;
5873
5874 first_bit = MIN (lr_bitpos, rr_bitpos);
5875 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5876 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5877 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5878 volatilep);
5879 if (rnmode == VOIDmode)
5880 return 0;
5881
5882 rnbitsize = GET_MODE_BITSIZE (rnmode);
5883 rnbitpos = first_bit & ~ (rnbitsize - 1);
5884 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5885 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5886
5887 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5888 {
5889 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5890 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5891 }
5892
5893 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5894 rntype, lr_mask),
5895 size_int (xlr_bitpos));
5896 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5897 rntype, rr_mask),
5898 size_int (xrr_bitpos));
5899
5900 /* Make a mask that corresponds to both fields being compared.
5901 Do this for both items being compared. If the operands are the
5902 same size and the bits being compared are in the same position
5903 then we can do this by masking both and comparing the masked
5904 results. */
5905 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5906 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5907 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5908 {
5909 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5910 lntype, lnbitsize, lnbitpos,
5911 ll_unsignedp || rl_unsignedp, ll_reversep);
5912 if (! all_ones_mask_p (ll_mask, lnbitsize))
5913 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5914
5915 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5916 rntype, rnbitsize, rnbitpos,
5917 lr_unsignedp || rr_unsignedp, lr_reversep);
5918 if (! all_ones_mask_p (lr_mask, rnbitsize))
5919 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5920
5921 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5922 }
5923
5924 /* There is still another way we can do something: If both pairs of
5925 fields being compared are adjacent, we may be able to make a wider
5926 field containing them both.
5927
5928 Note that we still must mask the lhs/rhs expressions. Furthermore,
5929 the mask must be shifted to account for the shift done by
5930 make_bit_field_ref. */
5931 if ((ll_bitsize + ll_bitpos == rl_bitpos
5932 && lr_bitsize + lr_bitpos == rr_bitpos)
5933 || (ll_bitpos == rl_bitpos + rl_bitsize
5934 && lr_bitpos == rr_bitpos + rr_bitsize))
5935 {
5936 tree type;
5937
5938 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5939 ll_bitsize + rl_bitsize,
5940 MIN (ll_bitpos, rl_bitpos),
5941 ll_unsignedp, ll_reversep);
5942 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5943 lr_bitsize + rr_bitsize,
5944 MIN (lr_bitpos, rr_bitpos),
5945 lr_unsignedp, lr_reversep);
5946
5947 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5948 size_int (MIN (xll_bitpos, xrl_bitpos)));
5949 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5950 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5951
5952 /* Convert to the smaller type before masking out unwanted bits. */
5953 type = lntype;
5954 if (lntype != rntype)
5955 {
5956 if (lnbitsize > rnbitsize)
5957 {
5958 lhs = fold_convert_loc (loc, rntype, lhs);
5959 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5960 type = rntype;
5961 }
5962 else if (lnbitsize < rnbitsize)
5963 {
5964 rhs = fold_convert_loc (loc, lntype, rhs);
5965 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5966 type = lntype;
5967 }
5968 }
5969
5970 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5971 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5972
5973 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5974 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5975
5976 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5977 }
5978
5979 return 0;
5980 }
5981
5982 /* Handle the case of comparisons with constants. If there is something in
5983 common between the masks, those bits of the constants must be the same.
5984 If not, the condition is always false. Test for this to avoid generating
5985 incorrect code below. */
5986 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5987 if (! integer_zerop (result)
5988 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5989 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5990 {
5991 if (wanted_code == NE_EXPR)
5992 {
5993 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5994 return constant_boolean_node (true, truth_type);
5995 }
5996 else
5997 {
5998 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5999 return constant_boolean_node (false, truth_type);
6000 }
6001 }
6002
6003 /* Construct the expression we will return. First get the component
6004 reference we will make. Unless the mask is all ones the width of
6005 that field, perform the mask operation. Then compare with the
6006 merged constant. */
6007 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6008 lntype, lnbitsize, lnbitpos,
6009 ll_unsignedp || rl_unsignedp, ll_reversep);
6010
6011 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6012 if (! all_ones_mask_p (ll_mask, lnbitsize))
6013 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6014
6015 return build2_loc (loc, wanted_code, truth_type, result,
6016 const_binop (BIT_IOR_EXPR, l_const, r_const));
6017 }
6018 \f
6019 /* T is an integer expression that is being multiplied by, divided by, or
6020 taken modulo a constant C (CODE says which operation and what kind of
6021 divide or modulus). See if we can eliminate that operation by folding it
6022 with other operations already in T. WIDE_TYPE, if non-null, is a type that
6023 should be used for the computation if wider than our type.
6024
6025 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6026 (X * 2) + (Y * 4). We must, however, be assured that either the original
6027 expression would not overflow or that overflow is undefined for the type
6028 in the language in question.
6029
6030 If we return a non-null expression, it is an equivalent form of the
6031 original computation, but need not be in the original type.
6032
6033 We set *STRICT_OVERFLOW_P to true if the return value depends on
6034 signed overflow being undefined. Otherwise we do not change
6035 *STRICT_OVERFLOW_P. */
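/* Restating the example above as code (an illustrative sketch):

     long f (long x, long y) { return (x * 8 + y * 16) / 4; }

   may be folded to x * 2 + y * 4, eliminating the division, provided
   the overflow conditions spelled out below hold for the type. */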
6036
6037 static tree
6038 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6039 bool *strict_overflow_p)
6040 {
6041 /* To avoid exponential search depth, refuse to allow recursion past
6042 three levels. Beyond that (1) it's highly unlikely that we'll find
6043 something interesting and (2) we've probably processed it before
6044 when we built the inner expression. */
6045
6046 static int depth;
6047 tree ret;
6048
6049 if (depth > 3)
6050 return NULL;
6051
6052 depth++;
6053 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6054 depth--;
6055
6056 return ret;
6057 }
6058
6059 static tree
6060 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6061 bool *strict_overflow_p)
6062 {
6063 tree type = TREE_TYPE (t);
6064 enum tree_code tcode = TREE_CODE (t);
6065 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6066 > GET_MODE_SIZE (TYPE_MODE (type)))
6067 ? wide_type : type);
6068 tree t1, t2;
6069 int same_p = tcode == code;
6070 tree op0 = NULL_TREE, op1 = NULL_TREE;
6071 bool sub_strict_overflow_p;
6072
6073 /* Don't deal with constants of zero here; they confuse the code below. */
6074 if (integer_zerop (c))
6075 return NULL_TREE;
6076
6077 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6078 op0 = TREE_OPERAND (t, 0);
6079
6080 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6081 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6082
6083 /* Note that we need not handle conditional operations here since fold
6084 already handles those cases. So just do arithmetic here. */
6085 switch (tcode)
6086 {
6087 case INTEGER_CST:
6088 /* For a constant, we can always simplify if this is a multiply
6089 or (for divide and modulus) if it is a multiple of our constant. */
6090 if (code == MULT_EXPR
6091 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6092 {
6093 tree tem = const_binop (code, fold_convert (ctype, t),
6094 fold_convert (ctype, c));
6095 /* If the multiplication overflowed, we lost information on it.
6096 See PR68142 and PR69845. */
6097 if (TREE_OVERFLOW (tem))
6098 return NULL_TREE;
6099 return tem;
6100 }
6101 break;
6102
6103 CASE_CONVERT: case NON_LVALUE_EXPR:
6104 /* If op0 is an expression ... */
6105 if ((COMPARISON_CLASS_P (op0)
6106 || UNARY_CLASS_P (op0)
6107 || BINARY_CLASS_P (op0)
6108 || VL_EXP_CLASS_P (op0)
6109 || EXPRESSION_CLASS_P (op0))
6110 /* ... and has wrapping overflow, and its type is smaller
6111 than ctype, then we cannot pass through as widening. */
6112 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6113 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6114 && (TYPE_PRECISION (ctype)
6115 > TYPE_PRECISION (TREE_TYPE (op0))))
6116 /* ... or this is a truncation (t is narrower than op0),
6117 then we cannot pass through this narrowing. */
6118 || (TYPE_PRECISION (type)
6119 < TYPE_PRECISION (TREE_TYPE (op0)))
6120 /* ... or signedness changes for division or modulus,
6121 then we cannot pass through this conversion. */
6122 || (code != MULT_EXPR
6123 && (TYPE_UNSIGNED (ctype)
6124 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6125 /* ... or has undefined overflow while the converted to
6126 type has not, we cannot do the operation in the inner type
6127 as that would introduce undefined overflow. */
6128 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6129 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6130 && !TYPE_OVERFLOW_UNDEFINED (type))))
6131 break;
6132
6133 /* Pass the constant down and see if we can make a simplification. If
6134 we can, replace this expression with the inner simplification for
6135 possible later conversion to our or some other type. */
6136 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6137 && TREE_CODE (t2) == INTEGER_CST
6138 && !TREE_OVERFLOW (t2)
6139 && (0 != (t1 = extract_muldiv (op0, t2, code,
6140 code == MULT_EXPR
6141 ? ctype : NULL_TREE,
6142 strict_overflow_p))))
6143 return t1;
6144 break;
6145
6146 case ABS_EXPR:
6147 /* If widening the type changes it from signed to unsigned, then we
6148 must avoid building ABS_EXPR itself as unsigned. */
6149 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6150 {
6151 tree cstype = (*signed_type_for) (ctype);
6152 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6153 != 0)
6154 {
6155 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6156 return fold_convert (ctype, t1);
6157 }
6158 break;
6159 }
6160 /* If the constant is negative, we cannot simplify this. */
6161 if (tree_int_cst_sgn (c) == -1)
6162 break;
6163 /* FALLTHROUGH */
6164 case NEGATE_EXPR:
6165 /* For division and modulus, type can't be unsigned, as e.g.
6166 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6167 For signed types, even with wrapping overflow, this is fine. */
6168 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6169 break;
6170 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6171 != 0)
6172 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6173 break;
6174
6175 case MIN_EXPR: case MAX_EXPR:
6176 /* If widening the type changes the signedness, then we can't perform
6177 this optimization as that changes the result. */
6178 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6179 break;
6180
6181 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6182 sub_strict_overflow_p = false;
6183 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6184 &sub_strict_overflow_p)) != 0
6185 && (t2 = extract_muldiv (op1, c, code, wide_type,
6186 &sub_strict_overflow_p)) != 0)
6187 {
6188 if (tree_int_cst_sgn (c) < 0)
6189 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6190 if (sub_strict_overflow_p)
6191 *strict_overflow_p = true;
6192 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6193 fold_convert (ctype, t2));
6194 }
6195 break;
6196
6197 case LSHIFT_EXPR: case RSHIFT_EXPR:
6198 /* If the second operand is constant, this is a multiplication
6199 or floor division, by a power of two, so we can treat it that
6200 way unless the multiplier or divisor overflows. Signed
6201 left-shift overflow is implementation-defined rather than
6202 undefined in C90, so do not convert signed left shift into
6203 multiplication. */
6204 if (TREE_CODE (op1) == INTEGER_CST
6205 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6206 /* const_binop may not detect overflow correctly,
6207 so check for it explicitly here. */
6208 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6209 && 0 != (t1 = fold_convert (ctype,
6210 const_binop (LSHIFT_EXPR,
6211 size_one_node,
6212 op1)))
6213 && !TREE_OVERFLOW (t1))
6214 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6215 ? MULT_EXPR : FLOOR_DIV_EXPR,
6216 ctype,
6217 fold_convert (ctype, op0),
6218 t1),
6219 c, code, wide_type, strict_overflow_p);
6220 break;
6221
6222 case PLUS_EXPR: case MINUS_EXPR:
6223 /* See if we can eliminate the operation on both sides. If we can, we
6224 can return a new PLUS or MINUS. If we can't, the only remaining
6225 cases where we can do anything are if the second operand is a
6226 constant. */
6227 sub_strict_overflow_p = false;
6228 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6229 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6230 if (t1 != 0 && t2 != 0
6231 && TYPE_OVERFLOW_WRAPS (ctype)
6232 && (code == MULT_EXPR
6233 /* If not multiplication, we can only do this if both operands
6234 are divisible by c. */
6235 || (multiple_of_p (ctype, op0, c)
6236 && multiple_of_p (ctype, op1, c))))
6237 {
6238 if (sub_strict_overflow_p)
6239 *strict_overflow_p = true;
6240 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6241 fold_convert (ctype, t2));
6242 }
6243
6244 /* If this was a subtraction, negate OP1 and set it to be an addition.
6245 This simplifies the logic below. */
6246 if (tcode == MINUS_EXPR)
6247 {
6248 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6249 /* If OP1 was not easily negatable, the constant may be OP0. */
6250 if (TREE_CODE (op0) == INTEGER_CST)
6251 {
6252 std::swap (op0, op1);
6253 std::swap (t1, t2);
6254 }
6255 }
6256
6257 if (TREE_CODE (op1) != INTEGER_CST)
6258 break;
6259
6260 /* If either OP1 or C are negative, this optimization is not safe for
6261 some of the division and remainder types while for others we need
6262 to change the code. */
6263 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6264 {
6265 if (code == CEIL_DIV_EXPR)
6266 code = FLOOR_DIV_EXPR;
6267 else if (code == FLOOR_DIV_EXPR)
6268 code = CEIL_DIV_EXPR;
6269 else if (code != MULT_EXPR
6270 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6271 break;
6272 }
6273
6274 /* If it's a multiply or a division/modulus operation of a multiple
6275 of our constant, do the operation and verify it doesn't overflow. */
6276 if (code == MULT_EXPR
6277 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6278 {
6279 op1 = const_binop (code, fold_convert (ctype, op1),
6280 fold_convert (ctype, c));
6281 /* We allow the constant to overflow with wrapping semantics. */
6282 if (op1 == 0
6283 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6284 break;
6285 }
6286 else
6287 break;
6288
6289 /* If we have an unsigned type, we cannot widen the operation since it
6290 will change the result if the original computation overflowed. */
6291 if (TYPE_UNSIGNED (ctype) && ctype != type)
6292 break;
6293
6294 /* The last case is if this is a multiply. In that case, we can
6295 apply the distributive law to commute the multiply and addition
6296 if the multiplication of the constants doesn't overflow
6297 and overflow is defined. With undefined overflow
6298 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6299 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6300 return fold_build2 (tcode, ctype,
6301 fold_build2 (code, ctype,
6302 fold_convert (ctype, op0),
6303 fold_convert (ctype, c)),
6304 op1);
6305
6306 break;
6307
6308 case MULT_EXPR:
6309 /* We have a special case here if we are doing something like
6310 (C * 8) % 4 since we know that's zero. */
6311 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6312 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6313 /* If the multiplication can overflow we cannot optimize this. */
6314 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6315 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6316 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6317 {
6318 *strict_overflow_p = true;
6319 return omit_one_operand (type, integer_zero_node, op0);
6320 }
6321
6322 /* ... fall through ... */
6323
6324 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6325 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6326 /* If we can extract our operation from the LHS, do so and return a
6327 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6328 do something only if the second operand is a constant. */
6329 if (same_p
6330 && TYPE_OVERFLOW_WRAPS (ctype)
6331 && (t1 = extract_muldiv (op0, c, code, wide_type,
6332 strict_overflow_p)) != 0)
6333 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6334 fold_convert (ctype, op1));
6335 else if (tcode == MULT_EXPR && code == MULT_EXPR
6336 && TYPE_OVERFLOW_WRAPS (ctype)
6337 && (t1 = extract_muldiv (op1, c, code, wide_type,
6338 strict_overflow_p)) != 0)
6339 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6340 fold_convert (ctype, t1));
6341 else if (TREE_CODE (op1) != INTEGER_CST)
6342 return 0;
6343
6344 /* If these are the same operation types, we can associate them
6345 assuming no overflow. */
6346 if (tcode == code)
6347 {
6348 bool overflow_p = false;
6349 bool overflow_mul_p;
6350 signop sign = TYPE_SIGN (ctype);
6351 unsigned prec = TYPE_PRECISION (ctype);
6352 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6353 wi::to_wide (c, prec),
6354 sign, &overflow_mul_p);
6355 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6356 if (overflow_mul_p
6357 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6358 overflow_p = true;
6359 if (!overflow_p)
6360 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6361 wide_int_to_tree (ctype, mul));
6362 }
6363
6364 /* If these operations "cancel" each other, we have the main
6365 optimizations of this pass, which occur when either constant is a
6366 multiple of the other, in which case we replace this with either an
6367 operation of CODE or TCODE.
6368
6369 If we have an unsigned type, we cannot do this since it will change
6370 the result if the original computation overflowed. */
6371 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6372 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6373 || (tcode == MULT_EXPR
6374 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6375 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6376 && code != MULT_EXPR)))
6377 {
6378 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6379 {
6380 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6381 *strict_overflow_p = true;
6382 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6383 fold_convert (ctype,
6384 const_binop (TRUNC_DIV_EXPR,
6385 op1, c)));
6386 }
6387 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6388 {
6389 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6390 *strict_overflow_p = true;
6391 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6392 fold_convert (ctype,
6393 const_binop (TRUNC_DIV_EXPR,
6394 c, op1)));
6395 }
6396 }
6397 break;
6398
6399 default:
6400 break;
6401 }
6402
6403 return 0;
6404 }
6405 \f
6406 /* Return a node which has the indicated constant VALUE (either 0 or
6407 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6408 and is of the indicated TYPE. */
6409
6410 tree
6411 constant_boolean_node (bool value, tree type)
6412 {
6413 if (type == integer_type_node)
6414 return value ? integer_one_node : integer_zero_node;
6415 else if (type == boolean_type_node)
6416 return value ? boolean_true_node : boolean_false_node;
6417 else if (TREE_CODE (type) == VECTOR_TYPE)
6418 return build_vector_from_val (type,
6419 build_int_cst (TREE_TYPE (type),
6420 value ? -1 : 0));
6421 else
6422 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6423 }
6424
6425
6426 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6427 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6428 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6429 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6430 COND is the first argument to CODE; otherwise (as in the example
6431 given here), it is the second argument. TYPE is the type of the
6432 original expression. Return NULL_TREE if no simplification is
6433 possible. */
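/* Illustrative (assumed input): "2 + (b ? 3 : 5)" distributes to
   "b ? (2 + 3) : (2 + 5)" and then folds to "b ? 5 : 7"; the guards
   below only allow the transformation when at least one branch
   actually simplifies this way. */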
6434
6435 static tree
6436 fold_binary_op_with_conditional_arg (location_t loc,
6437 enum tree_code code,
6438 tree type, tree op0, tree op1,
6439 tree cond, tree arg, int cond_first_p)
6440 {
6441 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6442 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6443 tree test, true_value, false_value;
6444 tree lhs = NULL_TREE;
6445 tree rhs = NULL_TREE;
6446 enum tree_code cond_code = COND_EXPR;
6447
6448 if (TREE_CODE (cond) == COND_EXPR
6449 || TREE_CODE (cond) == VEC_COND_EXPR)
6450 {
6451 test = TREE_OPERAND (cond, 0);
6452 true_value = TREE_OPERAND (cond, 1);
6453 false_value = TREE_OPERAND (cond, 2);
6454 /* If this operand is an expression that throws (and hence has VOID
6455 type), then it does not make sense to try to perform a logical or
6456 arithmetic operation involving it. */
6457 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6458 lhs = true_value;
6459 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6460 rhs = false_value;
6461 }
6462 else if (!(TREE_CODE (type) != VECTOR_TYPE
6463 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6464 {
6465 tree testtype = TREE_TYPE (cond);
6466 test = cond;
6467 true_value = constant_boolean_node (true, testtype);
6468 false_value = constant_boolean_node (false, testtype);
6469 }
6470 else
6471 /* Detect the case of mixing vector and scalar types - bail out. */
6472 return NULL_TREE;
6473
6474 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6475 cond_code = VEC_COND_EXPR;
6476
6477 /* This transformation is only worthwhile if we don't have to wrap ARG
6478 in a SAVE_EXPR and the operation can be simplified without recursing
6479 on at least one of the branches once it's pushed inside the COND_EXPR. */
6480 if (!TREE_CONSTANT (arg)
6481 && (TREE_SIDE_EFFECTS (arg)
6482 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6483 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6484 return NULL_TREE;
6485
6486 arg = fold_convert_loc (loc, arg_type, arg);
6487 if (lhs == 0)
6488 {
6489 true_value = fold_convert_loc (loc, cond_type, true_value);
6490 if (cond_first_p)
6491 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6492 else
6493 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6494 }
6495 if (rhs == 0)
6496 {
6497 false_value = fold_convert_loc (loc, cond_type, false_value);
6498 if (cond_first_p)
6499 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6500 else
6501 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6502 }
6503
6504 /* Check that we have simplified at least one of the branches. */
6505 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6506 return NULL_TREE;
6507
6508 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6509 }
6510
6511 \f
6512 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6513
6514 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6515 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6516 ADDEND is the same as X.
6517
6518 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6519 and finite. The problematic cases are when X is zero, and its mode
6520 has signed zeros. In the case of rounding towards -infinity,
6521 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6522 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6523
6524 bool
6525 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6526 {
6527 if (!real_zerop (addend))
6528 return false;
6529
6530 /* Don't allow the fold with -fsignaling-nans. */
6531 if (HONOR_SNANS (element_mode (type)))
6532 return false;
6533
6534 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6535 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6536 return true;
6537
6538 /* In a vector or complex, we would need to check the sign of all zeros. */
6539 if (TREE_CODE (addend) != REAL_CST)
6540 return false;
6541
6542 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6543 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6544 negate = !negate;
6545
6546 /* The mode has signed zeros, and we have to honor their sign.
6547 In this situation, there is only one case we can return true for.
6548 X - 0 is the same as X unless rounding towards -infinity is
6549 supported. */
6550 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6551 }
6552
6553 /* Subroutine of match.pd that optimizes comparisons of a division by
6554 a nonzero integer constant against an integer constant, i.e.
6555 X/C1 op C2.
6556
6557 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6558 GE_EXPR or LE_EXPR. C1 and C2 must both be INTEGER_CSTs. */
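/* A worked example with assumed unsigned operands: for "x / 4 == 2"
   the satisfying values form the range [8, 11], so *LO = 8 and
   *HI = 11 and the division compare becomes a range check; for
   "x / 4 > 2" only one bound matters, giving x >= 12. */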
6559
6560 enum tree_code
6561 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6562 tree *hi, bool *neg_overflow)
6563 {
6564 tree prod, tmp, type = TREE_TYPE (c1);
6565 signop sign = TYPE_SIGN (type);
6566 bool overflow;
6567
6568 /* We have to do this the hard way to detect unsigned overflow.
6569 prod = int_const_binop (MULT_EXPR, c1, c2); */
6570 wide_int val = wi::mul (c1, c2, sign, &overflow);
6571 prod = force_fit_type (type, val, -1, overflow);
6572 *neg_overflow = false;
6573
6574 if (sign == UNSIGNED)
6575 {
6576 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6577 *lo = prod;
6578
6579 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6580 val = wi::add (prod, tmp, sign, &overflow);
6581 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6582 }
6583 else if (tree_int_cst_sgn (c1) >= 0)
6584 {
6585 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6586 switch (tree_int_cst_sgn (c2))
6587 {
6588 case -1:
6589 *neg_overflow = true;
6590 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6591 *hi = prod;
6592 break;
6593
6594 case 0:
6595 *lo = fold_negate_const (tmp, type);
6596 *hi = tmp;
6597 break;
6598
6599 case 1:
6600 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6601 *lo = prod;
6602 break;
6603
6604 default:
6605 gcc_unreachable ();
6606 }
6607 }
6608 else
6609 {
6610 /* A negative divisor reverses the relational operators. */
6611 code = swap_tree_comparison (code);
6612
6613 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6614 switch (tree_int_cst_sgn (c2))
6615 {
6616 case -1:
6617 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6618 *lo = prod;
6619 break;
6620
6621 case 0:
6622 *hi = fold_negate_const (tmp, type);
6623 *lo = tmp;
6624 break;
6625
6626 case 1:
6627 *neg_overflow = true;
6628 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6629 *hi = prod;
6630 break;
6631
6632 default:
6633 gcc_unreachable ();
6634 }
6635 }
6636
6637 if (code != EQ_EXPR && code != NE_EXPR)
6638 return code;
6639
6640 if (TREE_OVERFLOW (*lo)
6641 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6642 *lo = NULL_TREE;
6643 if (TREE_OVERFLOW (*hi)
6644 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6645 *hi = NULL_TREE;
6646
6647 return code;
6648 }
6649
6650
6651 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6652 equality/inequality test, then return a simplified form of the test
6653 using a sign testing. Otherwise return NULL. TYPE is the desired
6654 result type. */
6655
6656 static tree
6657 fold_single_bit_test_into_sign_test (location_t loc,
6658 enum tree_code code, tree arg0, tree arg1,
6659 tree result_type)
6660 {
6661 /* If this is testing a single bit, we can optimize the test. */
6662 if ((code == NE_EXPR || code == EQ_EXPR)
6663 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6664 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6665 {
6666 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6667 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6668 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6669
6670 if (arg00 != NULL_TREE
6671 /* This is only a win if casting to a signed type is cheap,
6672 i.e. when arg00's type is not a partial mode. */
6673 && TYPE_PRECISION (TREE_TYPE (arg00))
6674 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6675 {
6676 tree stype = signed_type_for (TREE_TYPE (arg00));
6677 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6678 result_type,
6679 fold_convert_loc (loc, stype, arg00),
6680 build_int_cst (stype, 0));
6681 }
6682 }
6683
6684 return NULL_TREE;
6685 }
6686
6687 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6688 equality/inequality test, then return a simplified form of
6689 the test using shifts and logical operations. Otherwise return
6690 NULL. TYPE is the desired result type. */
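/* Illustrative (assumed values): "(x & 8) != 0" becomes
   "(x >> 3) & 1", and for the == 0 flavor the shifted value is first
   XORed with 1 before the final AND, so the result is still 0 or 1,
   as the code below does. */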
6691
6692 tree
6693 fold_single_bit_test (location_t loc, enum tree_code code,
6694 tree arg0, tree arg1, tree result_type)
6695 {
6696 /* If this is testing a single bit, we can optimize the test. */
6697 if ((code == NE_EXPR || code == EQ_EXPR)
6698 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6699 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6700 {
6701 tree inner = TREE_OPERAND (arg0, 0);
6702 tree type = TREE_TYPE (arg0);
6703 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6704 machine_mode operand_mode = TYPE_MODE (type);
6705 int ops_unsigned;
6706 tree signed_type, unsigned_type, intermediate_type;
6707 tree tem, one;
6708
6709 /* First, see if we can fold the single bit test into a sign-bit
6710 test. */
6711 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6712 result_type);
6713 if (tem)
6714 return tem;
6715
6716 /* Otherwise we have (A & C) != 0 where C is a single bit,
6717 convert that into ((A >> C2) & 1), where C2 = log2(C).
6718 Similarly for (A & C) == 0. */
6719
6720 /* If INNER is a right shift of a constant and it plus BITNUM does
6721 not overflow, adjust BITNUM and INNER. */
6722 if (TREE_CODE (inner) == RSHIFT_EXPR
6723 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6724 && bitnum < TYPE_PRECISION (type)
6725 && wi::ltu_p (TREE_OPERAND (inner, 1),
6726 TYPE_PRECISION (type) - bitnum))
6727 {
6728 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6729 inner = TREE_OPERAND (inner, 0);
6730 }
6731
6732 /* If we are going to be able to omit the AND below, we must do our
6733 operations as unsigned. If we must use the AND, we have a choice.
6734 Normally unsigned is faster, but for some machines signed is. */
6735 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6736 && !flag_syntax_only) ? 0 : 1;
6737
6738 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6739 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6740 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6741 inner = fold_convert_loc (loc, intermediate_type, inner);
6742
6743 if (bitnum != 0)
6744 inner = build2 (RSHIFT_EXPR, intermediate_type,
6745 inner, size_int (bitnum));
6746
6747 one = build_int_cst (intermediate_type, 1);
6748
6749 if (code == EQ_EXPR)
6750 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6751
6752 /* Put the AND last so it can combine with more things. */
6753 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6754
6755 /* Make sure to return the proper type. */
6756 inner = fold_convert_loc (loc, result_type, inner);
6757
6758 return inner;
6759 }
6760 return NULL_TREE;
6761 }
6762
6763 /* Test whether it is preferable to swap two operands, ARG0 and
6764 ARG1, for example because ARG0 is an integer constant and ARG1
6765 isn't. */
6766
6767 bool
6768 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6769 {
6770 if (CONSTANT_CLASS_P (arg1))
6771 return 0;
6772 if (CONSTANT_CLASS_P (arg0))
6773 return 1;
6774
6775 STRIP_NOPS (arg0);
6776 STRIP_NOPS (arg1);
6777
6778 if (TREE_CONSTANT (arg1))
6779 return 0;
6780 if (TREE_CONSTANT (arg0))
6781 return 1;
6782
6783 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6784 for commutative and comparison operators. Ensuring a canonical
6785 form allows the optimizers to find additional redundancies without
6786 having to explicitly check for both orderings. */
6787 if (TREE_CODE (arg0) == SSA_NAME
6788 && TREE_CODE (arg1) == SSA_NAME
6789 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6790 return 1;
6791
6792 /* Put SSA_NAMEs last. */
6793 if (TREE_CODE (arg1) == SSA_NAME)
6794 return 0;
6795 if (TREE_CODE (arg0) == SSA_NAME)
6796 return 1;
6797
6798 /* Put variables last. */
6799 if (DECL_P (arg1))
6800 return 0;
6801 if (DECL_P (arg0))
6802 return 1;
6803
6804 return 0;
6805 }
6806
6807
6808 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6809 means A >= Y && A != MAX, but in this case we know that
6810 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
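/* A concrete instance (assumed int operands): in
   "a < n && a + 1 > i", the bound a < n guarantees a != INT_MAX, so
   a + 1 cannot overflow and the second test weakens safely to
   "a >= i", yielding "a < n && a >= i". */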
6811
6812 static tree
6813 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6814 {
6815 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6816
6817 if (TREE_CODE (bound) == LT_EXPR)
6818 a = TREE_OPERAND (bound, 0);
6819 else if (TREE_CODE (bound) == GT_EXPR)
6820 a = TREE_OPERAND (bound, 1);
6821 else
6822 return NULL_TREE;
6823
6824 typea = TREE_TYPE (a);
6825 if (!INTEGRAL_TYPE_P (typea)
6826 && !POINTER_TYPE_P (typea))
6827 return NULL_TREE;
6828
6829 if (TREE_CODE (ineq) == LT_EXPR)
6830 {
6831 a1 = TREE_OPERAND (ineq, 1);
6832 y = TREE_OPERAND (ineq, 0);
6833 }
6834 else if (TREE_CODE (ineq) == GT_EXPR)
6835 {
6836 a1 = TREE_OPERAND (ineq, 0);
6837 y = TREE_OPERAND (ineq, 1);
6838 }
6839 else
6840 return NULL_TREE;
6841
6842 if (TREE_TYPE (a1) != typea)
6843 return NULL_TREE;
6844
6845 if (POINTER_TYPE_P (typea))
6846 {
6847 /* Convert the pointers into integers before taking the difference. */
6848 tree ta = fold_convert_loc (loc, ssizetype, a);
6849 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6850 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6851 }
6852 else
6853 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6854
6855 if (!diff || !integer_onep (diff))
6856 return NULL_TREE;
6857
6858 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6859 }
6860
6861 /* Fold a sum or difference in which at least one operand is a multiplication.
6862 Returns the folded tree or NULL if no simplification could be made. */
6863
6864 static tree
6865 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6866 tree arg0, tree arg1)
6867 {
6868 tree arg00, arg01, arg10, arg11;
6869 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6870
6871 /* (A * C) +- (B * C) -> (A+-B) * C.
6872 (A * C) +- A -> A * (C+-1).
6873 We are most concerned about the case where C is a constant,
6874 but other combinations show up during loop reduction. Since
6875 it is not difficult, try all four possibilities. */
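/* For example (illustrative only): "x * 5 + y * 5" factors to
   "(x + y) * 5" and "x * 5 + x" to "x * 6", while the power-of-two
   case below rewrites "i * 12 + j * 4" as "(i * 3 + j) * 4", a
   shape that often arises from multi-dimensional array indexing. */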
6876
6877 if (TREE_CODE (arg0) == MULT_EXPR)
6878 {
6879 arg00 = TREE_OPERAND (arg0, 0);
6880 arg01 = TREE_OPERAND (arg0, 1);
6881 }
6882 else if (TREE_CODE (arg0) == INTEGER_CST)
6883 {
6884 arg00 = build_one_cst (type);
6885 arg01 = arg0;
6886 }
6887 else
6888 {
6889 /* We cannot generate constant 1 for fract. */
6890 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6891 return NULL_TREE;
6892 arg00 = arg0;
6893 arg01 = build_one_cst (type);
6894 }
6895 if (TREE_CODE (arg1) == MULT_EXPR)
6896 {
6897 arg10 = TREE_OPERAND (arg1, 0);
6898 arg11 = TREE_OPERAND (arg1, 1);
6899 }
6900 else if (TREE_CODE (arg1) == INTEGER_CST)
6901 {
6902 arg10 = build_one_cst (type);
6903 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6904 the purpose of this canonicalization. */
6905 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6906 && negate_expr_p (arg1)
6907 && code == PLUS_EXPR)
6908 {
6909 arg11 = negate_expr (arg1);
6910 code = MINUS_EXPR;
6911 }
6912 else
6913 arg11 = arg1;
6914 }
6915 else
6916 {
6917 /* We cannot generate constant 1 for fract. */
6918 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6919 return NULL_TREE;
6920 arg10 = arg1;
6921 arg11 = build_one_cst (type);
6922 }
6923 same = NULL_TREE;
6924
6925 /* Prefer factoring a common non-constant. */
6926 if (operand_equal_p (arg00, arg10, 0))
6927 same = arg00, alt0 = arg01, alt1 = arg11;
6928 else if (operand_equal_p (arg01, arg11, 0))
6929 same = arg01, alt0 = arg00, alt1 = arg10;
6930 else if (operand_equal_p (arg00, arg11, 0))
6931 same = arg00, alt0 = arg01, alt1 = arg10;
6932 else if (operand_equal_p (arg01, arg10, 0))
6933 same = arg01, alt0 = arg00, alt1 = arg11;
6934
6935 /* No identical multiplicands; see if we can find a common
6936 power-of-two factor in non-power-of-two multiplies. This
6937 can help in multi-dimensional array access. */
6938 else if (tree_fits_shwi_p (arg01)
6939 && tree_fits_shwi_p (arg11))
6940 {
6941 HOST_WIDE_INT int01, int11, tmp;
6942 bool swap = false;
6943 tree maybe_same;
6944 int01 = tree_to_shwi (arg01);
6945 int11 = tree_to_shwi (arg11);
6946
6947 /* Move min of absolute values to int11. */
6948 if (absu_hwi (int01) < absu_hwi (int11))
6949 {
6950 tmp = int01, int01 = int11, int11 = tmp;
6951 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6952 maybe_same = arg01;
6953 swap = true;
6954 }
6955 else
6956 maybe_same = arg11;
6957
6958 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6959 /* The remainder should not be a constant, otherwise we
6960 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6961 increased the number of multiplications necessary. */
6962 && TREE_CODE (arg10) != INTEGER_CST)
6963 {
6964 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6965 build_int_cst (TREE_TYPE (arg00),
6966 int01 / int11));
6967 alt1 = arg10;
6968 same = maybe_same;
6969 if (swap)
6970 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6971 }
6972 }
6973
6974 if (!same)
6975 return NULL_TREE;
6976
6977 if (! INTEGRAL_TYPE_P (type)
6978 || TYPE_OVERFLOW_WRAPS (type)
6979 /* We are neither factoring zero nor minus one. */
6980 || TREE_CODE (same) == INTEGER_CST)
6981 return fold_build2_loc (loc, MULT_EXPR, type,
6982 fold_build2_loc (loc, code, type,
6983 fold_convert_loc (loc, type, alt0),
6984 fold_convert_loc (loc, type, alt1)),
6985 fold_convert_loc (loc, type, same));
6986
6987 /* Same may be zero, and thus the operation 'code' may overflow. Likewise,
6988 same may be minus one, and thus the multiplication may overflow. Perform
6989 the operations in an unsigned type. */
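/* Editorial illustration (not part of the original sources): with
   same == 0, x*0 + y*0 is well defined even when x + y would
   overflow the signed type; forming the sum in UTYPE below avoids
   introducing that overflow.  */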
6990 tree utype = unsigned_type_for (type);
6991 tree tem = fold_build2_loc (loc, code, utype,
6992 fold_convert_loc (loc, utype, alt0),
6993 fold_convert_loc (loc, utype, alt1));
6994 /* If the sum evaluated to a constant that is not -INF, the multiplication
6995 cannot overflow. */
6996 if (TREE_CODE (tem) == INTEGER_CST
6997 && ! wi::eq_p (tem, wi::min_value (TYPE_PRECISION (utype), SIGNED)))
6998 return fold_build2_loc (loc, MULT_EXPR, type,
6999 fold_convert (type, tem), same);
7000
7001 return fold_convert_loc (loc, type,
7002 fold_build2_loc (loc, MULT_EXPR, utype, tem,
7003 fold_convert_loc (loc, utype, same)));
7004 }
7005
7006 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7007 specified by EXPR into the buffer PTR of length LEN bytes.
7008 Return the number of bytes placed in the buffer, or zero
7009 upon failure. */
7010
7011 static int
7012 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7013 {
7014 tree type = TREE_TYPE (expr);
7015 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7016 int byte, offset, word, words;
7017 unsigned char value;
7018
7019 if ((off == -1 && total_bytes > len)
7020 || off >= total_bytes)
7021 return 0;
7022 if (off == -1)
7023 off = 0;
7024 words = total_bytes / UNITS_PER_WORD;
7025
7026 for (byte = 0; byte < total_bytes; byte++)
7027 {
7028 int bitpos = byte * BITS_PER_UNIT;
7029 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7030 number of bytes. */
7031 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7032
7033 if (total_bytes > UNITS_PER_WORD)
7034 {
7035 word = byte / UNITS_PER_WORD;
7036 if (WORDS_BIG_ENDIAN)
7037 word = (words - 1) - word;
7038 offset = word * UNITS_PER_WORD;
7039 if (BYTES_BIG_ENDIAN)
7040 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7041 else
7042 offset += byte % UNITS_PER_WORD;
7043 }
7044 else
7045 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7046 if (offset >= off
7047 && offset - off < len)
7048 ptr[offset - off] = value;
7049 }
7050 return MIN (len, total_bytes - off);
7051 }
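/* Editorial illustration (not part of the original sources),
   assuming 8-bit bytes and a word size of at least four bytes:
   encoding the 32-bit INTEGER_CST 0x01020304 stores the bytes
   04 03 02 01 on a little-endian target and 01 02 03 04 on a
   big-endian one.  */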
7052
7053
7054 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7055 specified by EXPR into the buffer PTR of length LEN bytes.
7056 Return the number of bytes placed in the buffer, or zero
7057 upon failure. */
7058
7059 static int
7060 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7061 {
7062 tree type = TREE_TYPE (expr);
7063 machine_mode mode = TYPE_MODE (type);
7064 int total_bytes = GET_MODE_SIZE (mode);
7065 FIXED_VALUE_TYPE value;
7066 tree i_value, i_type;
7067
7068 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7069 return 0;
7070
7071 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7072
7073 if (NULL_TREE == i_type
7074 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7075 return 0;
7076
7077 value = TREE_FIXED_CST (expr);
7078 i_value = double_int_to_tree (i_type, value.data);
7079
7080 return native_encode_int (i_value, ptr, len, off);
7081 }
7082
7083
7084 /* Subroutine of native_encode_expr. Encode the REAL_CST
7085 specified by EXPR into the buffer PTR of length LEN bytes.
7086 Return the number of bytes placed in the buffer, or zero
7087 upon failure. */
7088
7089 static int
7090 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7091 {
7092 tree type = TREE_TYPE (expr);
7093 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7094 int byte, offset, word, words, bitpos;
7095 unsigned char value;
7096
7097 /* There are always 32 bits in each long, no matter the size of
7098 the host's long. We handle floating point representations with
7099 up to 192 bits. */
7100 long tmp[6];
7101
7102 if ((off == -1 && total_bytes > len)
7103 || off >= total_bytes)
7104 return 0;
7105 if (off == -1)
7106 off = 0;
7107 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7108
7109 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7110
7111 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7112 bitpos += BITS_PER_UNIT)
7113 {
7114 byte = (bitpos / BITS_PER_UNIT) & 3;
7115 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7116
7117 if (UNITS_PER_WORD < 4)
7118 {
7119 word = byte / UNITS_PER_WORD;
7120 if (WORDS_BIG_ENDIAN)
7121 word = (words - 1) - word;
7122 offset = word * UNITS_PER_WORD;
7123 if (BYTES_BIG_ENDIAN)
7124 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7125 else
7126 offset += byte % UNITS_PER_WORD;
7127 }
7128 else
7129 {
7130 offset = byte;
7131 if (BYTES_BIG_ENDIAN)
7132 {
7133 /* Reverse bytes within each long, or within the entire float
7134 if it's smaller than a long (for HFmode). */
7135 offset = MIN (3, total_bytes - 1) - offset;
7136 gcc_assert (offset >= 0);
7137 }
7138 }
7139 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7140 if (offset >= off
7141 && offset - off < len)
7142 ptr[offset - off] = value;
7143 }
7144 return MIN (len, total_bytes - off);
7145 }
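/* Editorial illustration (not part of the original sources):
   encoding the SFmode constant 1.0 (IEEE bits 0x3f800000) on a
   little-endian target with 8-bit bytes stores 00 00 80 3f,
   matching the target memory image produced by real_to_target.  */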
7146
7147 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7148 specified by EXPR into the buffer PTR of length LEN bytes.
7149 Return the number of bytes placed in the buffer, or zero
7150 upon failure. */
7151
7152 static int
7153 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7154 {
7155 int rsize, isize;
7156 tree part;
7157
7158 part = TREE_REALPART (expr);
7159 rsize = native_encode_expr (part, ptr, len, off);
7160 if (off == -1
7161 && rsize == 0)
7162 return 0;
7163 part = TREE_IMAGPART (expr);
7164 if (off != -1)
7165 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7166 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7167 if (off == -1
7168 && isize != rsize)
7169 return 0;
7170 return rsize + isize;
7171 }
7172
7173
7174 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7175 specified by EXPR into the buffer PTR of length LEN bytes.
7176 Return the number of bytes placed in the buffer, or zero
7177 upon failure. */
7178
7179 static int
7180 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7181 {
7182 unsigned i, count;
7183 int size, offset;
7184 tree itype, elem;
7185
7186 offset = 0;
7187 count = VECTOR_CST_NELTS (expr);
7188 itype = TREE_TYPE (TREE_TYPE (expr));
7189 size = GET_MODE_SIZE (TYPE_MODE (itype));
7190 for (i = 0; i < count; i++)
7191 {
7192 if (off >= size)
7193 {
7194 off -= size;
7195 continue;
7196 }
7197 elem = VECTOR_CST_ELT (expr, i);
7198 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7199 if ((off == -1 && res != size)
7200 || res == 0)
7201 return 0;
7202 offset += res;
7203 if (offset >= len)
7204 return offset;
7205 if (off != -1)
7206 off = 0;
7207 }
7208 return offset;
7209 }
7210
7211
7212 /* Subroutine of native_encode_expr. Encode the STRING_CST
7213 specified by EXPR into the buffer PTR of length LEN bytes.
7214 Return the number of bytes placed in the buffer, or zero
7215 upon failure. */
7216
7217 static int
7218 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7219 {
7220 tree type = TREE_TYPE (expr);
7221 HOST_WIDE_INT total_bytes;
7222
7223 if (TREE_CODE (type) != ARRAY_TYPE
7224 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7225 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7226 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7227 return 0;
7228 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7229 if ((off == -1 && total_bytes > len)
7230 || off >= total_bytes)
7231 return 0;
7232 if (off == -1)
7233 off = 0;
7234 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7235 {
7236 int written = 0;
7237 if (off < TREE_STRING_LENGTH (expr))
7238 {
7239 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7240 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7241 }
7242 memset (ptr + written, 0,
7243 MIN (total_bytes - written, len - written));
7244 }
7245 else
7246 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7247 return MIN (total_bytes - off, len);
7248 }
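/* Editorial illustration (not part of the original sources): for a
   STRING_CST "hi" of type char[8], TREE_STRING_LENGTH is 3 counting
   the terminating NUL, so three bytes are copied and the remaining
   five are zero-filled to match the object's memory image.  */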
7249
7250
7251 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7252 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7253 buffer PTR of length LEN bytes. If OFF is not -1 then start
7254 the encoding at byte offset OFF and encode at most LEN bytes.
7255 Return the number of bytes placed in the buffer, or zero upon failure. */
7256
7257 int
7258 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7259 {
7260 /* We don't support starting at negative offset and -1 is special. */
7261 if (off < -1)
7262 return 0;
7263
7264 switch (TREE_CODE (expr))
7265 {
7266 case INTEGER_CST:
7267 return native_encode_int (expr, ptr, len, off);
7268
7269 case REAL_CST:
7270 return native_encode_real (expr, ptr, len, off);
7271
7272 case FIXED_CST:
7273 return native_encode_fixed (expr, ptr, len, off);
7274
7275 case COMPLEX_CST:
7276 return native_encode_complex (expr, ptr, len, off);
7277
7278 case VECTOR_CST:
7279 return native_encode_vector (expr, ptr, len, off);
7280
7281 case STRING_CST:
7282 return native_encode_string (expr, ptr, len, off);
7283
7284 default:
7285 return 0;
7286 }
7287 }
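/* Typical round-trip usage, as an editorial sketch (the in-tree
   caller is fold_view_convert_expr below):

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf), -1);
     tree folded = len ? native_interpret_expr (type, buf, len)
                       : NULL_TREE;

   A zero return means the encoding failed and no folding is done.  */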
7288
7289
7290 /* Subroutine of native_interpret_expr. Interpret the contents of
7291 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7292 If the buffer cannot be interpreted, return NULL_TREE. */
7293
7294 static tree
7295 native_interpret_int (tree type, const unsigned char *ptr, int len)
7296 {
7297 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7298
7299 if (total_bytes > len
7300 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7301 return NULL_TREE;
7302
7303 wide_int result = wi::from_buffer (ptr, total_bytes);
7304
7305 return wide_int_to_tree (type, result);
7306 }
7307
7308
7309 /* Subroutine of native_interpret_expr. Interpret the contents of
7310 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7311 If the buffer cannot be interpreted, return NULL_TREE. */
7312
7313 static tree
7314 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7315 {
7316 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7317 double_int result;
7318 FIXED_VALUE_TYPE fixed_value;
7319
7320 if (total_bytes > len
7321 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7322 return NULL_TREE;
7323
7324 result = double_int::from_buffer (ptr, total_bytes);
7325 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7326
7327 return build_fixed (type, fixed_value);
7328 }
7329
7330
7331 /* Subroutine of native_interpret_expr. Interpret the contents of
7332 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7333 If the buffer cannot be interpreted, return NULL_TREE. */
7334
7335 static tree
7336 native_interpret_real (tree type, const unsigned char *ptr, int len)
7337 {
7338 machine_mode mode = TYPE_MODE (type);
7339 int total_bytes = GET_MODE_SIZE (mode);
7340 unsigned char value;
7341 /* There are always 32 bits in each long, no matter the size of
7342 the host's long. We handle floating point representations with
7343 up to 192 bits. */
7344 REAL_VALUE_TYPE r;
7345 long tmp[6];
7346
7347 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7348 if (total_bytes > len || total_bytes > 24)
7349 return NULL_TREE;
7350 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7351
7352 memset (tmp, 0, sizeof (tmp));
7353 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7354 bitpos += BITS_PER_UNIT)
7355 {
7356 /* Both OFFSET and BYTE index within a long;
7357 bitpos indexes the whole float. */
7358 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7359 if (UNITS_PER_WORD < 4)
7360 {
7361 int word = byte / UNITS_PER_WORD;
7362 if (WORDS_BIG_ENDIAN)
7363 word = (words - 1) - word;
7364 offset = word * UNITS_PER_WORD;
7365 if (BYTES_BIG_ENDIAN)
7366 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7367 else
7368 offset += byte % UNITS_PER_WORD;
7369 }
7370 else
7371 {
7372 offset = byte;
7373 if (BYTES_BIG_ENDIAN)
7374 {
7375 /* Reverse bytes within each long, or within the entire float
7376 if it's smaller than a long (for HFmode). */
7377 offset = MIN (3, total_bytes - 1) - offset;
7378 gcc_assert (offset >= 0);
7379 }
7380 }
7381 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7382
7383 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7384 }
7385
7386 real_from_target (&r, tmp, mode);
7387 return build_real (type, r);
7388 }
7389
7390
7391 /* Subroutine of native_interpret_expr. Interpret the contents of
7392 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7393 If the buffer cannot be interpreted, return NULL_TREE. */
7394
7395 static tree
7396 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7397 {
7398 tree etype, rpart, ipart;
7399 int size;
7400
7401 etype = TREE_TYPE (type);
7402 size = GET_MODE_SIZE (TYPE_MODE (etype));
7403 if (size * 2 > len)
7404 return NULL_TREE;
7405 rpart = native_interpret_expr (etype, ptr, size);
7406 if (!rpart)
7407 return NULL_TREE;
7408 ipart = native_interpret_expr (etype, ptr+size, size);
7409 if (!ipart)
7410 return NULL_TREE;
7411 return build_complex (type, rpart, ipart);
7412 }
7413
7414
7415 /* Subroutine of native_interpret_expr. Interpret the contents of
7416 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7417 If the buffer cannot be interpreted, return NULL_TREE. */
7418
7419 static tree
7420 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7421 {
7422 tree etype, elem;
7423 int i, size, count;
7424 tree *elements;
7425
7426 etype = TREE_TYPE (type);
7427 size = GET_MODE_SIZE (TYPE_MODE (etype));
7428 count = TYPE_VECTOR_SUBPARTS (type);
7429 if (size * count > len)
7430 return NULL_TREE;
7431
7432 elements = XALLOCAVEC (tree, count);
7433 for (i = count - 1; i >= 0; i--)
7434 {
7435 elem = native_interpret_expr (etype, ptr+(i*size), size);
7436 if (!elem)
7437 return NULL_TREE;
7438 elements[i] = elem;
7439 }
7440 return build_vector (type, elements);
7441 }
7442
7443
7444 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7445 the buffer PTR of length LEN as a constant of type TYPE. For
7446 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7447 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7448 return NULL_TREE. */
7449
7450 tree
7451 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7452 {
7453 switch (TREE_CODE (type))
7454 {
7455 case INTEGER_TYPE:
7456 case ENUMERAL_TYPE:
7457 case BOOLEAN_TYPE:
7458 case POINTER_TYPE:
7459 case REFERENCE_TYPE:
7460 return native_interpret_int (type, ptr, len);
7461
7462 case REAL_TYPE:
7463 return native_interpret_real (type, ptr, len);
7464
7465 case FIXED_POINT_TYPE:
7466 return native_interpret_fixed (type, ptr, len);
7467
7468 case COMPLEX_TYPE:
7469 return native_interpret_complex (type, ptr, len);
7470
7471 case VECTOR_TYPE:
7472 return native_interpret_vector (type, ptr, len);
7473
7474 default:
7475 return NULL_TREE;
7476 }
7477 }
7478
7479 /* Returns true if we can interpret the contents of a native encoding
7480 as TYPE. */
7481
7482 static bool
7483 can_native_interpret_type_p (tree type)
7484 {
7485 switch (TREE_CODE (type))
7486 {
7487 case INTEGER_TYPE:
7488 case ENUMERAL_TYPE:
7489 case BOOLEAN_TYPE:
7490 case POINTER_TYPE:
7491 case REFERENCE_TYPE:
7492 case FIXED_POINT_TYPE:
7493 case REAL_TYPE:
7494 case COMPLEX_TYPE:
7495 case VECTOR_TYPE:
7496 return true;
7497 default:
7498 return false;
7499 }
7500 }
7501
7502 /* Return true iff a constant of type TYPE is accepted by
7503 native_encode_expr. */
7504
7505 bool
7506 can_native_encode_type_p (tree type)
7507 {
7508 switch (TREE_CODE (type))
7509 {
7510 case INTEGER_TYPE:
7511 case REAL_TYPE:
7512 case FIXED_POINT_TYPE:
7513 case COMPLEX_TYPE:
7514 case VECTOR_TYPE:
7515 case POINTER_TYPE:
7516 return true;
7517 default:
7518 return false;
7519 }
7520 }
7521
7522 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7523 TYPE at compile-time. If we're unable to perform the conversion
7524 return NULL_TREE. */
7525
7526 static tree
7527 fold_view_convert_expr (tree type, tree expr)
7528 {
7529 /* We support up to 512-bit values (for V8DFmode). */
7530 unsigned char buffer[64];
7531 int len;
7532
7533 /* Check that the host and target are sane. */
7534 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7535 return NULL_TREE;
7536
7537 len = native_encode_expr (expr, buffer, sizeof (buffer));
7538 if (len == 0)
7539 return NULL_TREE;
7540
7541 return native_interpret_expr (type, buffer, len);
7542 }
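/* Editorial illustration (not part of the original sources): on a
   target where int and float are both 32 bits wide, folding
   VIEW_CONVERT_EXPR<int>(1.0f) through the encode/interpret pair
   above yields the INTEGER_CST 0x3f800000, the IEEE bit image of
   1.0f.  */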
7543
7544 /* Build an expression for the address of T. Folds away INDIRECT_REF
7545 to avoid confusing the gimplify process. */
7546
7547 tree
7548 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7549 {
7550 /* The size of the object is not relevant when talking about its address. */
7551 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7552 t = TREE_OPERAND (t, 0);
7553
7554 if (TREE_CODE (t) == INDIRECT_REF)
7555 {
7556 t = TREE_OPERAND (t, 0);
7557
7558 if (TREE_TYPE (t) != ptrtype)
7559 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7560 }
7561 else if (TREE_CODE (t) == MEM_REF
7562 && integer_zerop (TREE_OPERAND (t, 1)))
7563 return TREE_OPERAND (t, 0);
7564 else if (TREE_CODE (t) == MEM_REF
7565 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7566 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7567 TREE_OPERAND (t, 0),
7568 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7569 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7570 {
7571 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7572
7573 if (TREE_TYPE (t) != ptrtype)
7574 t = fold_convert_loc (loc, ptrtype, t);
7575 }
7576 else
7577 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7578
7579 return t;
7580 }
7581
7582 /* Build an expression for the address of T. */
7583
7584 tree
7585 build_fold_addr_expr_loc (location_t loc, tree t)
7586 {
7587 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7588
7589 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7590 }
7591
7592 /* Fold a unary expression of code CODE and type TYPE with operand
7593 OP0. Return the folded expression if folding is successful.
7594 Otherwise, return NULL_TREE. */
7595
7596 tree
7597 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7598 {
7599 tree tem;
7600 tree arg0;
7601 enum tree_code_class kind = TREE_CODE_CLASS (code);
7602
7603 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7604 && TREE_CODE_LENGTH (code) == 1);
7605
7606 arg0 = op0;
7607 if (arg0)
7608 {
7609 if (CONVERT_EXPR_CODE_P (code)
7610 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7611 {
7612 /* Don't use STRIP_NOPS, because signedness of argument type
7613 matters. */
7614 STRIP_SIGN_NOPS (arg0);
7615 }
7616 else
7617 {
7618 /* Strip any conversions that don't change the mode. This
7619 is safe for every expression, except for a comparison
7620 expression because its signedness is derived from its
7621 operands.
7622
7623 Note that this is done as an internal manipulation within
7624 the constant folder, in order to find the simplest
7625 representation of the arguments so that their form can be
7626 studied. In any case, the appropriate type conversions
7627 should be put back in the tree that will get out of the
7628 constant folder. */
7629 STRIP_NOPS (arg0);
7630 }
7631
7632 if (CONSTANT_CLASS_P (arg0))
7633 {
7634 tree tem = const_unop (code, type, arg0);
7635 if (tem)
7636 {
7637 if (TREE_TYPE (tem) != type)
7638 tem = fold_convert_loc (loc, type, tem);
7639 return tem;
7640 }
7641 }
7642 }
7643
7644 tem = generic_simplify (loc, code, type, op0);
7645 if (tem)
7646 return tem;
7647
7648 if (TREE_CODE_CLASS (code) == tcc_unary)
7649 {
7650 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7651 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7652 fold_build1_loc (loc, code, type,
7653 fold_convert_loc (loc, TREE_TYPE (op0),
7654 TREE_OPERAND (arg0, 1))));
7655 else if (TREE_CODE (arg0) == COND_EXPR)
7656 {
7657 tree arg01 = TREE_OPERAND (arg0, 1);
7658 tree arg02 = TREE_OPERAND (arg0, 2);
7659 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7660 arg01 = fold_build1_loc (loc, code, type,
7661 fold_convert_loc (loc,
7662 TREE_TYPE (op0), arg01));
7663 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7664 arg02 = fold_build1_loc (loc, code, type,
7665 fold_convert_loc (loc,
7666 TREE_TYPE (op0), arg02));
7667 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7668 arg01, arg02);
7669
7670 /* If this was a conversion, and all we did was to move it
7671 inside the COND_EXPR, bring it back out. But leave it if
7672 it is a conversion from integer to integer and the
7673 result precision is no wider than a word since such a
7674 conversion is cheap and may be optimized away by combine,
7675 while it couldn't if it were outside the COND_EXPR. Then return
7676 so we don't get into an infinite recursion loop taking the
7677 conversion out and then back in. */
7678
7679 if ((CONVERT_EXPR_CODE_P (code)
7680 || code == NON_LVALUE_EXPR)
7681 && TREE_CODE (tem) == COND_EXPR
7682 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7683 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7684 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7685 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7686 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7687 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7688 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7689 && (INTEGRAL_TYPE_P
7690 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7691 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7692 || flag_syntax_only))
7693 tem = build1_loc (loc, code, type,
7694 build3 (COND_EXPR,
7695 TREE_TYPE (TREE_OPERAND
7696 (TREE_OPERAND (tem, 1), 0)),
7697 TREE_OPERAND (tem, 0),
7698 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7699 TREE_OPERAND (TREE_OPERAND (tem, 2),
7700 0)));
7701 return tem;
7702 }
7703 }
7704
7705 switch (code)
7706 {
7707 case NON_LVALUE_EXPR:
7708 if (!maybe_lvalue_p (op0))
7709 return fold_convert_loc (loc, type, op0);
7710 return NULL_TREE;
7711
7712 CASE_CONVERT:
7713 case FLOAT_EXPR:
7714 case FIX_TRUNC_EXPR:
7715 if (COMPARISON_CLASS_P (op0))
7716 {
7717 /* If we have (type) (a CMP b) and type is an integral type, return
7718 new expression involving the new type. Canonicalize
7719 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7720 non-integral type.
7721 Do not fold the result, as that would not simplify further;
7722 folding again would only recurse. */
7723 if (TREE_CODE (type) == BOOLEAN_TYPE)
7724 return build2_loc (loc, TREE_CODE (op0), type,
7725 TREE_OPERAND (op0, 0),
7726 TREE_OPERAND (op0, 1));
7727 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7728 && TREE_CODE (type) != VECTOR_TYPE)
7729 return build3_loc (loc, COND_EXPR, type, op0,
7730 constant_boolean_node (true, type),
7731 constant_boolean_node (false, type));
7732 }
7733
7734 /* Handle (T *)&A.B.C for A being of type T and B and C
7735 living at offset zero. This occurs frequently in
7736 C++ upcasting and then accessing the base. */
7737 if (TREE_CODE (op0) == ADDR_EXPR
7738 && POINTER_TYPE_P (type)
7739 && handled_component_p (TREE_OPERAND (op0, 0)))
7740 {
7741 HOST_WIDE_INT bitsize, bitpos;
7742 tree offset;
7743 machine_mode mode;
7744 int unsignedp, reversep, volatilep;
7745 tree base
7746 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7747 &offset, &mode, &unsignedp, &reversep,
7748 &volatilep);
7749 /* If the reference was to a (constant) zero offset, we can use
7750 the address of the base if it has the same base type
7751 as the result type and the pointer type is unqualified. */
7752 if (! offset && bitpos == 0
7753 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7754 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7755 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7756 return fold_convert_loc (loc, type,
7757 build_fold_addr_expr_loc (loc, base));
7758 }
7759
7760 if (TREE_CODE (op0) == MODIFY_EXPR
7761 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7762 /* Detect assigning a bitfield. */
7763 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7764 && DECL_BIT_FIELD
7765 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7766 {
7767 /* Don't leave an assignment inside a conversion
7768 unless assigning a bitfield. */
7769 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7770 /* First do the assignment, then return converted constant. */
7771 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7772 TREE_NO_WARNING (tem) = 1;
7773 TREE_USED (tem) = 1;
7774 return tem;
7775 }
7776
7777 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7778 constant (if x has signed type, the sign bit cannot be set
7779 in c). This folds extension into the BIT_AND_EXPR.
7780 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7781 very likely don't have maximal range for their precision and this
7782 transformation effectively doesn't preserve non-maximal ranges. */
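/* Editorial illustration (not part of the original sources): for an
   unsigned char X, (unsigned int) (X & 0x7f) becomes
   (unsigned int) X & 0x7f, folding the widening conversion into the
   BIT_AND_EXPR.  */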
7783 if (TREE_CODE (type) == INTEGER_TYPE
7784 && TREE_CODE (op0) == BIT_AND_EXPR
7785 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7786 {
7787 tree and_expr = op0;
7788 tree and0 = TREE_OPERAND (and_expr, 0);
7789 tree and1 = TREE_OPERAND (and_expr, 1);
7790 int change = 0;
7791
7792 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7793 || (TYPE_PRECISION (type)
7794 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7795 change = 1;
7796 else if (TYPE_PRECISION (TREE_TYPE (and1))
7797 <= HOST_BITS_PER_WIDE_INT
7798 && tree_fits_uhwi_p (and1))
7799 {
7800 unsigned HOST_WIDE_INT cst;
7801
7802 cst = tree_to_uhwi (and1);
7803 cst &= HOST_WIDE_INT_M1U
7804 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7805 change = (cst == 0);
7806 if (change
7807 && !flag_syntax_only
7808 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7809 == ZERO_EXTEND))
7810 {
7811 tree uns = unsigned_type_for (TREE_TYPE (and0));
7812 and0 = fold_convert_loc (loc, uns, and0);
7813 and1 = fold_convert_loc (loc, uns, and1);
7814 }
7815 }
7816 if (change)
7817 {
7818 tem = force_fit_type (type, wi::to_widest (and1), 0,
7819 TREE_OVERFLOW (and1));
7820 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7821 fold_convert_loc (loc, type, and0), tem);
7822 }
7823 }
7824
7825 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7826 cast (T1)X will fold away. We assume that this happens when X itself
7827 is a cast. */
7828 if (POINTER_TYPE_P (type)
7829 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7830 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7831 {
7832 tree arg00 = TREE_OPERAND (arg0, 0);
7833 tree arg01 = TREE_OPERAND (arg0, 1);
7834
7835 return fold_build_pointer_plus_loc
7836 (loc, fold_convert_loc (loc, type, arg00), arg01);
7837 }
7838
7839 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7840 of the same precision, and X is an integer type not narrower than
7841 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7842 if (INTEGRAL_TYPE_P (type)
7843 && TREE_CODE (op0) == BIT_NOT_EXPR
7844 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7845 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7846 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7847 {
7848 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7849 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7850 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7851 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7852 fold_convert_loc (loc, type, tem));
7853 }
7854
7855 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7856 type of X and Y (integer types only). */
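/* Editorial illustration (not part of the original sources): with
   32-bit int and 8-bit unsigned char, (unsigned char) (x * y)
   equals ((unsigned char) x * (unsigned char) y) modulo 256, so the
   narrowing moves into the operands; the unsigned MULT_TYPE below
   keeps the intermediate product free of signed overflow.  */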
7857 if (INTEGRAL_TYPE_P (type)
7858 && TREE_CODE (op0) == MULT_EXPR
7859 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7860 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7861 {
7862 /* Be careful not to introduce new overflows. */
7863 tree mult_type;
7864 if (TYPE_OVERFLOW_WRAPS (type))
7865 mult_type = type;
7866 else
7867 mult_type = unsigned_type_for (type);
7868
7869 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7870 {
7871 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7872 fold_convert_loc (loc, mult_type,
7873 TREE_OPERAND (op0, 0)),
7874 fold_convert_loc (loc, mult_type,
7875 TREE_OPERAND (op0, 1)));
7876 return fold_convert_loc (loc, type, tem);
7877 }
7878 }
7879
7880 return NULL_TREE;
7881
7882 case VIEW_CONVERT_EXPR:
7883 if (TREE_CODE (op0) == MEM_REF)
7884 {
7885 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7886 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7887 tem = fold_build2_loc (loc, MEM_REF, type,
7888 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7889 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7890 return tem;
7891 }
7892
7893 return NULL_TREE;
7894
7895 case NEGATE_EXPR:
7896 tem = fold_negate_expr (loc, arg0);
7897 if (tem)
7898 return fold_convert_loc (loc, type, tem);
7899 return NULL_TREE;
7900
7901 case ABS_EXPR:
7902 /* Convert fabs((double)float) into (double)fabsf(float). */
7903 if (TREE_CODE (arg0) == NOP_EXPR
7904 && TREE_CODE (type) == REAL_TYPE)
7905 {
7906 tree targ0 = strip_float_extensions (arg0);
7907 if (targ0 != arg0)
7908 return fold_convert_loc (loc, type,
7909 fold_build1_loc (loc, ABS_EXPR,
7910 TREE_TYPE (targ0),
7911 targ0));
7912 }
7913 return NULL_TREE;
7914
7915 case BIT_NOT_EXPR:
7916 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7917 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7918 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7919 fold_convert_loc (loc, type,
7920 TREE_OPERAND (arg0, 0)))))
7921 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7922 fold_convert_loc (loc, type,
7923 TREE_OPERAND (arg0, 1)));
7924 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7925 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7926 fold_convert_loc (loc, type,
7927 TREE_OPERAND (arg0, 1)))))
7928 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7929 fold_convert_loc (loc, type,
7930 TREE_OPERAND (arg0, 0)), tem);
7931
7932 return NULL_TREE;
7933
7934 case TRUTH_NOT_EXPR:
7935 /* Note that the operand of this must be an int
7936 and its values must be 0 or 1.
7937 ("true" is a fixed value perhaps depending on the language,
7938 but we don't handle values other than 1 correctly yet.) */
7939 tem = fold_truth_not_expr (loc, arg0);
7940 if (!tem)
7941 return NULL_TREE;
7942 return fold_convert_loc (loc, type, tem);
7943
7944 case INDIRECT_REF:
7945 /* Fold *&X to X if X is an lvalue. */
7946 if (TREE_CODE (op0) == ADDR_EXPR)
7947 {
7948 tree op00 = TREE_OPERAND (op0, 0);
7949 if ((VAR_P (op00)
7950 || TREE_CODE (op00) == PARM_DECL
7951 || TREE_CODE (op00) == RESULT_DECL)
7952 && !TREE_READONLY (op00))
7953 return op00;
7954 }
7955 return NULL_TREE;
7956
7957 default:
7958 return NULL_TREE;
7959 } /* switch (code) */
7960 }
7961
7962
7963 /* If the operation was a conversion, do _not_ mark a resulting constant
7964 with TREE_OVERFLOW if the original constant was not. These conversions
7965 have implementation defined behavior and retaining the TREE_OVERFLOW
7966 flag here would confuse later passes such as VRP. */
7967 tree
7968 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7969 tree type, tree op0)
7970 {
7971 tree res = fold_unary_loc (loc, code, type, op0);
7972 if (res
7973 && TREE_CODE (res) == INTEGER_CST
7974 && TREE_CODE (op0) == INTEGER_CST
7975 && CONVERT_EXPR_CODE_P (code))
7976 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7977
7978 return res;
7979 }
7980
7981 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7982 operands OP0 and OP1. LOC is the location of the resulting expression.
7983 ARG0 and ARG1 are OP0 and OP1 with conversion NOPs stripped.
7984 Return the folded expression if folding is successful. Otherwise,
7985 return NULL_TREE. */
7986 static tree
7987 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7988 tree arg0, tree arg1, tree op0, tree op1)
7989 {
7990 tree tem;
7991
7992 /* We only do these simplifications if we are optimizing. */
7993 if (!optimize)
7994 return NULL_TREE;
7995
7996 /* Check for things like (A || B) && (A || C). We can convert this
7997 to A || (B && C). Note that either operator can be any of the four
7998 truth and/or operations and the transformation will still be
7999 valid. Also note that we only care about order for the
8000 ANDIF and ORIF operators. If B contains side effects, this
8001 might change the truth-value of A. */
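/* Editorial illustration (not part of the original sources): with
   code == TRUTH_ANDIF_EXPR, (a || b) && (a || c) matches the
   operand_equal_p (a00, a10) case below and becomes a || (b && c),
   valid because b is required to have no side effects.  */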
8002 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8003 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8004 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8005 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8006 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8007 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8008 {
8009 tree a00 = TREE_OPERAND (arg0, 0);
8010 tree a01 = TREE_OPERAND (arg0, 1);
8011 tree a10 = TREE_OPERAND (arg1, 0);
8012 tree a11 = TREE_OPERAND (arg1, 1);
8013 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8014 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8015 && (code == TRUTH_AND_EXPR
8016 || code == TRUTH_OR_EXPR));
8017
8018 if (operand_equal_p (a00, a10, 0))
8019 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8020 fold_build2_loc (loc, code, type, a01, a11));
8021 else if (commutative && operand_equal_p (a00, a11, 0))
8022 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8023 fold_build2_loc (loc, code, type, a01, a10));
8024 else if (commutative && operand_equal_p (a01, a10, 0))
8025 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8026 fold_build2_loc (loc, code, type, a00, a11));
8027
8028 /* This case is tricky because we must either have commutative
8029 operators or else A10 must not have side-effects. */
8030
8031 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8032 && operand_equal_p (a01, a11, 0))
8033 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8034 fold_build2_loc (loc, code, type, a00, a10),
8035 a01);
8036 }
8037
8038 /* See if we can build a range comparison. */
8039 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8040 return tem;
8041
8042 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8043 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8044 {
8045 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8046 if (tem)
8047 return fold_build2_loc (loc, code, type, tem, arg1);
8048 }
8049
8050 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8051 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8052 {
8053 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8054 if (tem)
8055 return fold_build2_loc (loc, code, type, arg0, tem);
8056 }
8057
8058 /* Check for the possibility of merging component references. If our
8059 lhs is another similar operation, try to merge its rhs with our
8060 rhs. Then try to merge our lhs and rhs. */
8061 if (TREE_CODE (arg0) == code
8062 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8063 TREE_OPERAND (arg0, 1), arg1)))
8064 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8065
8066 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8067 return tem;
8068
8069 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8070 && (code == TRUTH_AND_EXPR
8071 || code == TRUTH_ANDIF_EXPR
8072 || code == TRUTH_OR_EXPR
8073 || code == TRUTH_ORIF_EXPR))
8074 {
8075 enum tree_code ncode, icode;
8076
8077 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8078 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8079 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8080
8081 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8082 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8083 We don't want to pack more than two leaves into a non-IF AND/OR
8084 expression.
8085 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8086 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8087 If the inner right-hand side of the left-hand operand has
8088 side-effects, or isn't simple, then we can't add to it, as
8089 otherwise we might destroy the if-sequence. */
8090 if (TREE_CODE (arg0) == icode
8091 && simple_operand_p_2 (arg1)
8092 /* Needed for sequence points to handle trapping, and
8093 side-effects. */
8094 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8095 {
8096 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8097 arg1);
8098 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8099 tem);
8100 }
8101 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8102 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8103 else if (TREE_CODE (arg1) == icode
8104 && simple_operand_p_2 (arg0)
8105 /* Needed for sequence points to handle trapping, and
8106 side-effects. */
8107 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8108 {
8109 tem = fold_build2_loc (loc, ncode, type,
8110 arg0, TREE_OPERAND (arg1, 0));
8111 return fold_build2_loc (loc, icode, type, tem,
8112 TREE_OPERAND (arg1, 1));
8113 }
8114 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8115 into (A OR B).
8116 For sequence point consistency, we need to check for trapping
8117 and side-effects. */
8118 else if (code == icode && simple_operand_p_2 (arg0)
8119 && simple_operand_p_2 (arg1))
8120 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8121 }
8122
8123 return NULL_TREE;
8124 }
8125
8126 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8127 by changing CODE to reduce the magnitude of constants involved in
8128 ARG0 of the comparison.
8129 Returns a canonicalized comparison tree if a simplification was
8130 possible, otherwise returns NULL_TREE.
8131 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8132 valid if signed overflow is undefined. */
8133
8134 static tree
8135 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8136 tree arg0, tree arg1,
8137 bool *strict_overflow_p)
8138 {
8139 enum tree_code code0 = TREE_CODE (arg0);
8140 tree t, cst0 = NULL_TREE;
8141 int sgn0;
8142
8143 /* Match A +- CST code arg1. We can change this only if overflow
8144 is undefined. */
8145 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8146 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8147 /* In principle pointers also have undefined overflow behavior,
8148 but that causes problems elsewhere. */
8149 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8150 && (code0 == MINUS_EXPR
8151 || code0 == PLUS_EXPR)
8152 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8153 return NULL_TREE;
8154
8155 /* Identify the constant in arg0 and its sign. */
8156 cst0 = TREE_OPERAND (arg0, 1);
8157 sgn0 = tree_int_cst_sgn (cst0);
8158
8159 /* Overflowed constants and zero will cause problems. */
8160 if (integer_zerop (cst0)
8161 || TREE_OVERFLOW (cst0))
8162 return NULL_TREE;
8163
8164 /* See if we can reduce the magnitude of the constant in
8165 arg0 by changing the comparison code. */
8166 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8167 if (code == LT_EXPR
8168 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8169 code = LE_EXPR;
8170 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8171 else if (code == GT_EXPR
8172 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8173 code = GE_EXPR;
8174 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8175 else if (code == LE_EXPR
8176 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8177 code = LT_EXPR;
8178 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8179 else if (code == GE_EXPR
8180 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8181 code = GT_EXPR;
8182 else
8183 return NULL_TREE;
8184 *strict_overflow_p = true;
8185
8186 /* Now build the constant reduced in magnitude. But not if that
8187 would produce one outside of its type's range. */
8188 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8189 && ((sgn0 == 1
8190 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8191 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8192 || (sgn0 == -1
8193 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8194 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8195 return NULL_TREE;
8196
8197 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8198 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8199 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8200 t = fold_convert (TREE_TYPE (arg1), t);
8201
8202 return fold_build2_loc (loc, code, type, t, arg1);
8203 }
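/* Editorial illustration (not part of the original sources): for
   signed A with undefined overflow, A - 5 < B becomes A - 4 <= B:
   LT_EXPR turns into LE_EXPR and the constant's magnitude shrinks
   by one.  */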
8204
8205 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8206 overflow further. Try to decrease the magnitude of constants involved
8207 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8208 and put sole constants at the second argument position.
8209 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8210
8211 static tree
8212 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8213 tree arg0, tree arg1)
8214 {
8215 tree t;
8216 bool strict_overflow_p;
8217 const char * const warnmsg = G_("assuming signed overflow does not occur "
8218 "when reducing constant in comparison");
8219
8220 /* Try canonicalization by simplifying arg0. */
8221 strict_overflow_p = false;
8222 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8223 &strict_overflow_p);
8224 if (t)
8225 {
8226 if (strict_overflow_p)
8227 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8228 return t;
8229 }
8230
8231 /* Try canonicalization by simplifying arg1 using the swapped
8232 comparison. */
8233 code = swap_tree_comparison (code);
8234 strict_overflow_p = false;
8235 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8236 &strict_overflow_p);
8237 if (t && strict_overflow_p)
8238 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8239 return t;
8240 }
8241
8242 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8243 space. This is used to avoid issuing overflow warnings for
8244 expressions like &p->x, which cannot wrap. */
8245
8246 static bool
8247 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8248 {
8249 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8250 return true;
8251
8252 if (bitpos < 0)
8253 return true;
8254
8255 wide_int wi_offset;
8256 int precision = TYPE_PRECISION (TREE_TYPE (base));
8257 if (offset == NULL_TREE)
8258 wi_offset = wi::zero (precision);
8259 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8260 return true;
8261 else
8262 wi_offset = offset;
8263
8264 bool overflow;
8265 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8266 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8267 if (overflow)
8268 return true;
8269
8270 if (!wi::fits_uhwi_p (total))
8271 return true;
8272
8273 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8274 if (size <= 0)
8275 return true;
8276
8277 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8278 array. */
8279 if (TREE_CODE (base) == ADDR_EXPR)
8280 {
8281 HOST_WIDE_INT base_size;
8282
8283 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8284 if (base_size > 0 && size < base_size)
8285 size = base_size;
8286 }
8287
8288 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8289 }
8290
8291 /* Return a positive integer when the symbol DECL is known to have
8292 a nonzero address, zero when it's known not to (e.g., it's a weak
8293 symbol), and a negative integer when the symbol is not yet in the
8294 symbol table and so whether or not its address is zero is unknown.
8295 For function-local objects, always return a positive integer. */
8296 static int
8297 maybe_nonzero_address (tree decl)
8298 {
8299 if (DECL_P (decl) && decl_in_symtab_p (decl))
8300 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8301 return symbol->nonzero_address ();
8302
8303 /* Function local objects are never NULL. */
8304 if (DECL_P (decl)
8305 && (DECL_CONTEXT (decl)
8306 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8307 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8308 return 1;
8309
8310 return -1;
8311 }
8312
8313 /* Subroutine of fold_binary. This routine performs all of the
8314 transformations that are common to the equality/inequality
8315 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8316 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8317 fold_binary should call fold_binary. Fold a comparison with
8318 tree code CODE and type TYPE with operands OP0 and OP1. Return
8319 the folded comparison or NULL_TREE. */
8320
8321 static tree
8322 fold_comparison (location_t loc, enum tree_code code, tree type,
8323 tree op0, tree op1)
8324 {
8325 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8326 tree arg0, arg1, tem;
8327
8328 arg0 = op0;
8329 arg1 = op1;
8330
8331 STRIP_SIGN_NOPS (arg0);
8332 STRIP_SIGN_NOPS (arg1);
8333
8334 /* For comparisons of pointers we can decompose it to a compile time
8335 comparison of the base objects and the offsets into the object.
8336 This requires at least one operand being an ADDR_EXPR or a
8337 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8338 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8339 && (TREE_CODE (arg0) == ADDR_EXPR
8340 || TREE_CODE (arg1) == ADDR_EXPR
8341 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8342 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8343 {
8344 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8345 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8346 machine_mode mode;
8347 int volatilep, reversep, unsignedp;
8348 bool indirect_base0 = false, indirect_base1 = false;
8349
8350 /* Get base and offset for the access. Strip ADDR_EXPR for
8351 get_inner_reference, but put it back by stripping INDIRECT_REF
8352 off the base object if possible. indirect_baseN will be true
8353 if baseN is not an address but refers to the object itself. */
8354 base0 = arg0;
8355 if (TREE_CODE (arg0) == ADDR_EXPR)
8356 {
8357 base0
8358 = get_inner_reference (TREE_OPERAND (arg0, 0),
8359 &bitsize, &bitpos0, &offset0, &mode,
8360 &unsignedp, &reversep, &volatilep);
8361 if (TREE_CODE (base0) == INDIRECT_REF)
8362 base0 = TREE_OPERAND (base0, 0);
8363 else
8364 indirect_base0 = true;
8365 }
8366 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8367 {
8368 base0 = TREE_OPERAND (arg0, 0);
8369 STRIP_SIGN_NOPS (base0);
8370 if (TREE_CODE (base0) == ADDR_EXPR)
8371 {
8372 base0
8373 = get_inner_reference (TREE_OPERAND (base0, 0),
8374 &bitsize, &bitpos0, &offset0, &mode,
8375 &unsignedp, &reversep, &volatilep);
8376 if (TREE_CODE (base0) == INDIRECT_REF)
8377 base0 = TREE_OPERAND (base0, 0);
8378 else
8379 indirect_base0 = true;
8380 }
8381 if (offset0 == NULL_TREE || integer_zerop (offset0))
8382 offset0 = TREE_OPERAND (arg0, 1);
8383 else
8384 offset0 = size_binop (PLUS_EXPR, offset0,
8385 TREE_OPERAND (arg0, 1));
8386 if (TREE_CODE (offset0) == INTEGER_CST)
8387 {
8388 offset_int tem = wi::sext (wi::to_offset (offset0),
8389 TYPE_PRECISION (sizetype));
8390 tem <<= LOG2_BITS_PER_UNIT;
8391 tem += bitpos0;
8392 if (wi::fits_shwi_p (tem))
8393 {
8394 bitpos0 = tem.to_shwi ();
8395 offset0 = NULL_TREE;
8396 }
8397 }
8398 }
8399
8400 base1 = arg1;
8401 if (TREE_CODE (arg1) == ADDR_EXPR)
8402 {
8403 base1
8404 = get_inner_reference (TREE_OPERAND (arg1, 0),
8405 &bitsize, &bitpos1, &offset1, &mode,
8406 &unsignedp, &reversep, &volatilep);
8407 if (TREE_CODE (base1) == INDIRECT_REF)
8408 base1 = TREE_OPERAND (base1, 0);
8409 else
8410 indirect_base1 = true;
8411 }
8412 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8413 {
8414 base1 = TREE_OPERAND (arg1, 0);
8415 STRIP_SIGN_NOPS (base1);
8416 if (TREE_CODE (base1) == ADDR_EXPR)
8417 {
8418 base1
8419 = get_inner_reference (TREE_OPERAND (base1, 0),
8420 &bitsize, &bitpos1, &offset1, &mode,
8421 &unsignedp, &reversep, &volatilep);
8422 if (TREE_CODE (base1) == INDIRECT_REF)
8423 base1 = TREE_OPERAND (base1, 0);
8424 else
8425 indirect_base1 = true;
8426 }
8427 if (offset1 == NULL_TREE || integer_zerop (offset1))
8428 offset1 = TREE_OPERAND (arg1, 1);
8429 else
8430 offset1 = size_binop (PLUS_EXPR, offset1,
8431 TREE_OPERAND (arg1, 1));
8432 if (TREE_CODE (offset1) == INTEGER_CST)
8433 {
8434 offset_int tem = wi::sext (wi::to_offset (offset1),
8435 TYPE_PRECISION (sizetype));
8436 tem <<= LOG2_BITS_PER_UNIT;
8437 tem += bitpos1;
8438 if (wi::fits_shwi_p (tem))
8439 {
8440 bitpos1 = tem.to_shwi ();
8441 offset1 = NULL_TREE;
8442 }
8443 }
8444 }
8445
8446 /* If we have equivalent bases we might be able to simplify. */
8447 if (indirect_base0 == indirect_base1
8448 && operand_equal_p (base0, base1,
8449 indirect_base0 ? OEP_ADDRESS_OF : 0))
8450 {
8451 /* We can fold this expression to a constant if the non-constant
8452 offset parts are equal. */
8453 if (offset0 == offset1
8454 || (offset0 && offset1
8455 && operand_equal_p (offset0, offset1, 0)))
8456 {
8457 if (!equality_code
8458 && bitpos0 != bitpos1
8459 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8460 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8461 fold_overflow_warning (("assuming pointer wraparound does not "
8462 "occur when comparing P +- C1 with "
8463 "P +- C2"),
8464 WARN_STRICT_OVERFLOW_CONDITIONAL);
8465
8466 switch (code)
8467 {
8468 case EQ_EXPR:
8469 return constant_boolean_node (bitpos0 == bitpos1, type);
8470 case NE_EXPR:
8471 return constant_boolean_node (bitpos0 != bitpos1, type);
8472 case LT_EXPR:
8473 return constant_boolean_node (bitpos0 < bitpos1, type);
8474 case LE_EXPR:
8475 return constant_boolean_node (bitpos0 <= bitpos1, type);
8476 case GE_EXPR:
8477 return constant_boolean_node (bitpos0 >= bitpos1, type);
8478 case GT_EXPR:
8479 return constant_boolean_node (bitpos0 > bitpos1, type);
8480 default:;
8481 }
8482 }
8483 /* We can simplify the comparison to a comparison of the variable
8484 offset parts if the constant offset parts are equal.
8485 Be careful to use signed sizetype here because otherwise we
8486 mess with array offsets in the wrong way. This is possible
8487 because pointer arithmetic is restricted to remain within an
8488 object and overflow on pointer differences is undefined as of
8489 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8490 else if (bitpos0 == bitpos1)
8491 {
8492 /* By converting to signed sizetype we cover middle-end pointer
8493 arithmetic which operates on unsigned pointer types of size
8494 type size and ARRAY_REF offsets which are properly sign or
8495 zero extended from their type in case it is narrower than
8496 sizetype. */
8497 if (offset0 == NULL_TREE)
8498 offset0 = build_int_cst (ssizetype, 0);
8499 else
8500 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8501 if (offset1 == NULL_TREE)
8502 offset1 = build_int_cst (ssizetype, 0);
8503 else
8504 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8505
8506 if (!equality_code
8507 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8508 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8509 fold_overflow_warning (("assuming pointer wraparound does not "
8510 "occur when comparing P +- C1 with "
8511 "P +- C2"),
8512 WARN_STRICT_OVERFLOW_COMPARISON);
8513
8514 return fold_build2_loc (loc, code, type, offset0, offset1);
8515 }
8516 }
8517 /* For equal offsets we can simplify to a comparison of the
8518 base addresses. */
8519 else if (bitpos0 == bitpos1
8520 && (indirect_base0
8521 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8522 && (indirect_base1
8523 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8524 && ((offset0 == offset1)
8525 || (offset0 && offset1
8526 && operand_equal_p (offset0, offset1, 0))))
8527 {
8528 if (indirect_base0)
8529 base0 = build_fold_addr_expr_loc (loc, base0);
8530 if (indirect_base1)
8531 base1 = build_fold_addr_expr_loc (loc, base1);
8532 return fold_build2_loc (loc, code, type, base0, base1);
8533 }
8534 /* Comparison between an ordinary (non-weak) symbol and a null
8535 pointer can be eliminated since such symbols must have a non
8536 null address. In C, relational expressions between pointers
8537 to objects and null pointers are undefined. The results
8538 below follow the C++ rules with the additional property that
8539 every object pointer compares greater than a null pointer.
8540 */
8541 else if (((DECL_P (base0)
8542 && maybe_nonzero_address (base0) > 0
8543 /* Avoid folding references to struct members at offset 0 to
8544 prevent tests like '&ptr->firstmember == 0' from getting
8545 eliminated. When ptr is null, although the -> expression
8546 is strictly speaking invalid, GCC retains it as a matter
8547 of QoI. See PR c/44555. */
8548 && (offset0 == NULL_TREE && bitpos0 != 0))
8549 || CONSTANT_CLASS_P (base0))
8550 && indirect_base0
8551 /* The caller guarantees that when one of the arguments is
8552 constant (i.e., null in this case) it is second. */
8553 && integer_zerop (arg1))
8554 {
8555 switch (code)
8556 {
8557 case EQ_EXPR:
8558 case LE_EXPR:
8559 case LT_EXPR:
8560 return constant_boolean_node (false, type);
8561 case GE_EXPR:
8562 case GT_EXPR:
8563 case NE_EXPR:
8564 return constant_boolean_node (true, type);
8565 default:
8566 gcc_unreachable ();
8567 }
8568 }
8569 }
8570
8571 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8572 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8573 the resulting offset is smaller in absolute value than the
8574 original one and has the same sign. */
8575 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8576 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8577 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8578 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8579 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8580 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8581 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8582 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8583 {
8584 tree const1 = TREE_OPERAND (arg0, 1);
8585 tree const2 = TREE_OPERAND (arg1, 1);
8586 tree variable1 = TREE_OPERAND (arg0, 0);
8587 tree variable2 = TREE_OPERAND (arg1, 0);
8588 tree cst;
8589 const char * const warnmsg = G_("assuming signed overflow does not "
8590 "occur when combining constants around "
8591 "a comparison");
8592
8593 /* Put the constant on the side where it doesn't overflow and is
8594 of lower absolute value and of the same sign as before. */
8595 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8596 ? MINUS_EXPR : PLUS_EXPR,
8597 const2, const1);
8598 if (!TREE_OVERFLOW (cst)
8599 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8600 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8601 {
8602 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8603 return fold_build2_loc (loc, code, type,
8604 variable1,
8605 fold_build2_loc (loc, TREE_CODE (arg1),
8606 TREE_TYPE (arg1),
8607 variable2, cst));
8608 }
8609
8610 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8611 ? MINUS_EXPR : PLUS_EXPR,
8612 const1, const2);
8613 if (!TREE_OVERFLOW (cst)
8614 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8615 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8616 {
8617 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8618 return fold_build2_loc (loc, code, type,
8619 fold_build2_loc (loc, TREE_CODE (arg0),
8620 TREE_TYPE (arg0),
8621 variable1, cst),
8622 variable2);
8623 }
8624 }
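/* Editorial illustration (not part of the original sources):
   X + 2 < Y + 5 combines to X < Y + 3; the new constant 3 is
   smaller in magnitude than 5 and keeps its sign, so the first
   branch above applies.  */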
8625
8626 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8627 if (tem)
8628 return tem;
8629
8630 /* If we are comparing an expression that just has comparisons
8631 of two integer values, arithmetic expressions of those comparisons,
8632 and constants, we can simplify it. There are only three cases
8633 to check: the two values can either be equal, the first can be
8634 greater, or the second can be greater. Fold the expression for
8635 those three values. Since each value must be 0 or 1, we have
8636 eight possibilities, each of which corresponds to the constant 0
8637 or 1 or one of the six possible comparisons.
8638
8639 This handles common cases like (a > b) == 0 but also handles
8640 expressions like ((x > y) - (y > x)) > 0, which supposedly
8641 occur in macroized code. */
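/* Editorial illustration (not part of the original sources): for
   (a > b) == 0 the three substitutions below evaluate to 0, 1 and 1,
   selecting LE_EXPR, so the whole comparison folds to a <= b.  */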
8642
8643 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8644 {
8645 tree cval1 = 0, cval2 = 0;
8646 int save_p = 0;
8647
8648 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8649 /* Don't handle degenerate cases here; they should already
8650 have been handled anyway. */
8651 && cval1 != 0 && cval2 != 0
8652 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8653 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8654 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8655 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8656 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8657 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8658 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8659 {
8660 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8661 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8662
8663 /* We can't just pass T to eval_subst in case cval1 or cval2
8664 was the same as ARG1. */
8665
8666 tree high_result
8667 = fold_build2_loc (loc, code, type,
8668 eval_subst (loc, arg0, cval1, maxval,
8669 cval2, minval),
8670 arg1);
8671 tree equal_result
8672 = fold_build2_loc (loc, code, type,
8673 eval_subst (loc, arg0, cval1, maxval,
8674 cval2, maxval),
8675 arg1);
8676 tree low_result
8677 = fold_build2_loc (loc, code, type,
8678 eval_subst (loc, arg0, cval1, minval,
8679 cval2, maxval),
8680 arg1);
8681
8682 /* All three of these results should be 0 or 1. Confirm they are.
8683 Then use those values to select the proper code to use. */
8684
8685 if (TREE_CODE (high_result) == INTEGER_CST
8686 && TREE_CODE (equal_result) == INTEGER_CST
8687 && TREE_CODE (low_result) == INTEGER_CST)
8688 {
8689 /* Make a 3-bit mask with the high-order bit being the
8690 value for `>', the next for `=', and the low for `<'. */
8691 switch ((integer_onep (high_result) * 4)
8692 + (integer_onep (equal_result) * 2)
8693 + integer_onep (low_result))
8694 {
8695 case 0:
8696 /* Always false. */
8697 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8698 case 1:
8699 code = LT_EXPR;
8700 break;
8701 case 2:
8702 code = EQ_EXPR;
8703 break;
8704 case 3:
8705 code = LE_EXPR;
8706 break;
8707 case 4:
8708 code = GT_EXPR;
8709 break;
8710 case 5:
8711 code = NE_EXPR;
8712 break;
8713 case 6:
8714 code = GE_EXPR;
8715 break;
8716 case 7:
8717 /* Always true. */
8718 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8719 }
8720
8721 if (save_p)
8722 {
8723 tem = save_expr (build2 (code, type, cval1, cval2));
8724 protected_set_expr_location (tem, loc);
8725 return tem;
8726 }
8727 return fold_build2_loc (loc, code, type, cval1, cval2);
8728 }
8729 }
8730 }
8731
8732 return NULL_TREE;
8733 }
8734
8735
8736 /* Subroutine of fold_binary. Optimize complex multiplications of the
8737 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8738 argument EXPR represents the expression "z" of type TYPE. */
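/* To see why: for z = a + b*i we have conj(z) = a - b*i, so
z * conj(z) = (a*a + b*b) + (a*b - a*b)*i, i.e. a*a + b*b with a
zero imaginary part, which is exactly the COMPLEX_EXPR built
below. */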
8739
8740 static tree
8741 fold_mult_zconjz (location_t loc, tree type, tree expr)
8742 {
8743 tree itype = TREE_TYPE (type);
8744 tree rpart, ipart, tem;
8745
8746 if (TREE_CODE (expr) == COMPLEX_EXPR)
8747 {
8748 rpart = TREE_OPERAND (expr, 0);
8749 ipart = TREE_OPERAND (expr, 1);
8750 }
8751 else if (TREE_CODE (expr) == COMPLEX_CST)
8752 {
8753 rpart = TREE_REALPART (expr);
8754 ipart = TREE_IMAGPART (expr);
8755 }
8756 else
8757 {
8758 expr = save_expr (expr);
8759 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8760 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8761 }
8762
8763 rpart = save_expr (rpart);
8764 ipart = save_expr (ipart);
8765 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8766 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8767 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8768 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8769 build_zero_cst (itype));
8770 }
8771
8772
8773 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8774 CONSTRUCTOR ARG into array ELTS and return true if successful. */
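/* Note that a CONSTRUCTOR may list fewer initializers than the
vector has elements; the tail is zero-filled below, so e.g. a
four-element vector built from {x, y} becomes {x, y, 0, 0}. */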
8775
8776 static bool
8777 vec_cst_ctor_to_array (tree arg, tree *elts)
8778 {
8779 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8780
8781 if (TREE_CODE (arg) == VECTOR_CST)
8782 {
8783 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8784 elts[i] = VECTOR_CST_ELT (arg, i);
8785 }
8786 else if (TREE_CODE (arg) == CONSTRUCTOR)
8787 {
8788 constructor_elt *elt;
8789
8790 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8791 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8792 return false;
8793 else
8794 elts[i] = elt->value;
8795 }
8796 else
8797 return false;
8798 for (; i < nelts; i++)
8799 elts[i]
8800 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8801 return true;
8802 }
8803
8804 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8805 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8806 NULL_TREE otherwise. */
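/* SEL indexes into the virtual concatenation of ARG0 and ARG1, so
for four-element vectors a = {a0, a1, a2, a3} and
b = {b0, b1, b2, b3}, sel = {0, 4, 1, 5} produces the interleaved
result {a0, b0, a1, b1}. */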
8807
8808 static tree
8809 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8810 {
8811 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8812 tree *elts;
8813 bool need_ctor = false;
8814
8815 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8816 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8817 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8818 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8819 return NULL_TREE;
8820
8821 elts = XALLOCAVEC (tree, nelts * 3);
8822 if (!vec_cst_ctor_to_array (arg0, elts)
8823 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8824 return NULL_TREE;
8825
8826 for (i = 0; i < nelts; i++)
8827 {
8828 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8829 need_ctor = true;
8830 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8831 }
8832
8833 if (need_ctor)
8834 {
8835 vec<constructor_elt, va_gc> *v;
8836 vec_alloc (v, nelts);
8837 for (i = 0; i < nelts; i++)
8838 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8839 return build_constructor (type, v);
8840 }
8841 else
8842 return build_vector (type, &elts[2 * nelts]);
8843 }
8844
8845 /* Try to fold a pointer difference of type TYPE between two address expressions of
8846 array references AREF0 and AREF1 using location LOC. Return a
8847 simplified expression for the difference or NULL_TREE. */
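/* For example, for &a[i] - &a[j] the bases compare equal and the
result is (i - j) * sizeof (a[0]); for nested references such as
&a[i][k] - &a[j][l] the recursion on the bases supplies the outer
contribution as the base_offset term. */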
8848
8849 static tree
8850 fold_addr_of_array_ref_difference (location_t loc, tree type,
8851 tree aref0, tree aref1)
8852 {
8853 tree base0 = TREE_OPERAND (aref0, 0);
8854 tree base1 = TREE_OPERAND (aref1, 0);
8855 tree base_offset = build_int_cst (type, 0);
8856
8857 /* If the bases are array references as well, recurse. If the bases
8858 are pointer indirections, compute the difference of the pointers.
8859 If the bases are equal, we are set. */
8860 if ((TREE_CODE (base0) == ARRAY_REF
8861 && TREE_CODE (base1) == ARRAY_REF
8862 && (base_offset
8863 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8864 || (INDIRECT_REF_P (base0)
8865 && INDIRECT_REF_P (base1)
8866 && (base_offset
8867 = fold_binary_loc (loc, MINUS_EXPR, type,
8868 fold_convert (type, TREE_OPERAND (base0, 0)),
8869 fold_convert (type,
8870 TREE_OPERAND (base1, 0)))))
8871 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8872 {
8873 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8874 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8875 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8876 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
8877 return fold_build2_loc (loc, PLUS_EXPR, type,
8878 base_offset,
8879 fold_build2_loc (loc, MULT_EXPR, type,
8880 diff, esz));
8881 }
8882 return NULL_TREE;
8883 }
8884
8885 /* If the real or vector real constant CST of type TYPE has an exact
8886 inverse, return it, else return NULL. */
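/* E.g. 2.0 has the exact inverse 0.5, while 3.0 has none (1/3 is
not representable in binary floating point), so NULL is returned
for it. */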
8887
8888 tree
8889 exact_inverse (tree type, tree cst)
8890 {
8891 REAL_VALUE_TYPE r;
8892 tree unit_type, *elts;
8893 machine_mode mode;
8894 unsigned vec_nelts, i;
8895
8896 switch (TREE_CODE (cst))
8897 {
8898 case REAL_CST:
8899 r = TREE_REAL_CST (cst);
8900
8901 if (exact_real_inverse (TYPE_MODE (type), &r))
8902 return build_real (type, r);
8903
8904 return NULL_TREE;
8905
8906 case VECTOR_CST:
8907 vec_nelts = VECTOR_CST_NELTS (cst);
8908 elts = XALLOCAVEC (tree, vec_nelts);
8909 unit_type = TREE_TYPE (type);
8910 mode = TYPE_MODE (unit_type);
8911
8912 for (i = 0; i < vec_nelts; i++)
8913 {
8914 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8915 if (!exact_real_inverse (mode, &r))
8916 return NULL_TREE;
8917 elts[i] = build_real (unit_type, r);
8918 }
8919
8920 return build_vector (type, elts);
8921
8922 default:
8923 return NULL_TREE;
8924 }
8925 }
8926
8927 /* Mask out the tz least significant bits of X of type TYPE where
8928 tz is the number of trailing zeroes in Y. */
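/* E.g. for Y = 0b11000 (three trailing zeros) and X = 0b10111 the
result is 0b10000: the three low bits of X are cleared. */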
8929 static wide_int
8930 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8931 {
8932 int tz = wi::ctz (y);
8933 if (tz > 0)
8934 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8935 return x;
8936 }
8937
8938 /* Return true when T is an address and is known to be nonzero.
8939 For floating point we further ensure that T is not denormal.
8940 Similar logic is present in nonzero_address in rtlanal.h.
8941
8942 If the return value is based on the assumption that signed overflow
8943 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8944 change *STRICT_OVERFLOW_P. */
8945
8946 static bool
8947 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8948 {
8949 tree type = TREE_TYPE (t);
8950 enum tree_code code;
8951
8952 /* Doing something useful for floating point would need more work. */
8953 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8954 return false;
8955
8956 code = TREE_CODE (t);
8957 switch (TREE_CODE_CLASS (code))
8958 {
8959 case tcc_unary:
8960 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8961 strict_overflow_p);
8962 case tcc_binary:
8963 case tcc_comparison:
8964 return tree_binary_nonzero_warnv_p (code, type,
8965 TREE_OPERAND (t, 0),
8966 TREE_OPERAND (t, 1),
8967 strict_overflow_p);
8968 case tcc_constant:
8969 case tcc_declaration:
8970 case tcc_reference:
8971 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8972
8973 default:
8974 break;
8975 }
8976
8977 switch (code)
8978 {
8979 case TRUTH_NOT_EXPR:
8980 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8981 strict_overflow_p);
8982
8983 case TRUTH_AND_EXPR:
8984 case TRUTH_OR_EXPR:
8985 case TRUTH_XOR_EXPR:
8986 return tree_binary_nonzero_warnv_p (code, type,
8987 TREE_OPERAND (t, 0),
8988 TREE_OPERAND (t, 1),
8989 strict_overflow_p);
8990
8991 case COND_EXPR:
8992 case CONSTRUCTOR:
8993 case OBJ_TYPE_REF:
8994 case ASSERT_EXPR:
8995 case ADDR_EXPR:
8996 case WITH_SIZE_EXPR:
8997 case SSA_NAME:
8998 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8999
9000 case COMPOUND_EXPR:
9001 case MODIFY_EXPR:
9002 case BIND_EXPR:
9003 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9004 strict_overflow_p);
9005
9006 case SAVE_EXPR:
9007 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9008 strict_overflow_p);
9009
9010 case CALL_EXPR:
9011 {
9012 tree fndecl = get_callee_fndecl (t);
9013 if (!fndecl) return false;
9014 if (flag_delete_null_pointer_checks && !flag_check_new
9015 && DECL_IS_OPERATOR_NEW (fndecl)
9016 && !TREE_NOTHROW (fndecl))
9017 return true;
9018 if (flag_delete_null_pointer_checks
9019 && lookup_attribute ("returns_nonnull",
9020 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9021 return true;
9022 return alloca_call_p (t);
9023 }
9024
9025 default:
9026 break;
9027 }
9028 return false;
9029 }
9030
9031 /* Return true when T is an address and is known to be nonzero.
9032 Handle warnings about undefined signed overflow. */
9033
9034 bool
9035 tree_expr_nonzero_p (tree t)
9036 {
9037 bool ret, strict_overflow_p;
9038
9039 strict_overflow_p = false;
9040 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9041 if (strict_overflow_p)
9042 fold_overflow_warning (("assuming signed overflow does not occur when "
9043 "determining that expression is always "
9044 "non-zero"),
9045 WARN_STRICT_OVERFLOW_MISC);
9046 return ret;
9047 }
9048
9049 /* Return true if T is known not to be equal to an integer W. */
9050
9051 bool
9052 expr_not_equal_to (tree t, const wide_int &w)
9053 {
9054 wide_int min, max, nz;
9055 value_range_type rtype;
9056 switch (TREE_CODE (t))
9057 {
9058 case INTEGER_CST:
9059 return wi::ne_p (t, w);
9060
9061 case SSA_NAME:
9062 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9063 return false;
9064 rtype = get_range_info (t, &min, &max);
9065 if (rtype == VR_RANGE)
9066 {
9067 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9068 return true;
9069 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9070 return true;
9071 }
9072 else if (rtype == VR_ANTI_RANGE
9073 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9074 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9075 return true;
9076 /* If T has some known zero bits and W has any of those bits set,
9077 then T is known not to be equal to W. */
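/* E.g. if the nonzero-bits mask shows that the low bit of T is
always clear, T cannot equal any odd W. */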
9078 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9079 TYPE_PRECISION (TREE_TYPE (t))), 0))
9080 return true;
9081 return false;
9082
9083 default:
9084 return false;
9085 }
9086 }
9087
9088 /* Fold a binary expression of code CODE and type TYPE with operands
9089 OP0 and OP1. LOC is the location of the resulting expression.
9090 Return the folded expression if folding is successful. Otherwise,
9091 return NULL_TREE. */
9092
9093 tree
9094 fold_binary_loc (location_t loc,
9095 enum tree_code code, tree type, tree op0, tree op1)
9096 {
9097 enum tree_code_class kind = TREE_CODE_CLASS (code);
9098 tree arg0, arg1, tem;
9099 tree t1 = NULL_TREE;
9100 bool strict_overflow_p;
9101 unsigned int prec;
9102
9103 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9104 && TREE_CODE_LENGTH (code) == 2
9105 && op0 != NULL_TREE
9106 && op1 != NULL_TREE);
9107
9108 arg0 = op0;
9109 arg1 = op1;
9110
9111 /* Strip any conversions that don't change the mode. This is
9112 safe for every expression, except for a comparison expression
9113 because its signedness is derived from its operands. So, in
9114 the latter case, only strip conversions that don't change the
9115 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9116 preserved.
9117
9118 Note that this is done as an internal manipulation within the
9119 constant folder, in order to find the simplest representation
9120 of the arguments so that their form can be studied. In any
9121 case, the appropriate type conversions should be put back in
9122 the tree that will get out of the constant folder. */
9123
9124 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9125 {
9126 STRIP_SIGN_NOPS (arg0);
9127 STRIP_SIGN_NOPS (arg1);
9128 }
9129 else
9130 {
9131 STRIP_NOPS (arg0);
9132 STRIP_NOPS (arg1);
9133 }
9134
9135 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9136 constant but we can't do arithmetic on them. */
9137 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9138 {
9139 tem = const_binop (code, type, arg0, arg1);
9140 if (tem != NULL_TREE)
9141 {
9142 if (TREE_TYPE (tem) != type)
9143 tem = fold_convert_loc (loc, type, tem);
9144 return tem;
9145 }
9146 }
9147
9148 /* If this is a commutative operation, and ARG0 is a constant, move it
9149 to ARG1 to reduce the number of tests below. */
9150 if (commutative_tree_code (code)
9151 && tree_swap_operands_p (arg0, arg1))
9152 return fold_build2_loc (loc, code, type, op1, op0);
9153
9154 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9155 to ARG1 to reduce the number of tests below. */
9156 if (kind == tcc_comparison
9157 && tree_swap_operands_p (arg0, arg1))
9158 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9159
9160 tem = generic_simplify (loc, code, type, op0, op1);
9161 if (tem)
9162 return tem;
9163
9164 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9165
9166 First check for cases where an arithmetic operation is applied to a
9167 compound, conditional, or comparison operation. Push the arithmetic
9168 operation inside the compound or conditional to see if any folding
9169 can then be done. Convert comparison to conditional for this purpose.
9170 This also optimizes non-constant cases that used to be done in
9171 expand_expr.
9172
9173 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9174 one of the operands is a comparison and the other is a comparison, a
9175 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9176 code below would make the expression more complex. Change it to a
9177 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9178 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
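/* E.g. (a < b) & (c < d) is rewritten here as the TRUTH_AND_EXPR
(a < b) && (c < d), and (a < b) == (c < d) becomes the inversion of
the TRUTH_XOR_EXPR (a < b) ^ (c < d). */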
9179
9180 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9181 || code == EQ_EXPR || code == NE_EXPR)
9182 && TREE_CODE (type) != VECTOR_TYPE
9183 && ((truth_value_p (TREE_CODE (arg0))
9184 && (truth_value_p (TREE_CODE (arg1))
9185 || (TREE_CODE (arg1) == BIT_AND_EXPR
9186 && integer_onep (TREE_OPERAND (arg1, 1)))))
9187 || (truth_value_p (TREE_CODE (arg1))
9188 && (truth_value_p (TREE_CODE (arg0))
9189 || (TREE_CODE (arg0) == BIT_AND_EXPR
9190 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9191 {
9192 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9193 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9194 : TRUTH_XOR_EXPR,
9195 boolean_type_node,
9196 fold_convert_loc (loc, boolean_type_node, arg0),
9197 fold_convert_loc (loc, boolean_type_node, arg1));
9198
9199 if (code == EQ_EXPR)
9200 tem = invert_truthvalue_loc (loc, tem);
9201
9202 return fold_convert_loc (loc, type, tem);
9203 }
9204
9205 if (TREE_CODE_CLASS (code) == tcc_binary
9206 || TREE_CODE_CLASS (code) == tcc_comparison)
9207 {
9208 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9209 {
9210 tem = fold_build2_loc (loc, code, type,
9211 fold_convert_loc (loc, TREE_TYPE (op0),
9212 TREE_OPERAND (arg0, 1)), op1);
9213 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9214 tem);
9215 }
9216 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9217 {
9218 tem = fold_build2_loc (loc, code, type, op0,
9219 fold_convert_loc (loc, TREE_TYPE (op1),
9220 TREE_OPERAND (arg1, 1)));
9221 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9222 tem);
9223 }
9224
9225 if (TREE_CODE (arg0) == COND_EXPR
9226 || TREE_CODE (arg0) == VEC_COND_EXPR
9227 || COMPARISON_CLASS_P (arg0))
9228 {
9229 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9230 arg0, arg1,
9231 /*cond_first_p=*/1);
9232 if (tem != NULL_TREE)
9233 return tem;
9234 }
9235
9236 if (TREE_CODE (arg1) == COND_EXPR
9237 || TREE_CODE (arg1) == VEC_COND_EXPR
9238 || COMPARISON_CLASS_P (arg1))
9239 {
9240 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9241 arg1, arg0,
9242 /*cond_first_p=*/0);
9243 if (tem != NULL_TREE)
9244 return tem;
9245 }
9246 }
9247
9248 switch (code)
9249 {
9250 case MEM_REF:
9251 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9252 if (TREE_CODE (arg0) == ADDR_EXPR
9253 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9254 {
9255 tree iref = TREE_OPERAND (arg0, 0);
9256 return fold_build2 (MEM_REF, type,
9257 TREE_OPERAND (iref, 0),
9258 int_const_binop (PLUS_EXPR, arg1,
9259 TREE_OPERAND (iref, 1)));
9260 }
9261
9262 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9263 if (TREE_CODE (arg0) == ADDR_EXPR
9264 && handled_component_p (TREE_OPERAND (arg0, 0)))
9265 {
9266 tree base;
9267 HOST_WIDE_INT coffset;
9268 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9269 &coffset);
9270 if (!base)
9271 return NULL_TREE;
9272 return fold_build2 (MEM_REF, type,
9273 build_fold_addr_expr (base),
9274 int_const_binop (PLUS_EXPR, arg1,
9275 size_int (coffset)));
9276 }
9277
9278 return NULL_TREE;
9279
9280 case POINTER_PLUS_EXPR:
9281 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9282 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9283 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9284 return fold_convert_loc (loc, type,
9285 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9286 fold_convert_loc (loc, sizetype,
9287 arg1),
9288 fold_convert_loc (loc, sizetype,
9289 arg0)));
9290
9291 return NULL_TREE;
9292
9293 case PLUS_EXPR:
9294 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9295 {
9296 /* X + (X / CST) * -CST is X % CST. */
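/* E.g. X + (X / 4) * -4 is X - (X / 4) * 4, i.e. X % 4 under
truncating division. */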
9297 if (TREE_CODE (arg1) == MULT_EXPR
9298 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9299 && operand_equal_p (arg0,
9300 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9301 {
9302 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9303 tree cst1 = TREE_OPERAND (arg1, 1);
9304 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9305 cst1, cst0);
9306 if (sum && integer_zerop (sum))
9307 return fold_convert_loc (loc, type,
9308 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9309 TREE_TYPE (arg0), arg0,
9310 cst0));
9311 }
9312 }
9313
9314 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9315 one of them being 1. Make sure the type is not saturating and has the signedness of
9316 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9317 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9318 if ((TREE_CODE (arg0) == MULT_EXPR
9319 || TREE_CODE (arg1) == MULT_EXPR)
9320 && !TYPE_SATURATING (type)
9321 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9322 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9323 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9324 {
9325 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9326 if (tem)
9327 return tem;
9328 }
9329
9330 if (! FLOAT_TYPE_P (type))
9331 {
9332 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9333 (plus (plus (mult) (mult)) (foo)) so that we can
9334 take advantage of the factoring cases below. */
9335 if (ANY_INTEGRAL_TYPE_P (type)
9336 && TYPE_OVERFLOW_WRAPS (type)
9337 && (((TREE_CODE (arg0) == PLUS_EXPR
9338 || TREE_CODE (arg0) == MINUS_EXPR)
9339 && TREE_CODE (arg1) == MULT_EXPR)
9340 || ((TREE_CODE (arg1) == PLUS_EXPR
9341 || TREE_CODE (arg1) == MINUS_EXPR)
9342 && TREE_CODE (arg0) == MULT_EXPR)))
9343 {
9344 tree parg0, parg1, parg, marg;
9345 enum tree_code pcode;
9346
9347 if (TREE_CODE (arg1) == MULT_EXPR)
9348 parg = arg0, marg = arg1;
9349 else
9350 parg = arg1, marg = arg0;
9351 pcode = TREE_CODE (parg);
9352 parg0 = TREE_OPERAND (parg, 0);
9353 parg1 = TREE_OPERAND (parg, 1);
9354 STRIP_NOPS (parg0);
9355 STRIP_NOPS (parg1);
9356
9357 if (TREE_CODE (parg0) == MULT_EXPR
9358 && TREE_CODE (parg1) != MULT_EXPR)
9359 return fold_build2_loc (loc, pcode, type,
9360 fold_build2_loc (loc, PLUS_EXPR, type,
9361 fold_convert_loc (loc, type,
9362 parg0),
9363 fold_convert_loc (loc, type,
9364 marg)),
9365 fold_convert_loc (loc, type, parg1));
9366 if (TREE_CODE (parg0) != MULT_EXPR
9367 && TREE_CODE (parg1) == MULT_EXPR)
9368 return
9369 fold_build2_loc (loc, PLUS_EXPR, type,
9370 fold_convert_loc (loc, type, parg0),
9371 fold_build2_loc (loc, pcode, type,
9372 fold_convert_loc (loc, type, marg),
9373 fold_convert_loc (loc, type,
9374 parg1)));
9375 }
9376 }
9377 else
9378 {
9379 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9380 to __complex__ ( x, y ). This is not the same for SNaNs or
9381 if signed zeros are involved. */
9382 if (!HONOR_SNANS (element_mode (arg0))
9383 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9384 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9385 {
9386 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9387 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9388 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9389 bool arg0rz = false, arg0iz = false;
9390 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9391 || (arg0i && (arg0iz = real_zerop (arg0i))))
9392 {
9393 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9394 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9395 if (arg0rz && arg1i && real_zerop (arg1i))
9396 {
9397 tree rp = arg1r ? arg1r
9398 : build1 (REALPART_EXPR, rtype, arg1);
9399 tree ip = arg0i ? arg0i
9400 : build1 (IMAGPART_EXPR, rtype, arg0);
9401 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9402 }
9403 else if (arg0iz && arg1r && real_zerop (arg1r))
9404 {
9405 tree rp = arg0r ? arg0r
9406 : build1 (REALPART_EXPR, rtype, arg0);
9407 tree ip = arg1i ? arg1i
9408 : build1 (IMAGPART_EXPR, rtype, arg1);
9409 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9410 }
9411 }
9412 }
9413
9414 if (flag_unsafe_math_optimizations
9415 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9416 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9417 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9418 return tem;
9419
9420 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9421 We associate floats only if the user has specified
9422 -fassociative-math. */
9423 if (flag_associative_math
9424 && TREE_CODE (arg1) == PLUS_EXPR
9425 && TREE_CODE (arg0) != MULT_EXPR)
9426 {
9427 tree tree10 = TREE_OPERAND (arg1, 0);
9428 tree tree11 = TREE_OPERAND (arg1, 1);
9429 if (TREE_CODE (tree11) == MULT_EXPR
9430 && TREE_CODE (tree10) == MULT_EXPR)
9431 {
9432 tree tree0;
9433 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9434 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9435 }
9436 }
9437 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9438 We associate floats only if the user has specified
9439 -fassociative-math. */
9440 if (flag_associative_math
9441 && TREE_CODE (arg0) == PLUS_EXPR
9442 && TREE_CODE (arg1) != MULT_EXPR)
9443 {
9444 tree tree00 = TREE_OPERAND (arg0, 0);
9445 tree tree01 = TREE_OPERAND (arg0, 1);
9446 if (TREE_CODE (tree01) == MULT_EXPR
9447 && TREE_CODE (tree00) == MULT_EXPR)
9448 {
9449 tree tree0;
9450 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9451 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9452 }
9453 }
9454 }
9455
9456 bit_rotate:
9457 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9458 is a rotate of A by C1 bits. */
9459 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9460 is a rotate of A by B bits. */
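/* E.g. for 32-bit unsigned A, (A << 3) + (A >> 29) is a left rotate
of A by 3, and (A << B) + (A >> (32 - B)) is a left rotate of A
by B. */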
9461 {
9462 enum tree_code code0, code1;
9463 tree rtype;
9464 code0 = TREE_CODE (arg0);
9465 code1 = TREE_CODE (arg1);
9466 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9467 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9468 && operand_equal_p (TREE_OPERAND (arg0, 0),
9469 TREE_OPERAND (arg1, 0), 0)
9470 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9471 TYPE_UNSIGNED (rtype))
9472 /* Only create rotates in complete modes. Other cases are not
9473 expanded properly. */
9474 && (element_precision (rtype)
9475 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9476 {
9477 tree tree01, tree11;
9478 enum tree_code code01, code11;
9479
9480 tree01 = TREE_OPERAND (arg0, 1);
9481 tree11 = TREE_OPERAND (arg1, 1);
9482 STRIP_NOPS (tree01);
9483 STRIP_NOPS (tree11);
9484 code01 = TREE_CODE (tree01);
9485 code11 = TREE_CODE (tree11);
9486 if (code01 == INTEGER_CST
9487 && code11 == INTEGER_CST
9488 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9489 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9490 {
9491 tem = build2_loc (loc, LROTATE_EXPR,
9492 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9493 TREE_OPERAND (arg0, 0),
9494 code0 == LSHIFT_EXPR
9495 ? TREE_OPERAND (arg0, 1)
9496 : TREE_OPERAND (arg1, 1));
9497 return fold_convert_loc (loc, type, tem);
9498 }
9499 else if (code11 == MINUS_EXPR)
9500 {
9501 tree tree110, tree111;
9502 tree110 = TREE_OPERAND (tree11, 0);
9503 tree111 = TREE_OPERAND (tree11, 1);
9504 STRIP_NOPS (tree110);
9505 STRIP_NOPS (tree111);
9506 if (TREE_CODE (tree110) == INTEGER_CST
9507 && 0 == compare_tree_int (tree110,
9508 element_precision
9509 (TREE_TYPE (TREE_OPERAND
9510 (arg0, 0))))
9511 && operand_equal_p (tree01, tree111, 0))
9512 return
9513 fold_convert_loc (loc, type,
9514 build2 ((code0 == LSHIFT_EXPR
9515 ? LROTATE_EXPR
9516 : RROTATE_EXPR),
9517 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9518 TREE_OPERAND (arg0, 0),
9519 TREE_OPERAND (arg0, 1)));
9520 }
9521 else if (code01 == MINUS_EXPR)
9522 {
9523 tree tree010, tree011;
9524 tree010 = TREE_OPERAND (tree01, 0);
9525 tree011 = TREE_OPERAND (tree01, 1);
9526 STRIP_NOPS (tree010);
9527 STRIP_NOPS (tree011);
9528 if (TREE_CODE (tree010) == INTEGER_CST
9529 && 0 == compare_tree_int (tree010,
9530 element_precision
9531 (TREE_TYPE (TREE_OPERAND
9532 (arg0, 0))))
9533 && operand_equal_p (tree11, tree011, 0))
9534 return fold_convert_loc
9535 (loc, type,
9536 build2 ((code0 != LSHIFT_EXPR
9537 ? LROTATE_EXPR
9538 : RROTATE_EXPR),
9539 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9540 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9541 }
9542 }
9543 }
9544
9545 associate:
9546 /* In most languages, we can't associate operations on floats through
9547 parentheses. Rather than remember where the parentheses were, we
9548 don't associate floats at all, unless the user has specified
9549 -fassociative-math.
9550 And, we need to make sure type is not saturating. */
9551
9552 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9553 && !TYPE_SATURATING (type))
9554 {
9555 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9556 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9557 tree atype = type;
9558 bool ok = true;
9559
9560 /* Split both trees into variables, constants, and literals. Then
9561 associate each group together, the constants with literals,
9562 then the result with variables. This increases the chances of
9563 literals being recombined later and of generating relocatable
9564 expressions for the sum of a constant and literal. */
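/* E.g. for (x + 3) + (y + 5) the variable parts are x and y and the
literal parts are 3 and 5, which recombine below into (x + y) + 8. */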
9565 var0 = split_tree (arg0, type, code,
9566 &minus_var0, &con0, &minus_con0,
9567 &lit0, &minus_lit0, 0);
9568 var1 = split_tree (arg1, type, code,
9569 &minus_var1, &con1, &minus_con1,
9570 &lit1, &minus_lit1, code == MINUS_EXPR);
9571
9572 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9573 if (code == MINUS_EXPR)
9574 code = PLUS_EXPR;
9575
9576 /* With undefined overflow prefer doing association in a type
9577 which wraps on overflow, if that is one of the operand types. */
9578 if (POINTER_TYPE_P (type)
9579 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9580 {
9581 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9582 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9583 atype = TREE_TYPE (arg0);
9584 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9585 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9586 atype = TREE_TYPE (arg1);
9587 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9588 }
9589
9590 /* With undefined overflow we can only associate constants with one
9591 variable, and constants whose association doesn't overflow. */
9592 if (POINTER_TYPE_P (atype)
9593 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9594 {
9595 if (var0 && var1)
9596 {
9597 /* ??? If split_tree would handle NEGATE_EXPR we could
9598 simplify this down to the var0/minus_var1 cases. */
9599 tree tmp0 = var0;
9600 tree tmp1 = var1;
9601 bool one_neg = false;
9602
9603 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9604 {
9605 tmp0 = TREE_OPERAND (tmp0, 0);
9606 one_neg = !one_neg;
9607 }
9608 if (CONVERT_EXPR_P (tmp0)
9609 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9610 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9611 <= TYPE_PRECISION (atype)))
9612 tmp0 = TREE_OPERAND (tmp0, 0);
9613 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9614 {
9615 tmp1 = TREE_OPERAND (tmp1, 0);
9616 one_neg = !one_neg;
9617 }
9618 if (CONVERT_EXPR_P (tmp1)
9619 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9620 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9621 <= TYPE_PRECISION (atype)))
9622 tmp1 = TREE_OPERAND (tmp1, 0);
9623 /* The only case we can still associate with two variables
9624 is if they cancel out. */
9625 if (!one_neg
9626 || !operand_equal_p (tmp0, tmp1, 0))
9627 ok = false;
9628 }
9629 else if ((var0 && minus_var1
9630 && ! operand_equal_p (var0, minus_var1, 0))
9631 || (minus_var0 && var1
9632 && ! operand_equal_p (minus_var0, var1, 0)))
9633 ok = false;
9634 }
9635
9636 /* Only do something if we found more than two objects. Otherwise,
9637 nothing has changed and we risk infinite recursion. */
9638 if (ok
9639 && (2 < ((var0 != 0) + (var1 != 0)
9640 + (minus_var0 != 0) + (minus_var1 != 0)
9641 + (con0 != 0) + (con1 != 0)
9642 + (minus_con0 != 0) + (minus_con1 != 0)
9643 + (lit0 != 0) + (lit1 != 0)
9644 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9645 {
9646 var0 = associate_trees (loc, var0, var1, code, atype);
9647 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9648 code, atype);
9649 con0 = associate_trees (loc, con0, con1, code, atype);
9650 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9651 code, atype);
9652 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9653 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9654 code, atype);
9655
9656 if (minus_var0 && var0)
9657 {
9658 var0 = associate_trees (loc, var0, minus_var0,
9659 MINUS_EXPR, atype);
9660 minus_var0 = 0;
9661 }
9662 if (minus_con0 && con0)
9663 {
9664 con0 = associate_trees (loc, con0, minus_con0,
9665 MINUS_EXPR, atype);
9666 minus_con0 = 0;
9667 }
9668
9669 /* Preserve the MINUS_EXPR if the negative part of the literal is
9670 greater than the positive part. Otherwise, the multiplicative
9671 folding code (i.e. extract_muldiv) may be fooled in case
9672 unsigned constants are subtracted, like in the following
9673 example: ((X*2 + 4) - 8U)/2. */
9674 if (minus_lit0 && lit0)
9675 {
9676 if (TREE_CODE (lit0) == INTEGER_CST
9677 && TREE_CODE (minus_lit0) == INTEGER_CST
9678 && tree_int_cst_lt (lit0, minus_lit0)
9679 /* But avoid ending up with only negated parts. */
9680 && (var0 || con0))
9681 {
9682 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9683 MINUS_EXPR, atype);
9684 lit0 = 0;
9685 }
9686 else
9687 {
9688 lit0 = associate_trees (loc, lit0, minus_lit0,
9689 MINUS_EXPR, atype);
9690 minus_lit0 = 0;
9691 }
9692 }
9693
9694 /* Don't introduce overflows through reassociation. */
9695 if ((lit0 && TREE_OVERFLOW_P (lit0))
9696 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9697 return NULL_TREE;
9698
9699 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
9700 con0 = associate_trees (loc, con0, lit0, code, atype);
9701 lit0 = 0;
9702 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9703 code, atype);
9704 minus_lit0 = 0;
9705
9706 /* Eliminate minus_con0. */
9707 if (minus_con0)
9708 {
9709 if (con0)
9710 con0 = associate_trees (loc, con0, minus_con0,
9711 MINUS_EXPR, atype);
9712 else if (var0)
9713 var0 = associate_trees (loc, var0, minus_con0,
9714 MINUS_EXPR, atype);
9715 else
9716 gcc_unreachable ();
9717 minus_con0 = 0;
9718 }
9719
9720 /* Eliminate minus_var0. */
9721 if (minus_var0)
9722 {
9723 if (con0)
9724 con0 = associate_trees (loc, con0, minus_var0,
9725 MINUS_EXPR, atype);
9726 else
9727 gcc_unreachable ();
9728 minus_var0 = 0;
9729 }
9730
9731 return
9732 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9733 code, atype));
9734 }
9735 }
9736
9737 return NULL_TREE;
9738
9739 case MINUS_EXPR:
9740 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9741 if (TREE_CODE (arg0) == NEGATE_EXPR
9742 && negate_expr_p (op1))
9743 return fold_build2_loc (loc, MINUS_EXPR, type,
9744 negate_expr (op1),
9745 fold_convert_loc (loc, type,
9746 TREE_OPERAND (arg0, 0)));
9747
9748 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9749 __complex__ ( x, -y ). This is not the same for SNaNs or if
9750 signed zeros are involved. */
9751 if (!HONOR_SNANS (element_mode (arg0))
9752 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9753 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9754 {
9755 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9756 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9757 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9758 bool arg0rz = false, arg0iz = false;
9759 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9760 || (arg0i && (arg0iz = real_zerop (arg0i))))
9761 {
9762 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9763 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9764 if (arg0rz && arg1i && real_zerop (arg1i))
9765 {
9766 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9767 arg1r ? arg1r
9768 : build1 (REALPART_EXPR, rtype, arg1));
9769 tree ip = arg0i ? arg0i
9770 : build1 (IMAGPART_EXPR, rtype, arg0);
9771 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9772 }
9773 else if (arg0iz && arg1r && real_zerop (arg1r))
9774 {
9775 tree rp = arg0r ? arg0r
9776 : build1 (REALPART_EXPR, rtype, arg0);
9777 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9778 arg1i ? arg1i
9779 : build1 (IMAGPART_EXPR, rtype, arg1));
9780 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9781 }
9782 }
9783 }
9784
9785 /* A - B -> A + (-B) if B is easily negatable. */
9786 if (negate_expr_p (op1)
9787 && ! TYPE_OVERFLOW_SANITIZED (type)
9788 && ((FLOAT_TYPE_P (type)
9789 /* Avoid this transformation if B is a positive REAL_CST. */
9790 && (TREE_CODE (op1) != REAL_CST
9791 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9792 || INTEGRAL_TYPE_P (type)))
9793 return fold_build2_loc (loc, PLUS_EXPR, type,
9794 fold_convert_loc (loc, type, arg0),
9795 negate_expr (op1));
9796
9797 /* Fold &a[i] - &a[j] to i-j. */
9798 if (TREE_CODE (arg0) == ADDR_EXPR
9799 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9800 && TREE_CODE (arg1) == ADDR_EXPR
9801 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9802 {
9803 tree tem = fold_addr_of_array_ref_difference (loc, type,
9804 TREE_OPERAND (arg0, 0),
9805 TREE_OPERAND (arg1, 0));
9806 if (tem)
9807 return tem;
9808 }
9809
9810 if (FLOAT_TYPE_P (type)
9811 && flag_unsafe_math_optimizations
9812 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9813 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9814 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9815 return tem;
9816
9817 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9818 one of them being 1. Make sure the type is not saturating and has the signedness of
9819 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9820 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9821 if ((TREE_CODE (arg0) == MULT_EXPR
9822 || TREE_CODE (arg1) == MULT_EXPR)
9823 && !TYPE_SATURATING (type)
9824 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9825 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9826 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9827 {
9828 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9829 if (tem)
9830 return tem;
9831 }
9832
9833 goto associate;
9834
9835 case MULT_EXPR:
9836 if (! FLOAT_TYPE_P (type))
9837 {
9838 /* Transform x * -C into -x * C if x is easily negatable. */
9839 if (TREE_CODE (op1) == INTEGER_CST
9840 && tree_int_cst_sgn (op1) == -1
9841 && negate_expr_p (op0)
9842 && negate_expr_p (op1)
9843 && (tem = negate_expr (op1)) != op1
9844 && ! TREE_OVERFLOW (tem))
9845 return fold_build2_loc (loc, MULT_EXPR, type,
9846 fold_convert_loc (loc, type,
9847 negate_expr (op0)), tem);
9848
9849 strict_overflow_p = false;
9850 if (TREE_CODE (arg1) == INTEGER_CST
9851 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9852 &strict_overflow_p)))
9853 {
9854 if (strict_overflow_p)
9855 fold_overflow_warning (("assuming signed overflow does not "
9856 "occur when simplifying "
9857 "multiplication"),
9858 WARN_STRICT_OVERFLOW_MISC);
9859 return fold_convert_loc (loc, type, tem);
9860 }
9861
9862 /* Optimize z * conj(z) for integer complex numbers. */
9863 if (TREE_CODE (arg0) == CONJ_EXPR
9864 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9865 return fold_mult_zconjz (loc, type, arg1);
9866 if (TREE_CODE (arg1) == CONJ_EXPR
9867 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9868 return fold_mult_zconjz (loc, type, arg0);
9869 }
9870 else
9871 {
9872 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9873 This is not the same for NaNs or if signed zeros are
9874 involved. */
9875 if (!HONOR_NANS (arg0)
9876 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9877 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9878 && TREE_CODE (arg1) == COMPLEX_CST
9879 && real_zerop (TREE_REALPART (arg1)))
9880 {
9881 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9882 if (real_onep (TREE_IMAGPART (arg1)))
9883 return
9884 fold_build2_loc (loc, COMPLEX_EXPR, type,
9885 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9886 rtype, arg0)),
9887 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9888 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9889 return
9890 fold_build2_loc (loc, COMPLEX_EXPR, type,
9891 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9892 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9893 rtype, arg0)));
9894 }
9895
9896 /* Optimize z * conj(z) for floating point complex numbers.
9897 Guarded by flag_unsafe_math_optimizations as non-finite
9898 imaginary components don't produce scalar results. */
9899 if (flag_unsafe_math_optimizations
9900 && TREE_CODE (arg0) == CONJ_EXPR
9901 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9902 return fold_mult_zconjz (loc, type, arg1);
9903 if (flag_unsafe_math_optimizations
9904 && TREE_CODE (arg1) == CONJ_EXPR
9905 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9906 return fold_mult_zconjz (loc, type, arg0);
9907 }
9908 goto associate;
9909
9910 case BIT_IOR_EXPR:
9911 /* Canonicalize (X & C1) | C2. */
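/* Worked examples, assuming an 8-bit type for illustration:
(X & 0x03) | 0x07 becomes 0x07 since (C1 & C2) == C1;
(X & 0xf3) | 0x0f becomes X | 0x0f since (C1 | C2) == ~0; and
(X & 0x73) | 0x03 becomes (X & 0x70) | 0x03, clearing the C2 bits
from C1. */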
9912 if (TREE_CODE (arg0) == BIT_AND_EXPR
9913 && TREE_CODE (arg1) == INTEGER_CST
9914 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9915 {
9916 int width = TYPE_PRECISION (type), w;
9917 wide_int c1 = TREE_OPERAND (arg0, 1);
9918 wide_int c2 = arg1;
9919
9920 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9921 if ((c1 & c2) == c1)
9922 return omit_one_operand_loc (loc, type, arg1,
9923 TREE_OPERAND (arg0, 0));
9924
9925 wide_int msk = wi::mask (width, false,
9926 TYPE_PRECISION (TREE_TYPE (arg1)));
9927
9928 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9929 if (msk.and_not (c1 | c2) == 0)
9930 {
9931 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9932 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9933 }
9934
9935 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9936 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9937 mode which allows further optimizations. */
9938 c1 &= msk;
9939 c2 &= msk;
9940 wide_int c3 = c1.and_not (c2);
9941 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9942 {
9943 wide_int mask = wi::mask (w, false,
9944 TYPE_PRECISION (type));
9945 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9946 {
9947 c3 = mask;
9948 break;
9949 }
9950 }
9951
9952 if (c3 != c1)
9953 {
9954 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9955 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
9956 wide_int_to_tree (type, c3));
9957 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9958 }
9959 }
9960
9961 /* See if this can be simplified into a rotate first. If that
9962 is unsuccessful, continue in the association code. */
9963 goto bit_rotate;
9964
9965 case BIT_XOR_EXPR:
9966 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9967 if (TREE_CODE (arg0) == BIT_AND_EXPR
9968 && INTEGRAL_TYPE_P (type)
9969 && integer_onep (TREE_OPERAND (arg0, 1))
9970 && integer_onep (arg1))
9971 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9972 build_zero_cst (TREE_TYPE (arg0)));
9973
9974 /* See if this can be simplified into a rotate first. If that
9975 is unsuccessful, continue in the association code. */
9976 goto bit_rotate;
9977
9978 case BIT_AND_EXPR:
9979 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9980 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9981 && INTEGRAL_TYPE_P (type)
9982 && integer_onep (TREE_OPERAND (arg0, 1))
9983 && integer_onep (arg1))
9984 {
9985 tree tem2;
9986 tem = TREE_OPERAND (arg0, 0);
9987 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9988 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9989 tem, tem2);
9990 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9991 build_zero_cst (TREE_TYPE (tem)));
9992 }
9993 /* Fold ~X & 1 as (X & 1) == 0. */
9994 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9995 && INTEGRAL_TYPE_P (type)
9996 && integer_onep (arg1))
9997 {
9998 tree tem2;
9999 tem = TREE_OPERAND (arg0, 0);
10000 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10001 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10002 tem, tem2);
10003 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10004 build_zero_cst (TREE_TYPE (tem)));
10005 }
10006 /* Fold !X & 1 as X == 0. */
10007 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10008 && integer_onep (arg1))
10009 {
10010 tem = TREE_OPERAND (arg0, 0);
10011 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10012 build_zero_cst (TREE_TYPE (tem)));
10013 }
10014
10015 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10016 multiple of 1 << CST. */
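/* E.g. (X * 24) & -8 folds to X * 24: -8 == -(1 << 3) masks off the
low three bits, and X * 24 is always a multiple of 8, so the mask
clears nothing. */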
10017 if (TREE_CODE (arg1) == INTEGER_CST)
10018 {
10019 wide_int cst1 = arg1;
10020 wide_int ncst1 = -cst1;
10021 if ((cst1 & ncst1) == ncst1
10022 && multiple_of_p (type, arg0,
10023 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10024 return fold_convert_loc (loc, type, arg0);
10025 }
10026
10027 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10028 bits from CST2. */
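/* E.g. (X * 4) & 3 is always 0, while (X * 4) & 7 drops the two
known-zero low bits and becomes (X * 4) & 4. */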
10029 if (TREE_CODE (arg1) == INTEGER_CST
10030 && TREE_CODE (arg0) == MULT_EXPR
10031 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10032 {
10033 wide_int warg1 = arg1;
10034 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10035
10036 if (masked == 0)
10037 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10038 arg0, arg1);
10039 else if (masked != warg1)
10040 {
10041 /* Avoid the transform if arg1 is a mask of some
10042 mode which allows further optimizations. */
10043 int pop = wi::popcount (warg1);
10044 if (!(pop >= BITS_PER_UNIT
10045 && pow2p_hwi (pop)
10046 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10047 return fold_build2_loc (loc, code, type, op0,
10048 wide_int_to_tree (type, masked));
10049 }
10050 }
10051
10052 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10053 ((A & N) + B) & M -> (A + B) & M
10054 Similarly if (N & M) == 0,
10055 ((A | N) + B) & M -> (A + B) & M
10056 and for - instead of + (or unary - instead of +)
10057 and/or ^ instead of |.
10058 If B is constant and (B & M) == 0, fold into A & M. */
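/* E.g. with M == 7, ((A & 31) + B) & 7 -> (A + B) & 7, because the
three low bits of A & 31 agree with those of A; likewise
((A | 8) + B) & 7 -> (A + B) & 7 because (8 & 7) == 0. */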
10059 if (TREE_CODE (arg1) == INTEGER_CST)
10060 {
10061 wide_int cst1 = arg1;
10062 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10063 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10064 && (TREE_CODE (arg0) == PLUS_EXPR
10065 || TREE_CODE (arg0) == MINUS_EXPR
10066 || TREE_CODE (arg0) == NEGATE_EXPR)
10067 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10068 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10069 {
10070 tree pmop[2];
10071 int which = 0;
10072 wide_int cst0;
10073
10074 /* Now we know that arg0 is (C + D) or (C - D) or
10075 -C, and arg1 (M) is (1LL << cst) - 1.
10076 Store C into PMOP[0] and D into PMOP[1]. */
10077 pmop[0] = TREE_OPERAND (arg0, 0);
10078 pmop[1] = NULL;
10079 if (TREE_CODE (arg0) != NEGATE_EXPR)
10080 {
10081 pmop[1] = TREE_OPERAND (arg0, 1);
10082 which = 1;
10083 }
10084
10085 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10086 which = -1;
10087
10088 for (; which >= 0; which--)
10089 switch (TREE_CODE (pmop[which]))
10090 {
10091 case BIT_AND_EXPR:
10092 case BIT_IOR_EXPR:
10093 case BIT_XOR_EXPR:
10094 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10095 != INTEGER_CST)
10096 break;
10097 cst0 = TREE_OPERAND (pmop[which], 1);
10098 cst0 &= cst1;
10099 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10100 {
10101 if (cst0 != cst1)
10102 break;
10103 }
10104 else if (cst0 != 0)
10105 break;
10106 /* If C or D is of the form (A & N) where
10107 (N & M) == M, or of the form (A | N) or
10108 (A ^ N) where (N & M) == 0, replace it with A. */
10109 pmop[which] = TREE_OPERAND (pmop[which], 0);
10110 break;
10111 case INTEGER_CST:
10112 /* If C or D is a N where (N & M) == 0, it can be
10113 omitted (assumed 0). */
10114 if ((TREE_CODE (arg0) == PLUS_EXPR
10115 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10116 && (cst1 & pmop[which]) == 0)
10117 pmop[which] = NULL;
10118 break;
10119 default:
10120 break;
10121 }
10122
10123 /* Only build anything new if we optimized one or both arguments
10124 above. */
10125 if (pmop[0] != TREE_OPERAND (arg0, 0)
10126 || (TREE_CODE (arg0) != NEGATE_EXPR
10127 && pmop[1] != TREE_OPERAND (arg0, 1)))
10128 {
10129 tree utype = TREE_TYPE (arg0);
10130 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10131 {
10132 /* Perform the operations in a type that has defined
10133 overflow behavior. */
10134 utype = unsigned_type_for (TREE_TYPE (arg0));
10135 if (pmop[0] != NULL)
10136 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10137 if (pmop[1] != NULL)
10138 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10139 }
10140
10141 if (TREE_CODE (arg0) == NEGATE_EXPR)
10142 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10143 else if (TREE_CODE (arg0) == PLUS_EXPR)
10144 {
10145 if (pmop[0] != NULL && pmop[1] != NULL)
10146 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10147 pmop[0], pmop[1]);
10148 else if (pmop[0] != NULL)
10149 tem = pmop[0];
10150 else if (pmop[1] != NULL)
10151 tem = pmop[1];
10152 else
10153 return build_int_cst (type, 0);
10154 }
10155 else if (pmop[0] == NULL)
10156 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10157 else
10158 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10159 pmop[0], pmop[1]);
10160 /* TEM is now the new binary +, - or unary - replacement. */
10161 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10162 fold_convert_loc (loc, utype, arg1));
10163 return fold_convert_loc (loc, type, tem);
10164 }
10165 }
10166 }
10167
10168 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10169 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10170 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10171 {
10172 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10173
10174 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10175 if (mask == -1)
10176 return
10177 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10178 }
10179
10180 goto associate;
10181
10182 case RDIV_EXPR:
10183 /* Don't touch a floating-point divide by zero unless the mode
10184 of the constant can represent infinity. */
10185 if (TREE_CODE (arg1) == REAL_CST
10186 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10187 && real_zerop (arg1))
10188 return NULL_TREE;
10189
10190 /* (-A) / (-B) -> A / B */
10191 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10192 return fold_build2_loc (loc, RDIV_EXPR, type,
10193 TREE_OPERAND (arg0, 0),
10194 negate_expr (arg1));
10195 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10196 return fold_build2_loc (loc, RDIV_EXPR, type,
10197 negate_expr (arg0),
10198 TREE_OPERAND (arg1, 0));
10199 return NULL_TREE;
10200
10201 case TRUNC_DIV_EXPR:
10202 /* Fall through */
10203
10204 case FLOOR_DIV_EXPR:
10205 /* Simplify A / (B << N) where A and B are positive and B is
10206 a power of 2, to A >> (N + log2(B)). */
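/* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2). */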
10207 strict_overflow_p = false;
10208 if (TREE_CODE (arg1) == LSHIFT_EXPR
10209 && (TYPE_UNSIGNED (type)
10210 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10211 {
10212 tree sval = TREE_OPERAND (arg1, 0);
10213 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10214 {
10215 tree sh_cnt = TREE_OPERAND (arg1, 1);
10216 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10217 wi::exact_log2 (sval));
10218
10219 if (strict_overflow_p)
10220 fold_overflow_warning (("assuming signed overflow does not "
10221 "occur when simplifying A / (B << N)"),
10222 WARN_STRICT_OVERFLOW_MISC);
10223
10224 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10225 sh_cnt, pow2);
10226 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10227 fold_convert_loc (loc, type, arg0), sh_cnt);
10228 }
10229 }
10230
10231 /* Fall through */
10232
10233 case ROUND_DIV_EXPR:
10234 case CEIL_DIV_EXPR:
10235 case EXACT_DIV_EXPR:
10236 if (integer_zerop (arg1))
10237 return NULL_TREE;
10238
10239 /* Convert -A / -B to A / B when the type is signed and overflow is
10240 undefined. */
10241 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10242 && TREE_CODE (op0) == NEGATE_EXPR
10243 && negate_expr_p (op1))
10244 {
10245 if (INTEGRAL_TYPE_P (type))
10246 fold_overflow_warning (("assuming signed overflow does not occur "
10247 "when distributing negation across "
10248 "division"),
10249 WARN_STRICT_OVERFLOW_MISC);
10250 return fold_build2_loc (loc, code, type,
10251 fold_convert_loc (loc, type,
10252 TREE_OPERAND (arg0, 0)),
10253 negate_expr (op1));
10254 }
10255 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10256 && TREE_CODE (arg1) == NEGATE_EXPR
10257 && negate_expr_p (op0))
10258 {
10259 if (INTEGRAL_TYPE_P (type))
10260 fold_overflow_warning (("assuming signed overflow does not occur "
10261 "when distributing negation across "
10262 "division"),
10263 WARN_STRICT_OVERFLOW_MISC);
10264 return fold_build2_loc (loc, code, type,
10265 negate_expr (op0),
10266 fold_convert_loc (loc, type,
10267 TREE_OPERAND (arg1, 0)));
10268 }
10269
10270 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10271 operation, EXACT_DIV_EXPR.
10272
10273 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10274 At one time others generated faster code, but it's not clear if they do
10275 after the last round of changes to the DIV code in expmed.c. */
10276 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10277 && multiple_of_p (type, arg0, arg1))
10278 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10279 fold_convert (type, arg0),
10280 fold_convert (type, arg1));
10281
10282 strict_overflow_p = false;
10283 if (TREE_CODE (arg1) == INTEGER_CST
10284 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10285 &strict_overflow_p)))
10286 {
10287 if (strict_overflow_p)
10288 fold_overflow_warning (("assuming signed overflow does not occur "
10289 "when simplifying division"),
10290 WARN_STRICT_OVERFLOW_MISC);
10291 return fold_convert_loc (loc, type, tem);
10292 }
10293
10294 return NULL_TREE;
10295
10296 case CEIL_MOD_EXPR:
10297 case FLOOR_MOD_EXPR:
10298 case ROUND_MOD_EXPR:
10299 case TRUNC_MOD_EXPR:
10300 strict_overflow_p = false;
10301 if (TREE_CODE (arg1) == INTEGER_CST
10302 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10303 &strict_overflow_p)))
10304 {
10305 if (strict_overflow_p)
10306 fold_overflow_warning (("assuming signed overflow does not occur "
10307 "when simplifying modulus"),
10308 WARN_STRICT_OVERFLOW_MISC);
10309 return fold_convert_loc (loc, type, tem);
10310 }
10311
10312 return NULL_TREE;
10313
10314 case LROTATE_EXPR:
10315 case RROTATE_EXPR:
10316 case RSHIFT_EXPR:
10317 case LSHIFT_EXPR:
10318 /* Since a negative shift count is not well-defined,
10319 don't try to compute it in the compiler. */
10320 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10321 return NULL_TREE;
10322
10323 prec = element_precision (type);
10324
10325 /* If we have a rotate of a bit operation with the rotate count and
10326 the second operand of the bit operation both constant,
10327 permute the two operations. */
10328 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10329 && (TREE_CODE (arg0) == BIT_AND_EXPR
10330 || TREE_CODE (arg0) == BIT_IOR_EXPR
10331 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10332 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10333 {
10334 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10335 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10336 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10337 fold_build2_loc (loc, code, type,
10338 arg00, arg1),
10339 fold_build2_loc (loc, code, type,
10340 arg01, arg1));
10341 }
10342
10343 /* Two consecutive rotates adding up to some integer
10344 multiple of the precision of the type can be ignored. */
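/* E.g. for a 32-bit X, (X r>> 5) r>> 27 rotates by 32 bits in total
and folds back to X. */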
10345 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10346 && TREE_CODE (arg0) == RROTATE_EXPR
10347 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10348 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10349 prec) == 0)
10350 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10351
10352 return NULL_TREE;
10353
10354 case MIN_EXPR:
10355 case MAX_EXPR:
10356 goto associate;
10357
10358 case TRUTH_ANDIF_EXPR:
10359 /* Note that the operands of this must be ints
10360 and their values must be 0 or 1.
10361 ("true" is a fixed value perhaps depending on the language.) */
10362 /* If first arg is constant zero, return it. */
10363 if (integer_zerop (arg0))
10364 return fold_convert_loc (loc, type, arg0);
10365 /* FALLTHRU */
10366 case TRUTH_AND_EXPR:
10367 /* If either arg is constant true, drop it. */
10368 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10369 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10370 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10371 /* Preserve sequence points. */
10372 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10373 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10374 /* If second arg is constant zero, result is zero, but first arg
10375 must be evaluated. */
10376 if (integer_zerop (arg1))
10377 return omit_one_operand_loc (loc, type, arg1, arg0);
10378 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10379 case will be handled here. */
10380 if (integer_zerop (arg0))
10381 return omit_one_operand_loc (loc, type, arg0, arg1);
10382
10383 /* !X && X is always false. */
10384 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10385 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10386 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10387 /* X && !X is always false. */
10388 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10389 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10390 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10391
10392 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10393 means A >= Y && A != MAX, but in this case we know that
10394 A < X <= MAX. */
10395
10396 if (!TREE_SIDE_EFFECTS (arg0)
10397 && !TREE_SIDE_EFFECTS (arg1))
10398 {
10399 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10400 if (tem && !operand_equal_p (tem, arg0, 0))
10401 return fold_build2_loc (loc, code, type, tem, arg1);
10402
10403 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10404 if (tem && !operand_equal_p (tem, arg1, 0))
10405 return fold_build2_loc (loc, code, type, arg0, tem);
10406 }
10407
10408 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10409 != NULL_TREE)
10410 return tem;
10411
10412 return NULL_TREE;
10413
10414 case TRUTH_ORIF_EXPR:
10415 /* Note that the operands of this must be ints
10416 and their values must be 0 or true.
10417 ("true" is a fixed value perhaps depending on the language.) */
10418 /* If first arg is constant true, return it. */
10419 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10420 return fold_convert_loc (loc, type, arg0);
10421 /* FALLTHRU */
10422 case TRUTH_OR_EXPR:
10423 /* If either arg is constant zero, drop it. */
10424 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10425 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10426 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10427 /* Preserve sequence points. */
10428 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10429 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10430 /* If second arg is constant true, result is true, but we must
10431 evaluate first arg. */
10432 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10433 return omit_one_operand_loc (loc, type, arg1, arg0);
10434 /* Likewise for first arg, but note this only occurs here for
10435 TRUTH_OR_EXPR. */
10436 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10437 return omit_one_operand_loc (loc, type, arg0, arg1);
10438
10439 /* !X || X is always true. */
10440 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10441 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10442 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10443 /* X || !X is always true. */
10444 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10445 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10446 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10447
10448 /* (X && !Y) || (!X && Y) is X ^ Y. */
10449 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10450 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10451 {
10452 tree a0, a1, l0, l1, n0, n1;
10453
10454 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10455 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10456
10457 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10458 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10459
10460 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10461 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10462
10463 if ((operand_equal_p (n0, a0, 0)
10464 && operand_equal_p (n1, a1, 0))
10465 || (operand_equal_p (n0, a1, 0)
10466 && operand_equal_p (n1, a0, 0)))
10467 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10468 }
10469
10470 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10471 != NULL_TREE)
10472 return tem;
10473
10474 return NULL_TREE;
10475
10476 case TRUTH_XOR_EXPR:
10477 /* If the second arg is constant zero, drop it. */
10478 if (integer_zerop (arg1))
10479 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10480 /* If the second arg is constant true, this is a logical inversion. */
10481 if (integer_onep (arg1))
10482 {
10483 tem = invert_truthvalue_loc (loc, arg0);
10484 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10485 }
10486 /* Identical arguments cancel to zero. */
10487 if (operand_equal_p (arg0, arg1, 0))
10488 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10489
10490 /* !X ^ X is always true. */
10491 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10492 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10493 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10494
10495 /* X ^ !X is always true. */
10496 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10497 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10498 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10499
10500 return NULL_TREE;
10501
10502 case EQ_EXPR:
10503 case NE_EXPR:
10504 STRIP_NOPS (arg0);
10505 STRIP_NOPS (arg1);
10506
10507 tem = fold_comparison (loc, code, type, op0, op1);
10508 if (tem != NULL_TREE)
10509 return tem;
10510
10511 /* bool_var != 1 becomes !bool_var. */
10512 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10513 && code == NE_EXPR)
10514 return fold_convert_loc (loc, type,
10515 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10516 TREE_TYPE (arg0), arg0));
10517
10518 /* bool_var == 0 becomes !bool_var. */
10519 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10520 && code == EQ_EXPR)
10521 return fold_convert_loc (loc, type,
10522 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10523 TREE_TYPE (arg0), arg0));
10524
10525 /* !exp != 0 becomes !exp */
10526 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10527 && code == NE_EXPR)
10528 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10529
10530 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
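/* E.g. X + Y == X folds to Y == 0: adding Y changes X exactly when
Y is nonzero, even in wrapping arithmetic, so only Y needs testing
(X is still evaluated for its side effects). */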
10531 if ((TREE_CODE (arg0) == PLUS_EXPR
10532 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10533 || TREE_CODE (arg0) == MINUS_EXPR)
10534 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10535 0)),
10536 arg1, 0)
10537 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10538 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10539 {
10540 tree val = TREE_OPERAND (arg0, 1);
10541 val = fold_build2_loc (loc, code, type, val,
10542 build_int_cst (TREE_TYPE (val), 0));
10543 return omit_two_operands_loc (loc, type, val,
10544 TREE_OPERAND (arg0, 0), arg1);
10545 }
10546
10547 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10548 if ((TREE_CODE (arg1) == PLUS_EXPR
10549 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10550 || TREE_CODE (arg1) == MINUS_EXPR)
10551 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10552 0)),
10553 arg0, 0)
10554 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10555 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10556 {
10557 tree val = TREE_OPERAND (arg1, 1);
10558 val = fold_build2_loc (loc, code, type, val,
10559 build_int_cst (TREE_TYPE (val), 0));
10560 return omit_two_operands_loc (loc, type, val,
10561 TREE_OPERAND (arg1, 0), arg0);
10562 }
10563
10564 /* If this is an EQ or NE comparison with zero and ARG0 is
10565 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10566 two operations, but the latter can be done in one less insn
10567 on machines that have only two-operand insns or on which a
10568 constant cannot be the first operand. */
10569 if (TREE_CODE (arg0) == BIT_AND_EXPR
10570 && integer_zerop (arg1))
10571 {
10572 tree arg00 = TREE_OPERAND (arg0, 0);
10573 tree arg01 = TREE_OPERAND (arg0, 1);
10574 if (TREE_CODE (arg00) == LSHIFT_EXPR
10575 && integer_onep (TREE_OPERAND (arg00, 0)))
10576 {
10577 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10578 arg01, TREE_OPERAND (arg00, 1));
10579 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10580 build_int_cst (TREE_TYPE (arg0), 1));
10581 return fold_build2_loc (loc, code, type,
10582 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10583 arg1);
10584 }
10585 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10586 && integer_onep (TREE_OPERAND (arg01, 0)))
10587 {
10588 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10589 arg00, TREE_OPERAND (arg01, 1));
10590 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10591 build_int_cst (TREE_TYPE (arg0), 1));
10592 return fold_build2_loc (loc, code, type,
10593 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10594 arg1);
10595 }
10596 }
10597
10598 /* If this is an NE or EQ comparison of zero against the result of a
10599 signed MOD operation whose second operand is a power of 2, make
10600 the MOD operation unsigned since it is simpler and equivalent. */
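/* E.g. for signed X, X % 16 == 0 becomes (unsigned) X % 16U == 0,
which can then be expanded as a simple mask test,
((unsigned) X & 15U) == 0, with no sign fixups. */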
10601 if (integer_zerop (arg1)
10602 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10603 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10604 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10605 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10606 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10607 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10608 {
10609 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10610 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10611 fold_convert_loc (loc, newtype,
10612 TREE_OPERAND (arg0, 0)),
10613 fold_convert_loc (loc, newtype,
10614 TREE_OPERAND (arg0, 1)));
10615
10616 return fold_build2_loc (loc, code, type, newmod,
10617 fold_convert_loc (loc, newtype, arg1));
10618 }
10619
10620 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10621 C1 is a valid shift constant, and C2 is a power of two, i.e.
10622 a single bit. */
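/* E.g. for 32-bit X, ((X >> 3) & 4) != 0 tests bit 5 of X and
becomes (X & 32) != 0, whereas ((X >> 31) & 2) != 0 would shift the
tested bit out of range and instead becomes X < 0 for signed X. */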
10623 if (TREE_CODE (arg0) == BIT_AND_EXPR
10624 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10625 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10626 == INTEGER_CST
10627 && integer_pow2p (TREE_OPERAND (arg0, 1))
10628 && integer_zerop (arg1))
10629 {
10630 tree itype = TREE_TYPE (arg0);
10631 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10632 prec = TYPE_PRECISION (itype);
10633
10634 /* Check for a valid shift count. */
10635 if (wi::ltu_p (arg001, prec))
10636 {
10637 tree arg01 = TREE_OPERAND (arg0, 1);
10638 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10639 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10640 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10641 can be rewritten as (X & (C2 << C1)) != 0. */
10642 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10643 {
10644 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10645 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10646 return fold_build2_loc (loc, code, type, tem,
10647 fold_convert_loc (loc, itype, arg1));
10648 }
10649 /* Otherwise, for signed (arithmetic) shifts,
10650 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10651 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10652 else if (!TYPE_UNSIGNED (itype))
10653 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10654 arg000, build_int_cst (itype, 0));
10655 /* Otherwise, for unsigned (logical) shifts,
10656 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10657 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10658 else
10659 return omit_one_operand_loc (loc, type,
10660 code == EQ_EXPR ? integer_one_node
10661 : integer_zero_node,
10662 arg000);
10663 }
10664 }
10665
10666 /* If this is a comparison of a field, we may be able to simplify it. */
10667 if ((TREE_CODE (arg0) == COMPONENT_REF
10668 || TREE_CODE (arg0) == BIT_FIELD_REF)
10669 /* Handle the constant case even without -O
10670 to make sure the warnings are given. */
10671 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10672 {
10673 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10674 if (t1)
10675 return t1;
10676 }
10677
10678 /* Optimize comparisons of strlen vs zero to a compare of the
10679 first character of the string vs zero. To wit,
10680 strlen(ptr) == 0 => *ptr == 0
10681 strlen(ptr) != 0 => *ptr != 0
10682 Other cases should reduce to one of these two (or a constant)
10683 due to the return value of strlen being unsigned. */
10684 if (TREE_CODE (arg0) == CALL_EXPR
10685 && integer_zerop (arg1))
10686 {
10687 tree fndecl = get_callee_fndecl (arg0);
10688
10689 if (fndecl
10690 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10691 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10692 && call_expr_nargs (arg0) == 1
10693 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10694 {
10695 tree iref = build_fold_indirect_ref_loc (loc,
10696 CALL_EXPR_ARG (arg0, 0));
10697 return fold_build2_loc (loc, code, type, iref,
10698 build_int_cst (TREE_TYPE (iref), 0));
10699 }
10700 }
10701
10702 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10703 of X. Similarly fold (X >> C) == 0 into X >= 0. */
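/* E.g. for 32-bit X, (X >> 31) != 0 becomes X < 0; an unsigned X
is first converted to the corresponding signed type so that the
sign-bit test is expressible. */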
10704 if (TREE_CODE (arg0) == RSHIFT_EXPR
10705 && integer_zerop (arg1)
10706 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10707 {
10708 tree arg00 = TREE_OPERAND (arg0, 0);
10709 tree arg01 = TREE_OPERAND (arg0, 1);
10710 tree itype = TREE_TYPE (arg00);
10711 if (wi::eq_p (arg01, element_precision (itype) - 1))
10712 {
10713 if (TYPE_UNSIGNED (itype))
10714 {
10715 itype = signed_type_for (itype);
10716 arg00 = fold_convert_loc (loc, itype, arg00);
10717 }
10718 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10719 type, arg00, build_zero_cst (itype));
10720 }
10721 }
10722
10723 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10724 (X & C) == 0 when C is a single bit. */
10725 if (TREE_CODE (arg0) == BIT_AND_EXPR
10726 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10727 && integer_zerop (arg1)
10728 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10729 {
10730 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10731 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10732 TREE_OPERAND (arg0, 1));
10733 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10734 type, tem,
10735 fold_convert_loc (loc, TREE_TYPE (arg0),
10736 arg1));
10737 }
10738
10739 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10740 constant C is a power of two, i.e. a single bit. */
10741 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10742 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10743 && integer_zerop (arg1)
10744 && integer_pow2p (TREE_OPERAND (arg0, 1))
10745 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10746 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10747 {
10748 tree arg00 = TREE_OPERAND (arg0, 0);
10749 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10750 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10751 }
10752
10753 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10754 when C is a power of two, i.e. a single bit. */
10755 if (TREE_CODE (arg0) == BIT_AND_EXPR
10756 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10757 && integer_zerop (arg1)
10758 && integer_pow2p (TREE_OPERAND (arg0, 1))
10759 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10760 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10761 {
10762 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10763 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10764 arg000, TREE_OPERAND (arg0, 1));
10765 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10766 tem, build_int_cst (TREE_TYPE (tem), 0));
10767 }
10768
10769 if (integer_zerop (arg1)
10770 && tree_expr_nonzero_p (arg0))
10771 {
10772 tree res = constant_boolean_node (code == NE_EXPR, type);
10773 return omit_one_operand_loc (loc, type, res, arg0);
10774 }
10775
10776 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
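/* Two masked values differ exactly when X and Y differ in some bit
covered by the mask, so e.g. (X & 0xF0) == (Y & 0xF0) becomes
((X ^ Y) & 0xF0) == 0. */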
10777 if (TREE_CODE (arg0) == BIT_AND_EXPR
10778 && TREE_CODE (arg1) == BIT_AND_EXPR)
10779 {
10780 tree arg00 = TREE_OPERAND (arg0, 0);
10781 tree arg01 = TREE_OPERAND (arg0, 1);
10782 tree arg10 = TREE_OPERAND (arg1, 0);
10783 tree arg11 = TREE_OPERAND (arg1, 1);
10784 tree itype = TREE_TYPE (arg0);
10785
10786 if (operand_equal_p (arg01, arg11, 0))
10787 {
10788 tem = fold_convert_loc (loc, itype, arg10);
10789 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10790 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10791 return fold_build2_loc (loc, code, type, tem,
10792 build_zero_cst (itype));
10793 }
10794 if (operand_equal_p (arg01, arg10, 0))
10795 {
10796 tem = fold_convert_loc (loc, itype, arg11);
10797 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10798 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10799 return fold_build2_loc (loc, code, type, tem,
10800 build_zero_cst (itype));
10801 }
10802 if (operand_equal_p (arg00, arg11, 0))
10803 {
10804 tem = fold_convert_loc (loc, itype, arg10);
10805 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10806 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10807 return fold_build2_loc (loc, code, type, tem,
10808 build_zero_cst (itype));
10809 }
10810 if (operand_equal_p (arg00, arg10, 0))
10811 {
10812 tem = fold_convert_loc (loc, itype, arg11);
10813 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10814 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10815 return fold_build2_loc (loc, code, type, tem,
10816 build_zero_cst (itype));
10817 }
10818 }
10819
10820 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10821 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10822 {
10823 tree arg00 = TREE_OPERAND (arg0, 0);
10824 tree arg01 = TREE_OPERAND (arg0, 1);
10825 tree arg10 = TREE_OPERAND (arg1, 0);
10826 tree arg11 = TREE_OPERAND (arg1, 1);
10827 tree itype = TREE_TYPE (arg0);
10828
10829 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10830 operand_equal_p guarantees no side-effects so we don't need
10831 to use omit_one_operand on Z. */
10832 if (operand_equal_p (arg01, arg11, 0))
10833 return fold_build2_loc (loc, code, type, arg00,
10834 fold_convert_loc (loc, TREE_TYPE (arg00),
10835 arg10));
10836 if (operand_equal_p (arg01, arg10, 0))
10837 return fold_build2_loc (loc, code, type, arg00,
10838 fold_convert_loc (loc, TREE_TYPE (arg00),
10839 arg11));
10840 if (operand_equal_p (arg00, arg11, 0))
10841 return fold_build2_loc (loc, code, type, arg01,
10842 fold_convert_loc (loc, TREE_TYPE (arg01),
10843 arg10));
10844 if (operand_equal_p (arg00, arg10, 0))
10845 return fold_build2_loc (loc, code, type, arg01,
10846 fold_convert_loc (loc, TREE_TYPE (arg01),
10847 arg11));
10848
10849 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
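/* E.g. (X ^ 5) == (Y ^ 3) becomes (X ^ 6) == Y, since 5 ^ 3 == 6;
the two constants collapse into one and one XOR disappears. */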
10850 if (TREE_CODE (arg01) == INTEGER_CST
10851 && TREE_CODE (arg11) == INTEGER_CST)
10852 {
10853 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10854 fold_convert_loc (loc, itype, arg11));
10855 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10856 return fold_build2_loc (loc, code, type, tem,
10857 fold_convert_loc (loc, itype, arg10));
10858 }
10859 }
10860
10861 /* Attempt to simplify equality/inequality comparisons of complex
10862 values. Only lower the comparison if the result is known or
10863 can be simplified to a single scalar comparison. */
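/* E.g. if the real parts fold to a known equality, an EQ_EXPR on
COMPLEX_EXPR <A, B> and COMPLEX_EXPR <A, C> reduces to the scalar
test B == C; if they fold to a known inequality, the EQ_EXPR is
false regardless of the imaginary parts. */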
10864 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10865 || TREE_CODE (arg0) == COMPLEX_CST)
10866 && (TREE_CODE (arg1) == COMPLEX_EXPR
10867 || TREE_CODE (arg1) == COMPLEX_CST))
10868 {
10869 tree real0, imag0, real1, imag1;
10870 tree rcond, icond;
10871
10872 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10873 {
10874 real0 = TREE_OPERAND (arg0, 0);
10875 imag0 = TREE_OPERAND (arg0, 1);
10876 }
10877 else
10878 {
10879 real0 = TREE_REALPART (arg0);
10880 imag0 = TREE_IMAGPART (arg0);
10881 }
10882
10883 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10884 {
10885 real1 = TREE_OPERAND (arg1, 0);
10886 imag1 = TREE_OPERAND (arg1, 1);
10887 }
10888 else
10889 {
10890 real1 = TREE_REALPART (arg1);
10891 imag1 = TREE_IMAGPART (arg1);
10892 }
10893
10894 rcond = fold_binary_loc (loc, code, type, real0, real1);
10895 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10896 {
10897 if (integer_zerop (rcond))
10898 {
10899 if (code == EQ_EXPR)
10900 return omit_two_operands_loc (loc, type, boolean_false_node,
10901 imag0, imag1);
10902 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10903 }
10904 else
10905 {
10906 if (code == NE_EXPR)
10907 return omit_two_operands_loc (loc, type, boolean_true_node,
10908 imag0, imag1);
10909 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10910 }
10911 }
10912
10913 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10914 if (icond && TREE_CODE (icond) == INTEGER_CST)
10915 {
10916 if (integer_zerop (icond))
10917 {
10918 if (code == EQ_EXPR)
10919 return omit_two_operands_loc (loc, type, boolean_false_node,
10920 real0, real1);
10921 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10922 }
10923 else
10924 {
10925 if (code == NE_EXPR)
10926 return omit_two_operands_loc (loc, type, boolean_true_node,
10927 real0, real1);
10928 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10929 }
10930 }
10931 }
10932
10933 return NULL_TREE;
10934
10935 case LT_EXPR:
10936 case GT_EXPR:
10937 case LE_EXPR:
10938 case GE_EXPR:
10939 tem = fold_comparison (loc, code, type, op0, op1);
10940 if (tem != NULL_TREE)
10941 return tem;
10942
10943 /* Transform comparisons of the form X +- C CMP X. */
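/* E.g. when signed overflow is undefined, X + 1 > X folds to true
and X - 1 > X folds to false; -Wstrict-overflow can warn here since
the result would differ if X + 1 were allowed to wrap. */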
10944 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10945 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10946 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10947 && !HONOR_SNANS (arg0))
10948 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10949 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10950 {
10951 tree arg01 = TREE_OPERAND (arg0, 1);
10952 enum tree_code code0 = TREE_CODE (arg0);
10953 int is_positive;
10954
10955 if (TREE_CODE (arg01) == REAL_CST)
10956 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10957 else
10958 is_positive = tree_int_cst_sgn (arg01);
10959
10960 /* (X - c) > X becomes false. */
10961 if (code == GT_EXPR
10962 && ((code0 == MINUS_EXPR && is_positive >= 0)
10963 || (code0 == PLUS_EXPR && is_positive <= 0)))
10964 {
10965 if (TREE_CODE (arg01) == INTEGER_CST
10966 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10967 fold_overflow_warning (("assuming signed overflow does not "
10968 "occur when assuming that (X - c) > X "
10969 "is always false"),
10970 WARN_STRICT_OVERFLOW_ALL);
10971 return constant_boolean_node (0, type);
10972 }
10973
10974 /* Likewise (X + c) < X becomes false. */
10975 if (code == LT_EXPR
10976 && ((code0 == PLUS_EXPR && is_positive >= 0)
10977 || (code0 == MINUS_EXPR && is_positive <= 0)))
10978 {
10979 if (TREE_CODE (arg01) == INTEGER_CST
10980 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10981 fold_overflow_warning (("assuming signed overflow does not "
10982 "occur when assuming that "
10983 "(X + c) < X is always false"),
10984 WARN_STRICT_OVERFLOW_ALL);
10985 return constant_boolean_node (0, type);
10986 }
10987
10988 /* Convert (X - c) <= X to true. */
10989 if (!HONOR_NANS (arg1)
10990 && code == LE_EXPR
10991 && ((code0 == MINUS_EXPR && is_positive >= 0)
10992 || (code0 == PLUS_EXPR && is_positive <= 0)))
10993 {
10994 if (TREE_CODE (arg01) == INTEGER_CST
10995 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10996 fold_overflow_warning (("assuming signed overflow does not "
10997 "occur when assuming that "
10998 "(X - c) <= X is always true"),
10999 WARN_STRICT_OVERFLOW_ALL);
11000 return constant_boolean_node (1, type);
11001 }
11002
11003 /* Convert (X + c) >= X to true. */
11004 if (!HONOR_NANS (arg1)
11005 && code == GE_EXPR
11006 && ((code0 == PLUS_EXPR && is_positive >= 0)
11007 || (code0 == MINUS_EXPR && is_positive <= 0)))
11008 {
11009 if (TREE_CODE (arg01) == INTEGER_CST
11010 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11011 fold_overflow_warning (("assuming signed overflow does not "
11012 "occur when assuming that "
11013 "(X + c) >= X is always true"),
11014 WARN_STRICT_OVERFLOW_ALL);
11015 return constant_boolean_node (1, type);
11016 }
11017
11018 if (TREE_CODE (arg01) == INTEGER_CST)
11019 {
11020 /* Convert X + c > X and X - c < X to true for integers. */
11021 if (code == GT_EXPR
11022 && ((code0 == PLUS_EXPR && is_positive > 0)
11023 || (code0 == MINUS_EXPR && is_positive < 0)))
11024 {
11025 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11026 fold_overflow_warning (("assuming signed overflow does "
11027 "not occur when assuming that "
11028 "(X + c) > X is always true"),
11029 WARN_STRICT_OVERFLOW_ALL);
11030 return constant_boolean_node (1, type);
11031 }
11032
11033 if (code == LT_EXPR
11034 && ((code0 == MINUS_EXPR && is_positive > 0)
11035 || (code0 == PLUS_EXPR && is_positive < 0)))
11036 {
11037 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11038 fold_overflow_warning (("assuming signed overflow does "
11039 "not occur when assuming that "
11040 "(X - c) < X is always true"),
11041 WARN_STRICT_OVERFLOW_ALL);
11042 return constant_boolean_node (1, type);
11043 }
11044
11045 /* Convert X + c <= X and X - c >= X to false for integers. */
11046 if (code == LE_EXPR
11047 && ((code0 == PLUS_EXPR && is_positive > 0)
11048 || (code0 == MINUS_EXPR && is_positive < 0)))
11049 {
11050 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11051 fold_overflow_warning (("assuming signed overflow does "
11052 "not occur when assuming that "
11053 "(X + c) <= X is always false"),
11054 WARN_STRICT_OVERFLOW_ALL);
11055 return constant_boolean_node (0, type);
11056 }
11057
11058 if (code == GE_EXPR
11059 && ((code0 == MINUS_EXPR && is_positive > 0)
11060 || (code0 == PLUS_EXPR && is_positive < 0)))
11061 {
11062 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11063 fold_overflow_warning (("assuming signed overflow does "
11064 "not occur when assuming that "
11065 "(X - c) >= X is always false"),
11066 WARN_STRICT_OVERFLOW_ALL);
11067 return constant_boolean_node (0, type);
11068 }
11069 }
11070 }
11071
11072 /* If we are comparing an ABS_EXPR with a constant, we can
11073 convert all the cases into explicit comparisons, but they may
11074 well not be faster than doing the ABS and one comparison.
11075 But ABS (X) <= C is a range comparison, which becomes a subtraction
11076 and a comparison, and is probably faster. */
11077 if (code == LE_EXPR
11078 && TREE_CODE (arg1) == INTEGER_CST
11079 && TREE_CODE (arg0) == ABS_EXPR
11080 && ! TREE_SIDE_EFFECTS (arg0)
11081 && (0 != (tem = negate_expr (arg1)))
11082 && TREE_CODE (tem) == INTEGER_CST
11083 && !TREE_OVERFLOW (tem))
11084 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11085 build2 (GE_EXPR, type,
11086 TREE_OPERAND (arg0, 0), tem),
11087 build2 (LE_EXPR, type,
11088 TREE_OPERAND (arg0, 0), arg1));
11089
11090 /* Convert ABS_EXPR<x> >= 0 to true. */
11091 strict_overflow_p = false;
11092 if (code == GE_EXPR
11093 && (integer_zerop (arg1)
11094 || (! HONOR_NANS (arg0)
11095 && real_zerop (arg1)))
11096 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11097 {
11098 if (strict_overflow_p)
11099 fold_overflow_warning (("assuming signed overflow does not occur "
11100 "when simplifying comparison of "
11101 "absolute value and zero"),
11102 WARN_STRICT_OVERFLOW_CONDITIONAL);
11103 return omit_one_operand_loc (loc, type,
11104 constant_boolean_node (true, type),
11105 arg0);
11106 }
11107
11108 /* Convert ABS_EXPR<x> < 0 to false. */
11109 strict_overflow_p = false;
11110 if (code == LT_EXPR
11111 && (integer_zerop (arg1) || real_zerop (arg1))
11112 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11113 {
11114 if (strict_overflow_p)
11115 fold_overflow_warning (("assuming signed overflow does not occur "
11116 "when simplifying comparison of "
11117 "absolute value and zero"),
11118 WARN_STRICT_OVERFLOW_CONDITIONAL);
11119 return omit_one_operand_loc (loc, type,
11120 constant_boolean_node (false, type),
11121 arg0);
11122 }
11123
11124 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11125 and similarly for >= into !=. */
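/* For unsigned X and in-range Y, X < (1 << Y) holds exactly when
all bits of X at position Y and above are clear, i.e. when
X >> Y == 0, which avoids materializing the power of two. */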
11126 if ((code == LT_EXPR || code == GE_EXPR)
11127 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11128 && TREE_CODE (arg1) == LSHIFT_EXPR
11129 && integer_onep (TREE_OPERAND (arg1, 0)))
11130 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11131 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11132 TREE_OPERAND (arg1, 1)),
11133 build_zero_cst (TREE_TYPE (arg0)));
11134
11135 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11136 otherwise Y might be >= # of bits in X's type and thus e.g.
11137 (unsigned char) (1 << Y) for Y == 15 might be 0.
11138 If the cast is widening, then 1 << Y should have unsigned type,
11139 otherwise if Y is the number of bits in the signed shift type minus 1,
11140 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11141 Y == 31 might be 0xffffffff80000000. */
11142 if ((code == LT_EXPR || code == GE_EXPR)
11143 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11144 && CONVERT_EXPR_P (arg1)
11145 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11146 && (element_precision (TREE_TYPE (arg1))
11147 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11148 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11149 || (element_precision (TREE_TYPE (arg1))
11150 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11151 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11152 {
11153 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11154 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11155 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11156 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11157 build_zero_cst (TREE_TYPE (arg0)));
11158 }
11159
11160 return NULL_TREE;
11161
11162 case UNORDERED_EXPR:
11163 case ORDERED_EXPR:
11164 case UNLT_EXPR:
11165 case UNLE_EXPR:
11166 case UNGT_EXPR:
11167 case UNGE_EXPR:
11168 case UNEQ_EXPR:
11169 case LTGT_EXPR:
11170 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11171 {
11172 tree targ0 = strip_float_extensions (arg0);
11173 tree targ1 = strip_float_extensions (arg1);
11174 tree newtype = TREE_TYPE (targ0);
11175
11176 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11177 newtype = TREE_TYPE (targ1);
11178
11179 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11180 return fold_build2_loc (loc, code, type,
11181 fold_convert_loc (loc, newtype, targ0),
11182 fold_convert_loc (loc, newtype, targ1));
11183 }
11184
11185 return NULL_TREE;
11186
11187 case COMPOUND_EXPR:
11188 /* When pedantic, a compound expression can be neither an lvalue
11189 nor an integer constant expression. */
11190 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11191 return NULL_TREE;
11192 /* Don't let (0, 0) be a null pointer constant. */
11193 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11194 : fold_convert_loc (loc, type, arg1);
11195 return pedantic_non_lvalue_loc (loc, tem);
11196
11197 case ASSERT_EXPR:
11198 /* An ASSERT_EXPR should never be passed to fold_binary. */
11199 gcc_unreachable ();
11200
11201 default:
11202 return NULL_TREE;
11203 } /* switch (code) */
11204 }
11205
11206 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11207 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11208 of GOTO_EXPR. */
11209
11210 static tree
11211 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11212 {
11213 switch (TREE_CODE (*tp))
11214 {
11215 case LABEL_EXPR:
11216 return *tp;
11217
11218 case GOTO_EXPR:
11219 *walk_subtrees = 0;
11220
11221 /* fall through */
11222
11223 default:
11224 return NULL_TREE;
11225 }
11226 }
11227
11228 /* Return whether the sub-tree ST contains a label which is accessible from
11229 outside the sub-tree. */
11230
11231 static bool
11232 contains_label_p (tree st)
11233 {
11234 return
11235 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11236 }
11237
11238 /* Fold a ternary expression of code CODE and type TYPE with operands
11239 OP0, OP1, and OP2. Return the folded expression if folding is
11240 successful. Otherwise, return NULL_TREE. */
11241
11242 tree
11243 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11244 tree op0, tree op1, tree op2)
11245 {
11246 tree tem;
11247 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11248 enum tree_code_class kind = TREE_CODE_CLASS (code);
11249
11250 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11251 && TREE_CODE_LENGTH (code) == 3);
11252
11253 /* If this is a commutative operation, and OP0 is a constant, move it
11254 to OP1 to reduce the number of tests below. */
11255 if (commutative_ternary_tree_code (code)
11256 && tree_swap_operands_p (op0, op1))
11257 return fold_build3_loc (loc, code, type, op1, op0, op2);
11258
11259 tem = generic_simplify (loc, code, type, op0, op1, op2);
11260 if (tem)
11261 return tem;
11262
11263 /* Strip any conversions that don't change the mode. This is safe
11264 for every expression, except for a comparison expression because
11265 its signedness is derived from its operands. So, in the latter
11266 case, only strip conversions that don't change the signedness.
11267
11268 Note that this is done as an internal manipulation within the
11269 constant folder, in order to find the simplest representation of
11270 the arguments so that their form can be studied. In any cases,
11271 the appropriate type conversions should be put back in the tree
11272 that will get out of the constant folder. */
11273 if (op0)
11274 {
11275 arg0 = op0;
11276 STRIP_NOPS (arg0);
11277 }
11278
11279 if (op1)
11280 {
11281 arg1 = op1;
11282 STRIP_NOPS (arg1);
11283 }
11284
11285 if (op2)
11286 {
11287 arg2 = op2;
11288 STRIP_NOPS (arg2);
11289 }
11290
11291 switch (code)
11292 {
11293 case COMPONENT_REF:
11294 if (TREE_CODE (arg0) == CONSTRUCTOR
11295 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11296 {
11297 unsigned HOST_WIDE_INT idx;
11298 tree field, value;
11299 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11300 if (field == arg1)
11301 return value;
11302 }
11303 return NULL_TREE;
11304
11305 case COND_EXPR:
11306 case VEC_COND_EXPR:
11307 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11308 so all simple results must be passed through pedantic_non_lvalue. */
11309 if (TREE_CODE (arg0) == INTEGER_CST)
11310 {
11311 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11312 tem = integer_zerop (arg0) ? op2 : op1;
11313 /* Only optimize constant conditions when the selected branch
11314 has the same type as the COND_EXPR. This avoids optimizing
11315 away "c ? x : throw", where the throw has a void type.
11316 Also avoid discarding an operand that contains a label. */
11317 if ((!TREE_SIDE_EFFECTS (unused_op)
11318 || !contains_label_p (unused_op))
11319 && (! VOID_TYPE_P (TREE_TYPE (tem))
11320 || VOID_TYPE_P (type)))
11321 return pedantic_non_lvalue_loc (loc, tem);
11322 return NULL_TREE;
11323 }
11324 else if (TREE_CODE (arg0) == VECTOR_CST)
11325 {
11326 if ((TREE_CODE (arg1) == VECTOR_CST
11327 || TREE_CODE (arg1) == CONSTRUCTOR)
11328 && (TREE_CODE (arg2) == VECTOR_CST
11329 || TREE_CODE (arg2) == CONSTRUCTOR))
11330 {
11331 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11332 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11333 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11334 for (i = 0; i < nelts; i++)
11335 {
11336 tree val = VECTOR_CST_ELT (arg0, i);
11337 if (integer_all_onesp (val))
11338 sel[i] = i;
11339 else if (integer_zerop (val))
11340 sel[i] = nelts + i;
11341 else /* Currently unreachable. */
11342 return NULL_TREE;
11343 }
11344 tree t = fold_vec_perm (type, arg1, arg2, sel);
11345 if (t != NULL_TREE)
11346 return t;
11347 }
11348 }
11349
11350 /* If we have A op B ? A : C, we may be able to convert this to a
11351 simpler expression, depending on the operation and the values
11352 of B and C. Signed zeros prevent all of these transformations,
11353 for reasons given above each one.
11354
11355 Also try swapping the arguments and inverting the conditional. */
11356 if (COMPARISON_CLASS_P (arg0)
11357 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11358 arg1, TREE_OPERAND (arg0, 1))
11359 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11360 {
11361 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11362 if (tem)
11363 return tem;
11364 }
11365
11366 if (COMPARISON_CLASS_P (arg0)
11367 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11368 op2,
11369 TREE_OPERAND (arg0, 1))
11370 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11371 {
11372 location_t loc0 = expr_location_or (arg0, loc);
11373 tem = fold_invert_truthvalue (loc0, arg0);
11374 if (tem && COMPARISON_CLASS_P (tem))
11375 {
11376 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11377 if (tem)
11378 return tem;
11379 }
11380 }
11381
11382 /* If the second operand is simpler than the third, swap them
11383 since that produces better jump optimization results. */
11384 if (truth_value_p (TREE_CODE (arg0))
11385 && tree_swap_operands_p (op1, op2))
11386 {
11387 location_t loc0 = expr_location_or (arg0, loc);
11388 /* See if this can be inverted. If it can't, possibly because
11389 it was a floating-point inequality comparison, don't do
11390 anything. */
11391 tem = fold_invert_truthvalue (loc0, arg0);
11392 if (tem)
11393 return fold_build3_loc (loc, code, type, tem, op2, op1);
11394 }
11395
11396 /* Convert A ? 1 : 0 to simply A. */
11397 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11398 : (integer_onep (op1)
11399 && !VECTOR_TYPE_P (type)))
11400 && integer_zerop (op2)
11401 /* If we try to convert OP0 to our type, the
11402 call to fold will try to move the conversion inside
11403 a COND, which will recurse. In that case, the COND_EXPR
11404 is probably the best choice, so leave it alone. */
11405 && type == TREE_TYPE (arg0))
11406 return pedantic_non_lvalue_loc (loc, arg0);
11407
11408 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11409 over COND_EXPR in cases such as floating point comparisons. */
11410 if (integer_zerop (op1)
11411 && code == COND_EXPR
11412 && integer_onep (op2)
11413 && !VECTOR_TYPE_P (type)
11414 && truth_value_p (TREE_CODE (arg0)))
11415 return pedantic_non_lvalue_loc (loc,
11416 fold_convert_loc (loc, type,
11417 invert_truthvalue_loc (loc,
11418 arg0)));
11419
11420 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
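/* E.g. for 32-bit int A, A < 0 ? 0x80000000 : 0 becomes
A & 0x80000000: the selected value is exactly A's sign bit, so the
conditional is redundant. */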
11421 if (TREE_CODE (arg0) == LT_EXPR
11422 && integer_zerop (TREE_OPERAND (arg0, 1))
11423 && integer_zerop (op2)
11424 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11425 {
11426 /* sign_bit_p looks through both zero and sign extensions,
11427 but for this optimization only sign extensions are
11428 usable. */
11429 tree tem2 = TREE_OPERAND (arg0, 0);
11430 while (tem != tem2)
11431 {
11432 if (TREE_CODE (tem2) != NOP_EXPR
11433 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11434 {
11435 tem = NULL_TREE;
11436 break;
11437 }
11438 tem2 = TREE_OPERAND (tem2, 0);
11439 }
11440 /* sign_bit_p only checks ARG1 bits within A's precision.
11441 If <sign bit of A> has a wider type than A, bits outside
11442 of A's precision in <sign bit of A> need to be checked.
11443 If they are all 0, this optimization needs to be done
11444 in unsigned A's type; if they are all 1, in signed A's type;
11445 otherwise this can't be done. */
11446 if (tem
11447 && TYPE_PRECISION (TREE_TYPE (tem))
11448 < TYPE_PRECISION (TREE_TYPE (arg1))
11449 && TYPE_PRECISION (TREE_TYPE (tem))
11450 < TYPE_PRECISION (type))
11451 {
11452 int inner_width, outer_width;
11453 tree tem_type;
11454
11455 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11456 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11457 if (outer_width > TYPE_PRECISION (type))
11458 outer_width = TYPE_PRECISION (type);
11459
11460 wide_int mask = wi::shifted_mask
11461 (inner_width, outer_width - inner_width, false,
11462 TYPE_PRECISION (TREE_TYPE (arg1)));
11463
11464 wide_int common = mask & arg1;
11465 if (common == mask)
11466 {
11467 tem_type = signed_type_for (TREE_TYPE (tem));
11468 tem = fold_convert_loc (loc, tem_type, tem);
11469 }
11470 else if (common == 0)
11471 {
11472 tem_type = unsigned_type_for (TREE_TYPE (tem));
11473 tem = fold_convert_loc (loc, tem_type, tem);
11474 }
11475 else
11476 tem = NULL;
11477 }
11478
11479 if (tem)
11480 return
11481 fold_convert_loc (loc, type,
11482 fold_build2_loc (loc, BIT_AND_EXPR,
11483 TREE_TYPE (tem), tem,
11484 fold_convert_loc (loc,
11485 TREE_TYPE (tem),
11486 arg1)));
11487 }
11488
11489 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11490 already handled above. */
11491 if (TREE_CODE (arg0) == BIT_AND_EXPR
11492 && integer_onep (TREE_OPERAND (arg0, 1))
11493 && integer_zerop (op2)
11494 && integer_pow2p (arg1))
11495 {
11496 tree tem = TREE_OPERAND (arg0, 0);
11497 STRIP_NOPS (tem);
11498 if (TREE_CODE (tem) == RSHIFT_EXPR
11499 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11500 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11501 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11502 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11503 fold_convert_loc (loc, type,
11504 TREE_OPERAND (tem, 0)),
11505 op1);
11506 }
11507
11508 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11509 is probably obsolete because the first operand should be a
11510 truth value (that's why we have the two cases above), but let's
11511 leave it in until we can confirm this for all front-ends. */
11512 if (integer_zerop (op2)
11513 && TREE_CODE (arg0) == NE_EXPR
11514 && integer_zerop (TREE_OPERAND (arg0, 1))
11515 && integer_pow2p (arg1)
11516 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11517 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11518 arg1, OEP_ONLY_CONST))
11519 return pedantic_non_lvalue_loc (loc,
11520 fold_convert_loc (loc, type,
11521 TREE_OPERAND (arg0, 0)));
11522
11523 /* Disable the transformations below for vectors, since
11524 fold_binary_op_with_conditional_arg may undo them immediately,
11525 yielding an infinite loop. */
11526 if (code == VEC_COND_EXPR)
11527 return NULL_TREE;
11528
11529 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11530 if (integer_zerop (op2)
11531 && truth_value_p (TREE_CODE (arg0))
11532 && truth_value_p (TREE_CODE (arg1))
11533 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11534 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11535 : TRUTH_ANDIF_EXPR,
11536 type, fold_convert_loc (loc, type, arg0), op1);
11537
11538 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11539 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11540 && truth_value_p (TREE_CODE (arg0))
11541 && truth_value_p (TREE_CODE (arg1))
11542 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11543 {
11544 location_t loc0 = expr_location_or (arg0, loc);
11545 /* Only perform transformation if ARG0 is easily inverted. */
11546 tem = fold_invert_truthvalue (loc0, arg0);
11547 if (tem)
11548 return fold_build2_loc (loc, code == VEC_COND_EXPR
11549 ? BIT_IOR_EXPR
11550 : TRUTH_ORIF_EXPR,
11551 type, fold_convert_loc (loc, type, tem),
11552 op1);
11553 }
11554
11555 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11556 if (integer_zerop (arg1)
11557 && truth_value_p (TREE_CODE (arg0))
11558 && truth_value_p (TREE_CODE (op2))
11559 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11560 {
11561 location_t loc0 = expr_location_or (arg0, loc);
11562 /* Only perform transformation if ARG0 is easily inverted. */
11563 tem = fold_invert_truthvalue (loc0, arg0);
11564 if (tem)
11565 return fold_build2_loc (loc, code == VEC_COND_EXPR
11566 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11567 type, fold_convert_loc (loc, type, tem),
11568 op2);
11569 }
11570
11571 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11572 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11573 && truth_value_p (TREE_CODE (arg0))
11574 && truth_value_p (TREE_CODE (op2))
11575 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11576 return fold_build2_loc (loc, code == VEC_COND_EXPR
11577 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11578 type, fold_convert_loc (loc, type, arg0), op2);
11579
11580 return NULL_TREE;
11581
11582 case CALL_EXPR:
11583 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11584 of fold_ternary on them. */
11585 gcc_unreachable ();
11586
11587 case BIT_FIELD_REF:
11588 if (TREE_CODE (arg0) == VECTOR_CST
11589 && (type == TREE_TYPE (TREE_TYPE (arg0))
11590 || (TREE_CODE (type) == VECTOR_TYPE
11591 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11592 {
11593 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11594 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11595 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11596 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11597
11598 if (n != 0
11599 && (idx % width) == 0
11600 && (n % width) == 0
11601 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11602 {
11603 idx = idx / width;
11604 n = n / width;
11605
11606 if (TREE_CODE (arg0) == VECTOR_CST)
11607 {
11608 if (n == 1)
11609 return VECTOR_CST_ELT (arg0, idx);
11610
11611 tree *vals = XALLOCAVEC (tree, n);
11612 for (unsigned i = 0; i < n; ++i)
11613 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11614 return build_vector (type, vals);
11615 }
11616 }
11617 }
11618
11619 /* On constants we can use native encode/interpret to constant
11620 fold (nearly) all BIT_FIELD_REFs. */
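/* E.g. a BIT_FIELD_REF extracting bits 8..15 of the 32-bit constant
0x11223344 encodes the constant in its target byte order and
reinterprets the addressed byte, yielding 0x33 on a little-endian
target. */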
11621 if (CONSTANT_CLASS_P (arg0)
11622 && can_native_interpret_type_p (type)
11623 && BITS_PER_UNIT == 8)
11624 {
11625 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11626 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11627 /* Limit us to a reasonable amount of work. To relax the
11628 other limitations we need bit-shifting of the buffer
11629 and rounding up the size. */
11630 if (bitpos % BITS_PER_UNIT == 0
11631 && bitsize % BITS_PER_UNIT == 0
11632 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11633 {
11634 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11635 unsigned HOST_WIDE_INT len
11636 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11637 bitpos / BITS_PER_UNIT);
11638 if (len > 0
11639 && len * BITS_PER_UNIT >= bitsize)
11640 {
11641 tree v = native_interpret_expr (type, b,
11642 bitsize / BITS_PER_UNIT);
11643 if (v)
11644 return v;
11645 }
11646 }
11647 }
11648
11649 return NULL_TREE;
11650
11651 case FMA_EXPR:
11652 /* For integers we can decompose the FMA if possible. */
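/* E.g. FMA_EXPR <2, 3, X> becomes 2 * 3 + X, i.e. X + 6, and
FMA_EXPR <A, B, 0> degenerates to the plain product A * B. */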
11653 if (TREE_CODE (arg0) == INTEGER_CST
11654 && TREE_CODE (arg1) == INTEGER_CST)
11655 return fold_build2_loc (loc, PLUS_EXPR, type,
11656 const_binop (MULT_EXPR, arg0, arg1), arg2);
11657 if (integer_zerop (arg2))
11658 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11659
11660 return fold_fma (loc, type, arg0, arg1, arg2);
11661
11662 case VEC_PERM_EXPR:
11663 if (TREE_CODE (arg2) == VECTOR_CST)
11664 {
11665 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11666 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11667 unsigned char *sel2 = sel + nelts;
11668 bool need_mask_canon = false;
11669 bool need_mask_canon2 = false;
11670 bool all_in_vec0 = true;
11671 bool all_in_vec1 = true;
11672 bool maybe_identity = true;
11673 bool single_arg = (op0 == op1);
11674 bool changed = false;
11675
11676 mask2 = 2 * nelts - 1;
11677 mask = single_arg ? (nelts - 1) : mask2;
11678 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11679 for (i = 0; i < nelts; i++)
11680 {
11681 tree val = VECTOR_CST_ELT (arg2, i);
11682 if (TREE_CODE (val) != INTEGER_CST)
11683 return NULL_TREE;
11684
11685 /* Make sure that the perm value is in an acceptable
11686 range. */
11687 wide_int t = val;
11688 need_mask_canon |= wi::gtu_p (t, mask);
11689 need_mask_canon2 |= wi::gtu_p (t, mask2);
11690 sel[i] = t.to_uhwi () & mask;
11691 sel2[i] = t.to_uhwi () & mask2;
11692
11693 if (sel[i] < nelts)
11694 all_in_vec1 = false;
11695 else
11696 all_in_vec0 = false;
11697
11698 if ((sel[i] & (nelts-1)) != i)
11699 maybe_identity = false;
11700 }
11701
11702 if (maybe_identity)
11703 {
11704 if (all_in_vec0)
11705 return op0;
11706 if (all_in_vec1)
11707 return op1;
11708 }
11709
11710 if (all_in_vec0)
11711 op1 = op0;
11712 else if (all_in_vec1)
11713 {
11714 op0 = op1;
11715 for (i = 0; i < nelts; i++)
11716 sel[i] -= nelts;
11717 need_mask_canon = true;
11718 }
11719
11720 if ((TREE_CODE (op0) == VECTOR_CST
11721 || TREE_CODE (op0) == CONSTRUCTOR)
11722 && (TREE_CODE (op1) == VECTOR_CST
11723 || TREE_CODE (op1) == CONSTRUCTOR))
11724 {
11725 tree t = fold_vec_perm (type, op0, op1, sel);
11726 if (t != NULL_TREE)
11727 return t;
11728 }
11729
11730 if (op0 == op1 && !single_arg)
11731 changed = true;
11732
11733 /* Some targets are deficient and fail to expand a single
11734 argument permutation while still allowing an equivalent
11735 2-argument version. */
11736 if (need_mask_canon && arg2 == op2
11737 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11738 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11739 {
11740 need_mask_canon = need_mask_canon2;
11741 sel = sel2;
11742 }
11743
11744 if (need_mask_canon && arg2 == op2)
11745 {
11746 tree *tsel = XALLOCAVEC (tree, nelts);
11747 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11748 for (i = 0; i < nelts; i++)
11749 tsel[i] = build_int_cst (eltype, sel[i]);
11750 op2 = build_vector (TREE_TYPE (arg2), tsel);
11751 changed = true;
11752 }
11753
11754 if (changed)
11755 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11756 }
11757 return NULL_TREE;
11758
11759 case BIT_INSERT_EXPR:
11760 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
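/* E.g. inserting the 8-bit constant 0xAB at bit position 8 of the
32-bit constant 0x12345678 clears bits 8..15 with a shifted mask
and ORs in the zero-extended, shifted replacement, giving
0x1234AB78. */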
11761 if (TREE_CODE (arg0) == INTEGER_CST
11762 && TREE_CODE (arg1) == INTEGER_CST)
11763 {
11764 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11765 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11766 wide_int tem = wi::bit_and (arg0,
11767 wi::shifted_mask (bitpos, bitsize, true,
11768 TYPE_PRECISION (type)));
11769 wide_int tem2
11770 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11771 bitsize), bitpos);
11772 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11773 }
11774 else if (TREE_CODE (arg0) == VECTOR_CST
11775 && CONSTANT_CLASS_P (arg1)
11776 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11777 TREE_TYPE (arg1)))
11778 {
11779 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11780 unsigned HOST_WIDE_INT elsize
11781 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11782 if (bitpos % elsize == 0)
11783 {
11784 unsigned k = bitpos / elsize;
11785 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11786 return arg0;
11787 else
11788 {
11789 tree *elts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
11790 memcpy (elts, VECTOR_CST_ELTS (arg0),
11791 sizeof (tree) * TYPE_VECTOR_SUBPARTS (type));
11792 elts[k] = arg1;
11793 return build_vector (type, elts);
11794 }
11795 }
11796 }
11797 return NULL_TREE;
11798
11799 default:
11800 return NULL_TREE;
11801 } /* switch (code) */
11802 }
11803
11804 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11805 of an array (or vector). */
11806
11807 tree
11808 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11809 {
11810 tree index_type = NULL_TREE;
11811 offset_int low_bound = 0;
11812
11813 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11814 {
11815 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11816 if (domain_type && TYPE_MIN_VALUE (domain_type))
11817 {
11818 /* Static constructors for variably sized objects make no sense. */
11819 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11820 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11821 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11822 }
11823 }
11824
11825 if (index_type)
11826 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11827 TYPE_SIGN (index_type));
11828
11829 offset_int index = low_bound - 1;
11830 if (index_type)
11831 index = wi::ext (index, TYPE_PRECISION (index_type),
11832 TYPE_SIGN (index_type));
11833
11834 offset_int max_index;
11835 unsigned HOST_WIDE_INT cnt;
11836 tree cfield, cval;
11837
11838 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11839 {
11840 /* An array constructor might explicitly set the index, specify a range,
11841 or leave the index NULL, meaning that it is the next index after the
11842 previous one. */
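/* E.g. for the initializer { [3] = 7, 8 }, the element 8 has a
NULL index and is therefore found at access index 4. */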
11843 if (cfield)
11844 {
11845 if (TREE_CODE (cfield) == INTEGER_CST)
11846 max_index = index = wi::to_offset (cfield);
11847 else
11848 {
11849 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11850 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11851 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11852 }
11853 }
11854 else
11855 {
11856 index += 1;
11857 if (index_type)
11858 index = wi::ext (index, TYPE_PRECISION (index_type),
11859 TYPE_SIGN (index_type));
11860 max_index = index;
11861 }
11862
11863 /* Do we have a match? */
11864 if (wi::cmpu (access_index, index) >= 0
11865 && wi::cmpu (access_index, max_index) <= 0)
11866 return cval;
11867 }
11868 return NULL_TREE;
11869 }
11870
11871 /* Perform constant folding and related simplification of EXPR.
11872 The related simplifications include x*1 => x, x*0 => 0, etc.,
11873 and application of the associative law.
11874 NOP_EXPR conversions may be removed freely (as long as we
11875 are careful not to change the type of the overall expression).
11876 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11877 but we can constant-fold them if they have constant operands. */
11878
11879 #ifdef ENABLE_FOLD_CHECKING
11880 # define fold(x) fold_1 (x)
11881 static tree fold_1 (tree);
11882 static
11883 #endif
11884 tree
11885 fold (tree expr)
11886 {
11887 const tree t = expr;
11888 enum tree_code code = TREE_CODE (t);
11889 enum tree_code_class kind = TREE_CODE_CLASS (code);
11890 tree tem;
11891 location_t loc = EXPR_LOCATION (expr);
11892
11893 /* Return right away if a constant. */
11894 if (kind == tcc_constant)
11895 return t;
11896
11897 /* CALL_EXPR-like objects with variable numbers of operands are
11898 treated specially. */
11899 if (kind == tcc_vl_exp)
11900 {
11901 if (code == CALL_EXPR)
11902 {
11903 tem = fold_call_expr (loc, expr, false);
11904 return tem ? tem : expr;
11905 }
11906 return expr;
11907 }
11908
11909 if (IS_EXPR_CODE_CLASS (kind))
11910 {
11911 tree type = TREE_TYPE (t);
11912 tree op0, op1, op2;
11913
11914 switch (TREE_CODE_LENGTH (code))
11915 {
11916 case 1:
11917 op0 = TREE_OPERAND (t, 0);
11918 tem = fold_unary_loc (loc, code, type, op0);
11919 return tem ? tem : expr;
11920 case 2:
11921 op0 = TREE_OPERAND (t, 0);
11922 op1 = TREE_OPERAND (t, 1);
11923 tem = fold_binary_loc (loc, code, type, op0, op1);
11924 return tem ? tem : expr;
11925 case 3:
11926 op0 = TREE_OPERAND (t, 0);
11927 op1 = TREE_OPERAND (t, 1);
11928 op2 = TREE_OPERAND (t, 2);
11929 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11930 return tem ? tem : expr;
11931 default:
11932 break;
11933 }
11934 }
11935
11936 switch (code)
11937 {
11938 case ARRAY_REF:
11939 {
11940 tree op0 = TREE_OPERAND (t, 0);
11941 tree op1 = TREE_OPERAND (t, 1);
11942
11943 if (TREE_CODE (op1) == INTEGER_CST
11944 && TREE_CODE (op0) == CONSTRUCTOR
11945 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11946 {
11947 tree val = get_array_ctor_element_at_index (op0,
11948 wi::to_offset (op1));
11949 if (val)
11950 return val;
11951 }
11952
11953 return t;
11954 }
11955
11956 /* Return a VECTOR_CST if possible. */
11957 case CONSTRUCTOR:
11958 {
11959 tree type = TREE_TYPE (t);
11960 if (TREE_CODE (type) != VECTOR_TYPE)
11961 return t;
11962
11963 unsigned i;
11964 tree val;
11965 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11966 if (! CONSTANT_CLASS_P (val))
11967 return t;
11968
11969 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
11970 }
11971
11972 case CONST_DECL:
11973 return fold (DECL_INITIAL (t));
11974
11975 default:
11976 return t;
11977 } /* switch (code) */
11978 }
11979
11980 #ifdef ENABLE_FOLD_CHECKING
11981 #undef fold
11982
11983 static void fold_checksum_tree (const_tree, struct md5_ctx *,
11984 hash_table<nofree_ptr_hash<const tree_node> > *);
11985 static void fold_check_failed (const_tree, const_tree);
11986 void print_fold_checksum (const_tree);
11987
11988 /* When --enable-checking=fold, compute a digest of EXPR before
11989    and after the actual fold call to verify that fold did not
11990    accidentally change the original expr.  */
11991
11992 tree
11993 fold (tree expr)
11994 {
11995 tree ret;
11996 struct md5_ctx ctx;
11997 unsigned char checksum_before[16], checksum_after[16];
11998 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11999
12000 md5_init_ctx (&ctx);
12001 fold_checksum_tree (expr, &ctx, &ht);
12002 md5_finish_ctx (&ctx, checksum_before);
12003 ht.empty ();
12004
12005 ret = fold_1 (expr);
12006
12007 md5_init_ctx (&ctx);
12008 fold_checksum_tree (expr, &ctx, &ht);
12009 md5_finish_ctx (&ctx, checksum_after);
12010
12011 if (memcmp (checksum_before, checksum_after, 16))
12012 fold_check_failed (expr, ret);
12013
12014 return ret;
12015 }
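
/* The digest-before/digest-after pattern above is a general way to
   assert that a call does not mutate a structure it is only meant to
   inspect.  A minimal standalone sketch of the same idea, using a
   trivial byte sum in place of MD5 (all names below are illustrative
   only, not part of this file's interfaces):

     #include <assert.h>
     #include <stddef.h>

     static unsigned
     byte_sum (const void *p, size_t len)
     {
       const unsigned char *q = (const unsigned char *) p;
       unsigned sum = 0;
       while (len--)
	 sum += *q++;
       return sum;
     }

     struct node { int op; int val; };

     static int
     inspect_only (const struct node *n)  // must not modify *n
     {
       return n->op + n->val;
     }

     int
     main (void)
     {
       struct node n = { 1, 2 };
       unsigned before = byte_sum (&n, sizeof n);
       int r = inspect_only (&n);
       unsigned after = byte_sum (&n, sizeof n);
       assert (before == after);  // fail loudly if the call mutated n
       return r != 3;
     }

   fold_checksum_tree plays the role of byte_sum, except that it first
   masks out fields (DECL_ASSEMBLER_NAME, type caches, etc.) that fold
   is allowed to touch.  */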
12016
12017 void
12018 print_fold_checksum (const_tree expr)
12019 {
12020 struct md5_ctx ctx;
12021 unsigned char checksum[16], cnt;
12022 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12023
12024 md5_init_ctx (&ctx);
12025 fold_checksum_tree (expr, &ctx, &ht);
12026 md5_finish_ctx (&ctx, checksum);
12027 for (cnt = 0; cnt < 16; ++cnt)
12028 fprintf (stderr, "%02x", checksum[cnt]);
12029 putc ('\n', stderr);
12030 }
12031
12032 static void
12033 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12034 {
12035 internal_error ("fold check: original tree changed by fold");
12036 }
12037
12038 static void
12039 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12040 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12041 {
12042 const tree_node **slot;
12043 enum tree_code code;
12044 union tree_node buf;
12045 int i, len;
12046
12047 recursive_label:
12048 if (expr == NULL)
12049 return;
12050 slot = ht->find_slot (expr, INSERT);
12051 if (*slot != NULL)
12052 return;
12053 *slot = expr;
12054 code = TREE_CODE (expr);
12055 if (TREE_CODE_CLASS (code) == tcc_declaration
12056 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12057 {
12058 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12059 memcpy ((char *) &buf, expr, tree_size (expr));
12060 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12061 buf.decl_with_vis.symtab_node = NULL;
12062 expr = (tree) &buf;
12063 }
12064 else if (TREE_CODE_CLASS (code) == tcc_type
12065 && (TYPE_POINTER_TO (expr)
12066 || TYPE_REFERENCE_TO (expr)
12067 || TYPE_CACHED_VALUES_P (expr)
12068 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12069 || TYPE_NEXT_VARIANT (expr)
12070 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12071 {
12072 /* Allow these fields to be modified. */
12073 tree tmp;
12074 memcpy ((char *) &buf, expr, tree_size (expr));
12075 expr = tmp = (tree) &buf;
12076 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12077 TYPE_POINTER_TO (tmp) = NULL;
12078 TYPE_REFERENCE_TO (tmp) = NULL;
12079 TYPE_NEXT_VARIANT (tmp) = NULL;
12080 TYPE_ALIAS_SET (tmp) = -1;
12081 if (TYPE_CACHED_VALUES_P (tmp))
12082 {
12083 TYPE_CACHED_VALUES_P (tmp) = 0;
12084 TYPE_CACHED_VALUES (tmp) = NULL;
12085 }
12086 }
12087 md5_process_bytes (expr, tree_size (expr), ctx);
12088 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12089 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12090 if (TREE_CODE_CLASS (code) != tcc_type
12091 && TREE_CODE_CLASS (code) != tcc_declaration
12092 && code != TREE_LIST
12093 && code != SSA_NAME
12094 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12095 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12096 switch (TREE_CODE_CLASS (code))
12097 {
12098 case tcc_constant:
12099 switch (code)
12100 {
12101 case STRING_CST:
12102 md5_process_bytes (TREE_STRING_POINTER (expr),
12103 TREE_STRING_LENGTH (expr), ctx);
12104 break;
12105 case COMPLEX_CST:
12106 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12107 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12108 break;
12109 case VECTOR_CST:
12110 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12111 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12112 break;
12113 default:
12114 break;
12115 }
12116 break;
12117 case tcc_exceptional:
12118 switch (code)
12119 {
12120 case TREE_LIST:
12121 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12122 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12123 expr = TREE_CHAIN (expr);
12124 goto recursive_label;
12125 break;
12126 case TREE_VEC:
12127 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12128 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12129 break;
12130 default:
12131 break;
12132 }
12133 break;
12134 case tcc_expression:
12135 case tcc_reference:
12136 case tcc_comparison:
12137 case tcc_unary:
12138 case tcc_binary:
12139 case tcc_statement:
12140 case tcc_vl_exp:
12141 len = TREE_OPERAND_LENGTH (expr);
12142 for (i = 0; i < len; ++i)
12143 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12144 break;
12145 case tcc_declaration:
12146 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12147 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12148 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12149 {
12150 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12151 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12152 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12153 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12154 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12155 }
12156
12157 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12158 {
12159 if (TREE_CODE (expr) == FUNCTION_DECL)
12160 {
12161 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12162 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12163 }
12164 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12165 }
12166 break;
12167 case tcc_type:
12168 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12169 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12170 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12171 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12172 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12173 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12174 if (INTEGRAL_TYPE_P (expr)
12175 || SCALAR_FLOAT_TYPE_P (expr))
12176 {
12177 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12178 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12179 }
12180 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12181 if (TREE_CODE (expr) == RECORD_TYPE
12182 || TREE_CODE (expr) == UNION_TYPE
12183 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12184 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12185 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12186 break;
12187 default:
12188 break;
12189 }
12190 }
12191
12192 /* Helper function for outputting the checksum of a tree T. When
12193 debugging with gdb, you can "define mynext" to be "next" followed
12194 by "call debug_fold_checksum (op0)", then just trace down till the
12195 outputs differ. */
12196
12197 DEBUG_FUNCTION void
12198 debug_fold_checksum (const_tree t)
12199 {
12200 int i;
12201 unsigned char checksum[16];
12202 struct md5_ctx ctx;
12203 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12204
12205 md5_init_ctx (&ctx);
12206 fold_checksum_tree (t, &ctx, &ht);
12207 md5_finish_ctx (&ctx, checksum);
12208 ht.empty ();
12209
12210 for (i = 0; i < 16; i++)
12211 fprintf (stderr, "%d ", checksum[i]);
12212
12213 fprintf (stderr, "\n");
12214 }
12215
12216 #endif
12217
12218 /* Fold a unary tree expression with code CODE of type TYPE with an
12219 operand OP0. LOC is the location of the resulting expression.
12220 Return a folded expression if successful. Otherwise, return a tree
12221 expression with code CODE of type TYPE with an operand OP0. */
12222
12223 tree
12224 fold_build1_loc (location_t loc,
12225 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12226 {
12227 tree tem;
12228 #ifdef ENABLE_FOLD_CHECKING
12229 unsigned char checksum_before[16], checksum_after[16];
12230 struct md5_ctx ctx;
12231 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12232
12233 md5_init_ctx (&ctx);
12234 fold_checksum_tree (op0, &ctx, &ht);
12235 md5_finish_ctx (&ctx, checksum_before);
12236 ht.empty ();
12237 #endif
12238
12239 tem = fold_unary_loc (loc, code, type, op0);
12240 if (!tem)
12241 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12242
12243 #ifdef ENABLE_FOLD_CHECKING
12244 md5_init_ctx (&ctx);
12245 fold_checksum_tree (op0, &ctx, &ht);
12246 md5_finish_ctx (&ctx, checksum_after);
12247
12248 if (memcmp (checksum_before, checksum_after, 16))
12249 fold_check_failed (op0, tem);
12250 #endif
12251 return tem;
12252 }
12253
12254 /* Fold a binary tree expression with code CODE of type TYPE with
12255 operands OP0 and OP1. LOC is the location of the resulting
12256 expression. Return a folded expression if successful. Otherwise,
12257 return a tree expression with code CODE of type TYPE with operands
12258 OP0 and OP1. */
12259
12260 tree
12261 fold_build2_loc (location_t loc,
12262 enum tree_code code, tree type, tree op0, tree op1
12263 MEM_STAT_DECL)
12264 {
12265 tree tem;
12266 #ifdef ENABLE_FOLD_CHECKING
12267 unsigned char checksum_before_op0[16],
12268 checksum_before_op1[16],
12269 checksum_after_op0[16],
12270 checksum_after_op1[16];
12271 struct md5_ctx ctx;
12272 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12273
12274 md5_init_ctx (&ctx);
12275 fold_checksum_tree (op0, &ctx, &ht);
12276 md5_finish_ctx (&ctx, checksum_before_op0);
12277 ht.empty ();
12278
12279 md5_init_ctx (&ctx);
12280 fold_checksum_tree (op1, &ctx, &ht);
12281 md5_finish_ctx (&ctx, checksum_before_op1);
12282 ht.empty ();
12283 #endif
12284
12285 tem = fold_binary_loc (loc, code, type, op0, op1);
12286 if (!tem)
12287 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12288
12289 #ifdef ENABLE_FOLD_CHECKING
12290 md5_init_ctx (&ctx);
12291 fold_checksum_tree (op0, &ctx, &ht);
12292 md5_finish_ctx (&ctx, checksum_after_op0);
12293 ht.empty ();
12294
12295 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12296 fold_check_failed (op0, tem);
12297
12298 md5_init_ctx (&ctx);
12299 fold_checksum_tree (op1, &ctx, &ht);
12300 md5_finish_ctx (&ctx, checksum_after_op1);
12301
12302 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12303 fold_check_failed (op1, tem);
12304 #endif
12305 return tem;
12306 }
12307
12308 /* Fold a ternary tree expression with code CODE of type TYPE with
12309 operands OP0, OP1, and OP2. Return a folded expression if
12310 successful. Otherwise, return a tree expression with code CODE of
12311 type TYPE with operands OP0, OP1, and OP2. */
12312
12313 tree
12314 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12315 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12316 {
12317 tree tem;
12318 #ifdef ENABLE_FOLD_CHECKING
12319 unsigned char checksum_before_op0[16],
12320 checksum_before_op1[16],
12321 checksum_before_op2[16],
12322 checksum_after_op0[16],
12323 checksum_after_op1[16],
12324 checksum_after_op2[16];
12325 struct md5_ctx ctx;
12326 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12327
12328 md5_init_ctx (&ctx);
12329 fold_checksum_tree (op0, &ctx, &ht);
12330 md5_finish_ctx (&ctx, checksum_before_op0);
12331 ht.empty ();
12332
12333 md5_init_ctx (&ctx);
12334 fold_checksum_tree (op1, &ctx, &ht);
12335 md5_finish_ctx (&ctx, checksum_before_op1);
12336 ht.empty ();
12337
12338 md5_init_ctx (&ctx);
12339 fold_checksum_tree (op2, &ctx, &ht);
12340 md5_finish_ctx (&ctx, checksum_before_op2);
12341 ht.empty ();
12342 #endif
12343
12344 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12345 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12346 if (!tem)
12347 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12348
12349 #ifdef ENABLE_FOLD_CHECKING
12350 md5_init_ctx (&ctx);
12351 fold_checksum_tree (op0, &ctx, &ht);
12352 md5_finish_ctx (&ctx, checksum_after_op0);
12353 ht.empty ();
12354
12355 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12356 fold_check_failed (op0, tem);
12357
12358 md5_init_ctx (&ctx);
12359 fold_checksum_tree (op1, &ctx, &ht);
12360 md5_finish_ctx (&ctx, checksum_after_op1);
12361 ht.empty ();
12362
12363 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12364 fold_check_failed (op1, tem);
12365
12366 md5_init_ctx (&ctx);
12367 fold_checksum_tree (op2, &ctx, &ht);
12368 md5_finish_ctx (&ctx, checksum_after_op2);
12369
12370 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12371 fold_check_failed (op2, tem);
12372 #endif
12373 return tem;
12374 }
12375
12376 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12377 arguments in ARGARRAY, and a null static chain.
12378 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12379 of type TYPE from the given operands as constructed by build_call_array. */
12380
12381 tree
12382 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12383 int nargs, tree *argarray)
12384 {
12385 tree tem;
12386 #ifdef ENABLE_FOLD_CHECKING
12387 unsigned char checksum_before_fn[16],
12388 checksum_before_arglist[16],
12389 checksum_after_fn[16],
12390 checksum_after_arglist[16];
12391 struct md5_ctx ctx;
12392 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12393 int i;
12394
12395 md5_init_ctx (&ctx);
12396 fold_checksum_tree (fn, &ctx, &ht);
12397 md5_finish_ctx (&ctx, checksum_before_fn);
12398 ht.empty ();
12399
12400 md5_init_ctx (&ctx);
12401 for (i = 0; i < nargs; i++)
12402 fold_checksum_tree (argarray[i], &ctx, &ht);
12403 md5_finish_ctx (&ctx, checksum_before_arglist);
12404 ht.empty ();
12405 #endif
12406
12407 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12408 if (!tem)
12409 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12410
12411 #ifdef ENABLE_FOLD_CHECKING
12412 md5_init_ctx (&ctx);
12413 fold_checksum_tree (fn, &ctx, &ht);
12414 md5_finish_ctx (&ctx, checksum_after_fn);
12415 ht.empty ();
12416
12417 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12418 fold_check_failed (fn, tem);
12419
12420 md5_init_ctx (&ctx);
12421 for (i = 0; i < nargs; i++)
12422 fold_checksum_tree (argarray[i], &ctx, &ht);
12423 md5_finish_ctx (&ctx, checksum_after_arglist);
12424
12425 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12426 fold_check_failed (NULL_TREE, tem);
12427 #endif
12428 return tem;
12429 }
12430
12431 /* Perform constant folding and related simplification of initializer
12432 expression EXPR. These behave identically to "fold_buildN" but ignore
12433 potential run-time traps and exceptions that fold must preserve. */
12434
12435 #define START_FOLD_INIT \
12436 int saved_signaling_nans = flag_signaling_nans;\
12437 int saved_trapping_math = flag_trapping_math;\
12438 int saved_rounding_math = flag_rounding_math;\
12439 int saved_trapv = flag_trapv;\
12440 int saved_folding_initializer = folding_initializer;\
12441 flag_signaling_nans = 0;\
12442 flag_trapping_math = 0;\
12443 flag_rounding_math = 0;\
12444 flag_trapv = 0;\
12445 folding_initializer = 1;
12446
12447 #define END_FOLD_INIT \
12448 flag_signaling_nans = saved_signaling_nans;\
12449 flag_trapping_math = saved_trapping_math;\
12450 flag_rounding_math = saved_rounding_math;\
12451 flag_trapv = saved_trapv;\
12452 folding_initializer = saved_folding_initializer;
12453
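/* For illustration, the save/clear/restore idiom that START_FOLD_INIT
   and END_FOLD_INIT implement can be reproduced in miniature with any
   global flag (the names below are made up for the sketch):

     static int flag_example;

     static int
     do_something (void)
     {
       return flag_example;  // sees the temporarily cleared value
     }

     #define SAVE_FLAGS int saved_example = flag_example; \
       flag_example = 0;
     #define RESTORE_FLAGS flag_example = saved_example;

     static int
     guarded_call (void)
     {
       SAVE_FLAGS;
       int result = do_something ();
       RESTORE_FLAGS;
       return result;
     }

   Pairing the save and the restore in two macros keeps every caller's
   prologue and epilogue in sync, at the cost of requiring a single
   exit path through END_FOLD_INIT.  */
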
12454 tree
12455 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12456 tree type, tree op)
12457 {
12458 tree result;
12459 START_FOLD_INIT;
12460
12461 result = fold_build1_loc (loc, code, type, op);
12462
12463 END_FOLD_INIT;
12464 return result;
12465 }
12466
12467 tree
12468 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12469 tree type, tree op0, tree op1)
12470 {
12471 tree result;
12472 START_FOLD_INIT;
12473
12474 result = fold_build2_loc (loc, code, type, op0, op1);
12475
12476 END_FOLD_INIT;
12477 return result;
12478 }
12479
12480 tree
12481 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12482 int nargs, tree *argarray)
12483 {
12484 tree result;
12485 START_FOLD_INIT;
12486
12487 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12488
12489 END_FOLD_INIT;
12490 return result;
12491 }
12492
12493 #undef START_FOLD_INIT
12494 #undef END_FOLD_INIT
12495
12496 /* Determine if the first argument is a multiple of the second argument.
12497    Return 0 if it is not, or if we cannot easily determine it to be.
12498
12499 An example of the sort of thing we care about (at this point; this routine
12500 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12501 fold cases do now) is discovering that
12502
12503 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12504
12505 is a multiple of
12506
12507 SAVE_EXPR (J * 8)
12508
12509 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12510
12511 This code also handles discovering that
12512
12513 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12514
12515 is a multiple of 8 so we don't have to worry about dealing with a
12516 possible remainder.
12517
12518 Note that we *look* inside a SAVE_EXPR only to determine how it was
12519 calculated; it is not safe for fold to do much of anything else with the
12520 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12521 at run time. For example, the latter example above *cannot* be implemented
12522 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12523 evaluation time of the original SAVE_EXPR is not necessarily the same at
12524 the time the new expression is evaluated. The only optimization of this
12525 sort that would be valid is changing
12526
12527 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12528
12529 divided by 8 to
12530
12531 SAVE_EXPR (I) * SAVE_EXPR (J)
12532
12533 (where the same SAVE_EXPR (J) is used in the original and the
12534 transformed version). */
12535
12536 int
12537 multiple_of_p (tree type, const_tree top, const_tree bottom)
12538 {
12539 gimple *stmt;
12540 tree t1, op1, op2;
12541
12542 if (operand_equal_p (top, bottom, 0))
12543 return 1;
12544
12545 if (TREE_CODE (type) != INTEGER_TYPE)
12546 return 0;
12547
12548 switch (TREE_CODE (top))
12549 {
12550 case BIT_AND_EXPR:
12551 /* Bitwise and provides a power of two multiple. If the mask is
12552 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12553 if (!integer_pow2p (bottom))
12554 return 0;
12555 /* FALLTHRU */
12556
12557 case MULT_EXPR:
12558 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12559 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12560
12561 case MINUS_EXPR:
12562       /* It is impossible to prove precisely whether op0 - op1 is a
12563 	 multiple of bottom, so be conservative here and check whether
12564 	 both op0 and op1 are multiples of bottom.  Note we check the
12565 	 second operand first since it's usually simpler.  */
12566 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12567 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12568
12569 case PLUS_EXPR:
12570       /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12571 	 as op0 - 3 if the expression has an unsigned type.  For example,
12572 	 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not.  */
12573 op1 = TREE_OPERAND (top, 1);
12574 if (TYPE_UNSIGNED (type)
12575 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12576 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12577 return (multiple_of_p (type, op1, bottom)
12578 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12579
12580 case LSHIFT_EXPR:
12581 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12582 {
12583 op1 = TREE_OPERAND (top, 1);
12584 /* const_binop may not detect overflow correctly,
12585 so check for it explicitly here. */
12586 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12587 && 0 != (t1 = fold_convert (type,
12588 const_binop (LSHIFT_EXPR,
12589 size_one_node,
12590 op1)))
12591 && !TREE_OVERFLOW (t1))
12592 return multiple_of_p (type, t1, bottom);
12593 }
12594 return 0;
12595
12596 case NOP_EXPR:
12597 /* Can't handle conversions from non-integral or wider integral type. */
12598 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12599 || (TYPE_PRECISION (type)
12600 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12601 return 0;
12602
12603 /* fall through */
12604
12605 case SAVE_EXPR:
12606 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12607
12608 case COND_EXPR:
12609 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12610 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12611
12612 case INTEGER_CST:
12613 if (TREE_CODE (bottom) != INTEGER_CST
12614 || integer_zerop (bottom)
12615 || (TYPE_UNSIGNED (type)
12616 && (tree_int_cst_sgn (top) < 0
12617 || tree_int_cst_sgn (bottom) < 0)))
12618 return 0;
12619 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12620 SIGNED);
12621
12622 case SSA_NAME:
12623 if (TREE_CODE (bottom) == INTEGER_CST
12624 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12625 && gimple_code (stmt) == GIMPLE_ASSIGN)
12626 {
12627 enum tree_code code = gimple_assign_rhs_code (stmt);
12628
12629 	  /* Check for special cases to see if top is defined as a multiple
12630 	     of bottom:
12631
12632 	       top = X & ~(bottom - 1);  bottom is a power of 2
12633
12634 or
12635
12636 Y = X % bottom
12637 top = X - Y. */
12638 if (code == BIT_AND_EXPR
12639 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12640 && TREE_CODE (op2) == INTEGER_CST
12641 && integer_pow2p (bottom)
12642 && wi::multiple_of_p (wi::to_widest (op2),
12643 wi::to_widest (bottom), UNSIGNED))
12644 return 1;
12645
12646 op1 = gimple_assign_rhs1 (stmt);
12647 if (code == MINUS_EXPR
12648 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12649 && TREE_CODE (op2) == SSA_NAME
12650 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12651 && gimple_code (stmt) == GIMPLE_ASSIGN
12652 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12653 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12654 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12655 return 1;
12656 }
12657
12658 /* fall through */
12659
12660 default:
12661 return 0;
12662 }
12663 }
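
/* A standalone illustration of the two SSA_NAME patterns recognized
   above: for a power-of-two BOTTOM, x & ~(bottom - 1) clears the low
   bits and is therefore a multiple of bottom, and x - x % bottom is a
   multiple of bottom by the definition of the remainder:

     #include <assert.h>

     int
     main (void)
     {
       unsigned x = 1234567u;
       unsigned bottom = 8u;  // power of two
       assert ((x & ~(bottom - 1)) % bottom == 0);
       assert ((x - x % bottom) % bottom == 0);
       return 0;
     }  */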
12664
12665 #define tree_expr_nonnegative_warnv_p(X, Y) \
12666 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12667
12668 #define RECURSE(X) \
12669 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
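
/* The two macros above form a recursion guard: the bare name
   tree_expr_nonnegative_warnv_p is redefined to a hard #pragma error,
   so an accidental direct recursive call fails to compile, while
   RECURSE still reaches the real function because a function-like
   macro is only expanded when its name is followed directly by '(';
   wrapping the name in parentheses suppresses the expansion.  The same
   trick in miniature:

     int add1 (int x) { return x + 1; }
     #define add1(x) (0 / 0)              // poison direct calls
     int use (int x) { return (add1) (x); }  // macro not expanded here

   A plain add1 (x) in use would hit the poisoned macro; (add1) (x)
   calls the real function.  */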
12670
12671 /* Return true if CODE or TYPE is known to be non-negative. */
12672
12673 static bool
12674 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12675 {
12676 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12677 && truth_value_p (code))
12678     /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12679        have a signed:1 type (where the values are -1 and 0).  */
12680 return true;
12681 return false;
12682 }
12683
12684 /* Return true if (CODE OP0) is known to be non-negative. If the return
12685 value is based on the assumption that signed overflow is undefined,
12686 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12687 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12688
12689 bool
12690 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12691 bool *strict_overflow_p, int depth)
12692 {
12693 if (TYPE_UNSIGNED (type))
12694 return true;
12695
12696 switch (code)
12697 {
12698 case ABS_EXPR:
12699       /* We can't return true if flag_wrapv is set, because
12700 	 ABS_EXPR<INT_MIN> == INT_MIN.  */
12701 if (!ANY_INTEGRAL_TYPE_P (type))
12702 return true;
12703 if (TYPE_OVERFLOW_UNDEFINED (type))
12704 {
12705 *strict_overflow_p = true;
12706 return true;
12707 }
12708 break;
12709
12710 case NON_LVALUE_EXPR:
12711 case FLOAT_EXPR:
12712 case FIX_TRUNC_EXPR:
12713 return RECURSE (op0);
12714
12715 CASE_CONVERT:
12716 {
12717 tree inner_type = TREE_TYPE (op0);
12718 tree outer_type = type;
12719
12720 if (TREE_CODE (outer_type) == REAL_TYPE)
12721 {
12722 if (TREE_CODE (inner_type) == REAL_TYPE)
12723 return RECURSE (op0);
12724 if (INTEGRAL_TYPE_P (inner_type))
12725 {
12726 if (TYPE_UNSIGNED (inner_type))
12727 return true;
12728 return RECURSE (op0);
12729 }
12730 }
12731 else if (INTEGRAL_TYPE_P (outer_type))
12732 {
12733 if (TREE_CODE (inner_type) == REAL_TYPE)
12734 return RECURSE (op0);
12735 if (INTEGRAL_TYPE_P (inner_type))
12736 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12737 && TYPE_UNSIGNED (inner_type);
12738 }
12739 }
12740 break;
12741
12742 default:
12743 return tree_simple_nonnegative_warnv_p (code, type);
12744 }
12745
12746   /* We don't know the sign of `t', so be conservative and return false.  */
12747 return false;
12748 }
12749
12750 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12751 value is based on the assumption that signed overflow is undefined,
12752 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12753 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12754
12755 bool
12756 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12757 tree op1, bool *strict_overflow_p,
12758 int depth)
12759 {
12760 if (TYPE_UNSIGNED (type))
12761 return true;
12762
12763 switch (code)
12764 {
12765 case POINTER_PLUS_EXPR:
12766 case PLUS_EXPR:
12767 if (FLOAT_TYPE_P (type))
12768 return RECURSE (op0) && RECURSE (op1);
12769
12770 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12771 both unsigned and at least 2 bits shorter than the result. */
12772 if (TREE_CODE (type) == INTEGER_TYPE
12773 && TREE_CODE (op0) == NOP_EXPR
12774 && TREE_CODE (op1) == NOP_EXPR)
12775 {
12776 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12777 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12778 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12779 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12780 {
12781 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12782 TYPE_PRECISION (inner2)) + 1;
12783 return prec < TYPE_PRECISION (type);
12784 }
12785 }
12786 break;
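      /* Concretely: two zero-extended 8-bit values each lie in [0, 255],
	 so their sum lies in [0, 510] and fits in 9 bits; any signed
	 result type of 10 or more bits holds it without reaching the
	 sign bit.  E.g.

	   unsigned char a = 255, b = 255;
	   short sum = (short) a + (short) b;  // 510, provably >= 0

	 which is what the prec < TYPE_PRECISION (type) test above
	 checks, with prec being one more than the wider inner
	 precision.  */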
12787
12788 case MULT_EXPR:
12789 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12790 {
12791 /* x * x is always non-negative for floating point x
12792 or without overflow. */
12793 if (operand_equal_p (op0, op1, 0)
12794 || (RECURSE (op0) && RECURSE (op1)))
12795 {
12796 if (ANY_INTEGRAL_TYPE_P (type)
12797 && TYPE_OVERFLOW_UNDEFINED (type))
12798 *strict_overflow_p = true;
12799 return true;
12800 }
12801 }
12802
12803       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12804 	 both unsigned and their total precision is less than the result's.  */
12805 if (TREE_CODE (type) == INTEGER_TYPE
12806 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12807 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12808 {
12809 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12810 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12811 : TREE_TYPE (op0);
12812 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12813 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12814 : TREE_TYPE (op1);
12815
12816 bool unsigned0 = TYPE_UNSIGNED (inner0);
12817 bool unsigned1 = TYPE_UNSIGNED (inner1);
12818
12819 if (TREE_CODE (op0) == INTEGER_CST)
12820 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12821
12822 if (TREE_CODE (op1) == INTEGER_CST)
12823 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12824
12825 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12826 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12827 {
12828 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12829 ? tree_int_cst_min_precision (op0, UNSIGNED)
12830 : TYPE_PRECISION (inner0);
12831
12832 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12833 ? tree_int_cst_min_precision (op1, UNSIGNED)
12834 : TYPE_PRECISION (inner1);
12835
12836 return precision0 + precision1 < TYPE_PRECISION (type);
12837 }
12838 }
12839 return false;
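      /* Concretely: an m-bit unsigned value times an n-bit unsigned value
	 is at most (2^m - 1) * (2^n - 1) < 2^(m+n), so the product fits
	 in m + n bits.  E.g. two 8-bit values need at most 16 bits:

	   unsigned char a = 255, b = 255;
	   int prod = a * b;  // 65025 < 2^16, nonnegative in 32-bit int

	 hence the precision0 + precision1 < TYPE_PRECISION (type) test.  */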
12840
12841 case BIT_AND_EXPR:
12842 case MAX_EXPR:
12843 return RECURSE (op0) || RECURSE (op1);
12844
12845 case BIT_IOR_EXPR:
12846 case BIT_XOR_EXPR:
12847 case MIN_EXPR:
12848 case RDIV_EXPR:
12849 case TRUNC_DIV_EXPR:
12850 case CEIL_DIV_EXPR:
12851 case FLOOR_DIV_EXPR:
12852 case ROUND_DIV_EXPR:
12853 return RECURSE (op0) && RECURSE (op1);
12854
12855 case TRUNC_MOD_EXPR:
12856 return RECURSE (op0);
12857
12858 case FLOOR_MOD_EXPR:
12859 return RECURSE (op1);
12860
12861 case CEIL_MOD_EXPR:
12862 case ROUND_MOD_EXPR:
12863 default:
12864 return tree_simple_nonnegative_warnv_p (code, type);
12865 }
12866
12867   /* We don't know the sign of `t', so be conservative and return false.  */
12868 return false;
12869 }
12870
12871 /* Return true if T is known to be non-negative. If the return
12872 value is based on the assumption that signed overflow is undefined,
12873 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12874 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12875
12876 bool
12877 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12878 {
12879 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12880 return true;
12881
12882 switch (TREE_CODE (t))
12883 {
12884 case INTEGER_CST:
12885 return tree_int_cst_sgn (t) >= 0;
12886
12887 case REAL_CST:
12888 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12889
12890 case FIXED_CST:
12891 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12892
12893 case COND_EXPR:
12894 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12895
12896 case SSA_NAME:
12897 /* Limit the depth of recursion to avoid quadratic behavior.
12898 This is expected to catch almost all occurrences in practice.
12899 If this code misses important cases that unbounded recursion
12900 would not, passes that need this information could be revised
12901 to provide it through dataflow propagation. */
12902 return (!name_registered_for_update_p (t)
12903 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12904 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12905 strict_overflow_p, depth));
12906
12907 default:
12908 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12909 }
12910 }
12911
12912 /* Return true if calling FN with arguments ARG0 and ARG1 is known to
12913    give a non-negative result.  If the return value is based on the
12914    assumption that signed overflow is undefined, set *STRICT_OVERFLOW_P
12915    to true; otherwise don't change it.  DEPTH is the current nesting depth of the query.  */
12916
12917 bool
12918 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12919 bool *strict_overflow_p, int depth)
12920 {
12921 switch (fn)
12922 {
12923 CASE_CFN_ACOS:
12924 CASE_CFN_ACOSH:
12925 CASE_CFN_CABS:
12926 CASE_CFN_COSH:
12927 CASE_CFN_ERFC:
12928 CASE_CFN_EXP:
12929 CASE_CFN_EXP10:
12930 CASE_CFN_EXP2:
12931 CASE_CFN_FABS:
12932 CASE_CFN_FDIM:
12933 CASE_CFN_HYPOT:
12934 CASE_CFN_POW10:
12935 CASE_CFN_FFS:
12936 CASE_CFN_PARITY:
12937 CASE_CFN_POPCOUNT:
12938 CASE_CFN_CLZ:
12939 CASE_CFN_CLRSB:
12940 case CFN_BUILT_IN_BSWAP32:
12941 case CFN_BUILT_IN_BSWAP64:
12942 /* Always true. */
12943 return true;
12944
12945 CASE_CFN_SQRT:
12946 /* sqrt(-0.0) is -0.0. */
12947 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12948 return true;
12949 return RECURSE (arg0);
12950
12951 CASE_CFN_ASINH:
12952 CASE_CFN_ATAN:
12953 CASE_CFN_ATANH:
12954 CASE_CFN_CBRT:
12955 CASE_CFN_CEIL:
12956 CASE_CFN_ERF:
12957 CASE_CFN_EXPM1:
12958 CASE_CFN_FLOOR:
12959 CASE_CFN_FMOD:
12960 CASE_CFN_FREXP:
12961 CASE_CFN_ICEIL:
12962 CASE_CFN_IFLOOR:
12963 CASE_CFN_IRINT:
12964 CASE_CFN_IROUND:
12965 CASE_CFN_LCEIL:
12966 CASE_CFN_LDEXP:
12967 CASE_CFN_LFLOOR:
12968 CASE_CFN_LLCEIL:
12969 CASE_CFN_LLFLOOR:
12970 CASE_CFN_LLRINT:
12971 CASE_CFN_LLROUND:
12972 CASE_CFN_LRINT:
12973 CASE_CFN_LROUND:
12974 CASE_CFN_MODF:
12975 CASE_CFN_NEARBYINT:
12976 CASE_CFN_RINT:
12977 CASE_CFN_ROUND:
12978 CASE_CFN_SCALB:
12979 CASE_CFN_SCALBLN:
12980 CASE_CFN_SCALBN:
12981 CASE_CFN_SIGNBIT:
12982 CASE_CFN_SIGNIFICAND:
12983 CASE_CFN_SINH:
12984 CASE_CFN_TANH:
12985 CASE_CFN_TRUNC:
12986 /* True if the 1st argument is nonnegative. */
12987 return RECURSE (arg0);
12988
12989 CASE_CFN_FMAX:
12990 /* True if the 1st OR 2nd arguments are nonnegative. */
12991 return RECURSE (arg0) || RECURSE (arg1);
12992
12993 CASE_CFN_FMIN:
12994 /* True if the 1st AND 2nd arguments are nonnegative. */
12995 return RECURSE (arg0) && RECURSE (arg1);
12996
12997 CASE_CFN_COPYSIGN:
12998 /* True if the 2nd argument is nonnegative. */
12999 return RECURSE (arg1);
13000
13001 CASE_CFN_POWI:
13002 /* True if the 1st argument is nonnegative or the second
13003 argument is an even integer. */
13004 if (TREE_CODE (arg1) == INTEGER_CST
13005 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13006 return true;
13007 return RECURSE (arg0);
13008
13009 CASE_CFN_POW:
13010 /* True if the 1st argument is nonnegative or the second
13011 argument is an even integer valued real. */
13012 if (TREE_CODE (arg1) == REAL_CST)
13013 {
13014 REAL_VALUE_TYPE c;
13015 HOST_WIDE_INT n;
13016
13017 c = TREE_REAL_CST (arg1);
13018 n = real_to_integer (&c);
13019 if ((n & 1) == 0)
13020 {
13021 REAL_VALUE_TYPE cint;
13022 real_from_integer (&cint, VOIDmode, n, SIGNED);
13023 if (real_identical (&c, &cint))
13024 return true;
13025 }
13026 }
13027 return RECURSE (arg0);
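      /* For example, pow (x, 4.0) is nonnegative for every x, including
	 negative x, since (-x)**4 == x**4.  A quick standalone check:

	   #include <assert.h>
	   #include <math.h>
	   int
	   main (void)
	   {
	     assert (pow (-2.0, 4.0) == 16.0);  // exact: even integer power
	     return 0;
	   }

	 The real_to_integer / real_identical steps above verify that the
	 REAL_CST exponent really is an exact even integer.  */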
13028
13029 default:
13030 break;
13031 }
13032 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13033 }
13034
13035 /* Return true if T is known to be non-negative. If the return
13036 value is based on the assumption that signed overflow is undefined,
13037 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13038 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13039
13040 static bool
13041 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13042 {
13043 enum tree_code code = TREE_CODE (t);
13044 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13045 return true;
13046
13047 switch (code)
13048 {
13049 case TARGET_EXPR:
13050 {
13051 tree temp = TARGET_EXPR_SLOT (t);
13052 t = TARGET_EXPR_INITIAL (t);
13053
13054 /* If the initializer is non-void, then it's a normal expression
13055 that will be assigned to the slot. */
13056 if (!VOID_TYPE_P (t))
13057 return RECURSE (t);
13058
13059 /* Otherwise, the initializer sets the slot in some way. One common
13060 way is an assignment statement at the end of the initializer. */
13061 while (1)
13062 {
13063 if (TREE_CODE (t) == BIND_EXPR)
13064 t = expr_last (BIND_EXPR_BODY (t));
13065 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13066 || TREE_CODE (t) == TRY_CATCH_EXPR)
13067 t = expr_last (TREE_OPERAND (t, 0));
13068 else if (TREE_CODE (t) == STATEMENT_LIST)
13069 t = expr_last (t);
13070 else
13071 break;
13072 }
13073 if (TREE_CODE (t) == MODIFY_EXPR
13074 && TREE_OPERAND (t, 0) == temp)
13075 return RECURSE (TREE_OPERAND (t, 1));
13076
13077 return false;
13078 }
13079
13080 case CALL_EXPR:
13081 {
13082 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13083 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13084
13085 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13086 get_call_combined_fn (t),
13087 arg0,
13088 arg1,
13089 strict_overflow_p, depth);
13090 }
13091 case COMPOUND_EXPR:
13092 case MODIFY_EXPR:
13093 return RECURSE (TREE_OPERAND (t, 1));
13094
13095 case BIND_EXPR:
13096 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13097
13098 case SAVE_EXPR:
13099 return RECURSE (TREE_OPERAND (t, 0));
13100
13101 default:
13102 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13103 }
13104 }
13105
13106 #undef RECURSE
13107 #undef tree_expr_nonnegative_warnv_p
13108
13109 /* Return true if T is known to be non-negative. If the return
13110 value is based on the assumption that signed overflow is undefined,
13111 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13112 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13113
13114 bool
13115 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13116 {
13117 enum tree_code code;
13118 if (t == error_mark_node)
13119 return false;
13120
13121 code = TREE_CODE (t);
13122 switch (TREE_CODE_CLASS (code))
13123 {
13124 case tcc_binary:
13125 case tcc_comparison:
13126 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13127 TREE_TYPE (t),
13128 TREE_OPERAND (t, 0),
13129 TREE_OPERAND (t, 1),
13130 strict_overflow_p, depth);
13131
13132 case tcc_unary:
13133 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13134 TREE_TYPE (t),
13135 TREE_OPERAND (t, 0),
13136 strict_overflow_p, depth);
13137
13138 case tcc_constant:
13139 case tcc_declaration:
13140 case tcc_reference:
13141 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13142
13143 default:
13144 break;
13145 }
13146
13147 switch (code)
13148 {
13149 case TRUTH_AND_EXPR:
13150 case TRUTH_OR_EXPR:
13151 case TRUTH_XOR_EXPR:
13152 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13153 TREE_TYPE (t),
13154 TREE_OPERAND (t, 0),
13155 TREE_OPERAND (t, 1),
13156 strict_overflow_p, depth);
13157 case TRUTH_NOT_EXPR:
13158 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13159 TREE_TYPE (t),
13160 TREE_OPERAND (t, 0),
13161 strict_overflow_p, depth);
13162
13163 case COND_EXPR:
13164 case CONSTRUCTOR:
13165 case OBJ_TYPE_REF:
13166 case ASSERT_EXPR:
13167 case ADDR_EXPR:
13168 case WITH_SIZE_EXPR:
13169 case SSA_NAME:
13170 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13171
13172 default:
13173 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13174 }
13175 }
13176
13177 /* Return true if `t' is known to be non-negative. Handle warnings
13178 about undefined signed overflow. */
13179
13180 bool
13181 tree_expr_nonnegative_p (tree t)
13182 {
13183 bool ret, strict_overflow_p;
13184
13185 strict_overflow_p = false;
13186 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13187 if (strict_overflow_p)
13188 fold_overflow_warning (("assuming signed overflow does not occur when "
13189 "determining that expression is always "
13190 "non-negative"),
13191 WARN_STRICT_OVERFLOW_MISC);
13192 return ret;
13193 }
13194
13195
13196 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13197 For floating point we further ensure that T is not denormal.
13198 Similar logic is present in nonzero_address in rtlanal.h.
13199
13200 If the return value is based on the assumption that signed overflow
13201 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13202 change *STRICT_OVERFLOW_P. */
13203
13204 bool
13205 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13206 bool *strict_overflow_p)
13207 {
13208 switch (code)
13209 {
13210 case ABS_EXPR:
13211 return tree_expr_nonzero_warnv_p (op0,
13212 strict_overflow_p);
13213
13214 case NOP_EXPR:
13215 {
13216 tree inner_type = TREE_TYPE (op0);
13217 tree outer_type = type;
13218
13219 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13220 && tree_expr_nonzero_warnv_p (op0,
13221 strict_overflow_p));
13222 }
13223 break;
13224
13225 case NON_LVALUE_EXPR:
13226 return tree_expr_nonzero_warnv_p (op0,
13227 strict_overflow_p);
13228
13229 default:
13230 break;
13231 }
13232
13233 return false;
13234 }
13235
13236 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13237 For floating point we further ensure that T is not denormal.
13238 Similar logic is present in nonzero_address in rtlanal.h.
13239
13240 If the return value is based on the assumption that signed overflow
13241 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13242 change *STRICT_OVERFLOW_P. */
13243
13244 bool
13245 tree_binary_nonzero_warnv_p (enum tree_code code,
13246 tree type,
13247 tree op0,
13248 tree op1, bool *strict_overflow_p)
13249 {
13250 bool sub_strict_overflow_p;
13251 switch (code)
13252 {
13253 case POINTER_PLUS_EXPR:
13254 case PLUS_EXPR:
13255 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13256 {
13257 	  /* In the presence of negative values it is hard
13258 	     to say anything definite.  */
13259 sub_strict_overflow_p = false;
13260 if (!tree_expr_nonnegative_warnv_p (op0,
13261 &sub_strict_overflow_p)
13262 || !tree_expr_nonnegative_warnv_p (op1,
13263 &sub_strict_overflow_p))
13264 return false;
13265 	  /* One of the operands must be positive and the other non-negative.  */
13266 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13267 	     overflows, on a two's-complement machine the sum of two
13268 nonnegative numbers can never be zero. */
13269 return (tree_expr_nonzero_warnv_p (op0,
13270 strict_overflow_p)
13271 || tree_expr_nonzero_warnv_p (op1,
13272 strict_overflow_p));
13273 }
13274 break;
13275
13276 case MULT_EXPR:
13277 if (TYPE_OVERFLOW_UNDEFINED (type))
13278 {
13279 if (tree_expr_nonzero_warnv_p (op0,
13280 strict_overflow_p)
13281 && tree_expr_nonzero_warnv_p (op1,
13282 strict_overflow_p))
13283 {
13284 *strict_overflow_p = true;
13285 return true;
13286 }
13287 }
13288 break;
13289
13290 case MIN_EXPR:
13291 sub_strict_overflow_p = false;
13292 if (tree_expr_nonzero_warnv_p (op0,
13293 &sub_strict_overflow_p)
13294 && tree_expr_nonzero_warnv_p (op1,
13295 &sub_strict_overflow_p))
13296 {
13297 if (sub_strict_overflow_p)
13298 *strict_overflow_p = true;
13299 }
13300 break;
13301
13302 case MAX_EXPR:
13303 sub_strict_overflow_p = false;
13304 if (tree_expr_nonzero_warnv_p (op0,
13305 &sub_strict_overflow_p))
13306 {
13307 if (sub_strict_overflow_p)
13308 *strict_overflow_p = true;
13309
13310 /* When both operands are nonzero, then MAX must be too. */
13311 if (tree_expr_nonzero_warnv_p (op1,
13312 strict_overflow_p))
13313 return true;
13314
13315 /* MAX where operand 0 is positive is positive. */
13316 return tree_expr_nonnegative_warnv_p (op0,
13317 strict_overflow_p);
13318 }
13319 /* MAX where operand 1 is positive is positive. */
13320 else if (tree_expr_nonzero_warnv_p (op1,
13321 &sub_strict_overflow_p)
13322 && tree_expr_nonnegative_warnv_p (op1,
13323 &sub_strict_overflow_p))
13324 {
13325 if (sub_strict_overflow_p)
13326 *strict_overflow_p = true;
13327 return true;
13328 }
13329 break;
13330
13331 case BIT_IOR_EXPR:
13332 return (tree_expr_nonzero_warnv_p (op1,
13333 strict_overflow_p)
13334 || tree_expr_nonzero_warnv_p (op0,
13335 strict_overflow_p));
13336
13337 default:
13338 break;
13339 }
13340
13341 return false;
13342 }
13343
13344 /* Return true when T is an address and is known to be nonzero.
13345 For floating point we further ensure that T is not denormal.
13346 Similar logic is present in nonzero_address in rtlanal.h.
13347
13348 If the return value is based on the assumption that signed overflow
13349 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13350 change *STRICT_OVERFLOW_P. */
13351
13352 bool
13353 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13354 {
13355 bool sub_strict_overflow_p;
13356 switch (TREE_CODE (t))
13357 {
13358 case INTEGER_CST:
13359 return !integer_zerop (t);
13360
13361 case ADDR_EXPR:
13362 {
13363 tree base = TREE_OPERAND (t, 0);
13364
13365 if (!DECL_P (base))
13366 base = get_base_address (base);
13367
13368 if (base && TREE_CODE (base) == TARGET_EXPR)
13369 base = TARGET_EXPR_SLOT (base);
13370
13371 if (!base)
13372 return false;
13373
13374 	/* For objects in the symbol table, check if we know they are non-zero.
13375 	   Don't do anything for variables and functions before the symtab is
13376 	   built; it is quite possible that they will be declared weak later.  */
13377 int nonzero_addr = maybe_nonzero_address (base);
13378 if (nonzero_addr >= 0)
13379 return nonzero_addr;
13380
13381 /* Constants are never weak. */
13382 if (CONSTANT_CLASS_P (base))
13383 return true;
13384
13385 return false;
13386 }
13387
13388 case COND_EXPR:
13389 sub_strict_overflow_p = false;
13390 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13391 &sub_strict_overflow_p)
13392 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13393 &sub_strict_overflow_p))
13394 {
13395 if (sub_strict_overflow_p)
13396 *strict_overflow_p = true;
13397 return true;
13398 }
13399 break;
13400
13401 case SSA_NAME:
13402 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13403 break;
13404 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13405
13406 default:
13407 break;
13408 }
13409 return false;
13410 }
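
/* The symtab check in the ADDR_EXPR case matters because the address
   of a weak symbol may legitimately be null.  A minimal example,
   assuming a target that supports weak references:

     __attribute__ ((weak)) extern int maybe_absent;

     int
     present_p (void)
     {
       return &maybe_absent != 0;  // unknown until link time
     }

   Folding &maybe_absent != 0 to 1 would be wrong, which is why
   maybe_nonzero_address is consulted instead of assuming every address
   is nonzero.  */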
13411
13412 #define integer_valued_real_p(X) \
13413 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13414
13415 #define RECURSE(X) \
13416 ((integer_valued_real_p) (X, depth + 1))
13417
13418 /* Return true if the floating point result of (CODE OP0) has an
13419 integer value. We also allow +Inf, -Inf and NaN to be considered
13420 integer values. Return false for signaling NaN.
13421
13422 DEPTH is the current nesting depth of the query. */
13423
13424 bool
13425 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13426 {
13427 switch (code)
13428 {
13429 case FLOAT_EXPR:
13430 return true;
13431
13432 case ABS_EXPR:
13433 return RECURSE (op0);
13434
13435 CASE_CONVERT:
13436 {
13437 tree type = TREE_TYPE (op0);
13438 if (TREE_CODE (type) == INTEGER_TYPE)
13439 return true;
13440 if (TREE_CODE (type) == REAL_TYPE)
13441 return RECURSE (op0);
13442 break;
13443 }
13444
13445 default:
13446 break;
13447 }
13448 return false;
13449 }
13450
13451 /* Return true if the floating point result of (CODE OP0 OP1) has an
13452 integer value. We also allow +Inf, -Inf and NaN to be considered
13453 integer values. Return false for signaling NaN.
13454
13455 DEPTH is the current nesting depth of the query. */
13456
13457 bool
13458 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13459 {
13460 switch (code)
13461 {
13462 case PLUS_EXPR:
13463 case MINUS_EXPR:
13464 case MULT_EXPR:
13465 case MIN_EXPR:
13466 case MAX_EXPR:
13467 return RECURSE (op0) && RECURSE (op1);
13468
13469 default:
13470 break;
13471 }
13472 return false;
13473 }
13474
13475 /* Return true if the floating point result of calling FN with arguments
13476    ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
13477    considered integer values.  Return false for signaling NaN.  If FN
13478    takes fewer than 2 arguments, the remaining ARGn are null.
13479
13480 DEPTH is the current nesting depth of the query. */
13481
13482 bool
13483 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13484 {
13485 switch (fn)
13486 {
13487 CASE_CFN_CEIL:
13488 CASE_CFN_FLOOR:
13489 CASE_CFN_NEARBYINT:
13490 CASE_CFN_RINT:
13491 CASE_CFN_ROUND:
13492 CASE_CFN_TRUNC:
13493 return true;
13494
13495 CASE_CFN_FMIN:
13496 CASE_CFN_FMAX:
13497 return RECURSE (arg0) && RECURSE (arg1);
13498
13499 default:
13500 break;
13501 }
13502 return false;
13503 }
13504
13505 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13506 has an integer value. We also allow +Inf, -Inf and NaN to be
13507 considered integer values. Return false for signaling NaN.
13508
13509 DEPTH is the current nesting depth of the query. */
13510
13511 bool
13512 integer_valued_real_single_p (tree t, int depth)
13513 {
13514 switch (TREE_CODE (t))
13515 {
13516 case REAL_CST:
13517 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13518
13519 case COND_EXPR:
13520 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13521
13522 case SSA_NAME:
13523 /* Limit the depth of recursion to avoid quadratic behavior.
13524 This is expected to catch almost all occurrences in practice.
13525 If this code misses important cases that unbounded recursion
13526 would not, passes that need this information could be revised
13527 to provide it through dataflow propagation. */
13528 return (!name_registered_for_update_p (t)
13529 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13530 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13531 depth));
13532
13533 default:
13534 break;
13535 }
13536 return false;
13537 }
13538
13539 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13540 has an integer value. We also allow +Inf, -Inf and NaN to be
13541 considered integer values. Return false for signaling NaN.
13542
13543 DEPTH is the current nesting depth of the query. */
13544
13545 static bool
13546 integer_valued_real_invalid_p (tree t, int depth)
13547 {
13548 switch (TREE_CODE (t))
13549 {
13550 case COMPOUND_EXPR:
13551 case MODIFY_EXPR:
13552 case BIND_EXPR:
13553 return RECURSE (TREE_OPERAND (t, 1));
13554
13555 case SAVE_EXPR:
13556 return RECURSE (TREE_OPERAND (t, 0));
13557
13558 default:
13559 break;
13560 }
13561 return false;
13562 }
13563
13564 #undef RECURSE
13565 #undef integer_valued_real_p
13566
13567 /* Return true if the floating point expression T has an integer value.
13568 We also allow +Inf, -Inf and NaN to be considered integer values.
13569 Return false for signaling NaN.
13570
13571 DEPTH is the current nesting depth of the query. */
13572
13573 bool
13574 integer_valued_real_p (tree t, int depth)
13575 {
13576 if (t == error_mark_node)
13577 return false;
13578
13579 tree_code code = TREE_CODE (t);
13580 switch (TREE_CODE_CLASS (code))
13581 {
13582 case tcc_binary:
13583 case tcc_comparison:
13584 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13585 TREE_OPERAND (t, 1), depth);
13586
13587 case tcc_unary:
13588 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13589
13590 case tcc_constant:
13591 case tcc_declaration:
13592 case tcc_reference:
13593 return integer_valued_real_single_p (t, depth);
13594
13595 default:
13596 break;
13597 }
13598
13599 switch (code)
13600 {
13601 case COND_EXPR:
13602 case SSA_NAME:
13603 return integer_valued_real_single_p (t, depth);
13604
13605 case CALL_EXPR:
13606 {
13607 tree arg0 = (call_expr_nargs (t) > 0
13608 ? CALL_EXPR_ARG (t, 0)
13609 : NULL_TREE);
13610 tree arg1 = (call_expr_nargs (t) > 1
13611 ? CALL_EXPR_ARG (t, 1)
13612 : NULL_TREE);
13613 return integer_valued_real_call_p (get_call_combined_fn (t),
13614 arg0, arg1, depth);
13615 }
13616
13617 default:
13618 return integer_valued_real_invalid_p (t, depth);
13619 }
13620 }
13621
13622 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13623 attempt to fold the expression to a constant without modifying TYPE,
13624 OP0 or OP1.
13625
13626 If the expression could be simplified to a constant, then return
13627 the constant. If the expression would not be simplified to a
13628 constant, then return NULL_TREE. */
13629
13630 tree
13631 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13632 {
13633 tree tem = fold_binary (code, type, op0, op1);
13634 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13635 }
13636
13637 /* Given the components of a unary expression CODE, TYPE and OP0,
13638 attempt to fold the expression to a constant without modifying
13639 TYPE or OP0.
13640
13641 If the expression could be simplified to a constant, then return
13642 the constant. If the expression would not be simplified to a
13643 constant, then return NULL_TREE. */
13644
13645 tree
13646 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13647 {
13648 tree tem = fold_unary (code, type, op0);
13649 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13650 }
13651
13652 /* If EXP represents referencing an element in a constant string
13653 (either via pointer arithmetic or array indexing), return the
13654 tree representing the value accessed, otherwise return NULL. */
13655
13656 tree
13657 fold_read_from_constant_string (tree exp)
13658 {
13659 if ((TREE_CODE (exp) == INDIRECT_REF
13660 || TREE_CODE (exp) == ARRAY_REF)
13661 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13662 {
13663 tree exp1 = TREE_OPERAND (exp, 0);
13664 tree index;
13665 tree string;
13666 location_t loc = EXPR_LOCATION (exp);
13667
13668 if (TREE_CODE (exp) == INDIRECT_REF)
13669 string = string_constant (exp1, &index);
13670 else
13671 {
13672 tree low_bound = array_ref_low_bound (exp);
13673 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13674
13675 /* Optimize the special-case of a zero lower bound.
13676
13677 We convert the low_bound to sizetype to avoid some problems
13678 with constant folding. (E.g. suppose the lower bound is 1,
13679 	     and its mode is QI.  Without the conversion, (ARRAY
13680 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13681 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13682 if (! integer_zerop (low_bound))
13683 index = size_diffop_loc (loc, index,
13684 fold_convert_loc (loc, sizetype, low_bound));
13685
13686 string = exp1;
13687 }
13688
13689 if (string
13690 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13691 && TREE_CODE (string) == STRING_CST
13692 && TREE_CODE (index) == INTEGER_CST
13693 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13694 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13695 == MODE_INT)
13696 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13697 return build_int_cst_type (TREE_TYPE (exp),
13698 (TREE_STRING_POINTER (string)
13699 [TREE_INT_CST_LOW (index)]));
13700 }
13701 return NULL;
13702 }
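
/* For example, given

     static const char msg[] = "abc";
     char c = msg[1];

   the ARRAY_REF msg[1] passes all of the checks above (constant
   string, constant in-bounds index, single-byte integer mode), so the
   load folds to the character constant 'b' at compile time.  */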
13703
13704 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13705 an integer constant, real, or fixed-point constant.
13706
13707 TYPE is the type of the result. */
13708
13709 static tree
13710 fold_negate_const (tree arg0, tree type)
13711 {
13712 tree t = NULL_TREE;
13713
13714 switch (TREE_CODE (arg0))
13715 {
13716 case INTEGER_CST:
13717 {
13718 bool overflow;
13719 wide_int val = wi::neg (arg0, &overflow);
13720 t = force_fit_type (type, val, 1,
13721 (overflow && ! TYPE_UNSIGNED (type))
13722 || TREE_OVERFLOW (arg0));
13723 break;
13724 }
13725
13726 case REAL_CST:
13727 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13728 break;
13729
13730 case FIXED_CST:
13731 {
13732 FIXED_VALUE_TYPE f;
13733 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13734 &(TREE_FIXED_CST (arg0)), NULL,
13735 TYPE_SATURATING (type));
13736 t = build_fixed (type, f);
13737 /* Propagate overflow flags. */
13738 if (overflow_p | TREE_OVERFLOW (arg0))
13739 TREE_OVERFLOW (t) = 1;
13740 break;
13741 }
13742
13743 default:
13744 gcc_unreachable ();
13745 }
13746
13747 return t;
13748 }
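
/* The overflow bookkeeping above exists for the one problem case in
   two's-complement arithmetic: negating the most negative value.  A
   standalone demonstration (assuming a typical two's-complement
   target):

     #include <assert.h>
     #include <stdint.h>

     int
     main (void)
     {
       int32_t min = INT32_MIN;
       uint32_t neg = -(uint32_t) min;  // wrapped negation, well defined
       assert ((int32_t) neg == min);   // -INT32_MIN wraps back to INT32_MIN
       return 0;
     }

   force_fit_type records exactly this situation as TREE_OVERFLOW on
   the folded constant.  */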
13749
13750 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13751 an integer constant or real constant.
13752
13753 TYPE is the type of the result. */
13754
13755 tree
13756 fold_abs_const (tree arg0, tree type)
13757 {
13758 tree t = NULL_TREE;
13759
13760 switch (TREE_CODE (arg0))
13761 {
13762 case INTEGER_CST:
13763 {
13764 /* If the value is unsigned or non-negative, then the absolute value
13765 is the same as the ordinary value. */
13766 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13767 t = arg0;
13768
13769 /* If the value is negative, then the absolute value is
13770 its negation. */
13771 else
13772 {
13773 bool overflow;
13774 wide_int val = wi::neg (arg0, &overflow);
13775 t = force_fit_type (type, val, -1,
13776 overflow | TREE_OVERFLOW (arg0));
13777 }
13778 }
13779 break;
13780
13781 case REAL_CST:
13782 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13783 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13784 else
13785 t = arg0;
13786 break;
13787
13788 default:
13789 gcc_unreachable ();
13790 }
13791
13792 return t;
13793 }
13794
13795 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13796 constant. TYPE is the type of the result. */
13797
13798 static tree
13799 fold_not_const (const_tree arg0, tree type)
13800 {
13801 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13802
13803 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13804 }
13805
13806 /* Given CODE, a relational operator, the target type, TYPE and two
13807 constant operands OP0 and OP1, return the result of the
13808 relational operation. If the result is not a compile time
13809 constant, then return NULL_TREE. */
13810
13811 static tree
13812 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13813 {
13814 int result, invert;
13815
13816 /* From here on, the only cases we handle are when the result is
13817 known to be a constant. */
13818
13819 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13820 {
13821 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13822 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13823
13824 /* Handle the cases where either operand is a NaN. */
13825 if (real_isnan (c0) || real_isnan (c1))
13826 {
13827 switch (code)
13828 {
13829 case EQ_EXPR:
13830 case ORDERED_EXPR:
13831 result = 0;
13832 break;
13833
13834 case NE_EXPR:
13835 case UNORDERED_EXPR:
13836 case UNLT_EXPR:
13837 case UNLE_EXPR:
13838 case UNGT_EXPR:
13839 case UNGE_EXPR:
13840 case UNEQ_EXPR:
13841 result = 1;
13842 break;
13843
13844 case LT_EXPR:
13845 case LE_EXPR:
13846 case GT_EXPR:
13847 case GE_EXPR:
13848 case LTGT_EXPR:
13849 if (flag_trapping_math)
13850 return NULL_TREE;
13851 result = 0;
13852 break;
13853
13854 default:
13855 gcc_unreachable ();
13856 }
13857
13858 return constant_boolean_node (result, type);
13859 }
13860
13861 return constant_boolean_node (real_compare (code, c0, c1), type);
13862 }
13863
13864 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13865 {
13866 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13867 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13868 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13869 }
13870
13871 /* Handle equality/inequality of complex constants. */
13872 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13873 {
13874 tree rcond = fold_relational_const (code, type,
13875 TREE_REALPART (op0),
13876 TREE_REALPART (op1));
13877 tree icond = fold_relational_const (code, type,
13878 TREE_IMAGPART (op0),
13879 TREE_IMAGPART (op1));
13880 if (code == EQ_EXPR)
13881 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13882 else if (code == NE_EXPR)
13883 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13884 else
13885 return NULL_TREE;
13886 }
13887
13888 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13889 {
13890 if (!VECTOR_TYPE_P (type))
13891 {
13892 /* We have a vector comparison with a scalar boolean result.  */
13893 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13894 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13895 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13896 {
13897 tree elem0 = VECTOR_CST_ELT (op0, i);
13898 tree elem1 = VECTOR_CST_ELT (op1, i);
13899 tree tmp = fold_relational_const (code, type, elem0, elem1);
13900 if (tmp == NULL_TREE)
13901 return NULL_TREE;
13902 if (integer_zerop (tmp))
13903 return constant_boolean_node (false, type);
13904 }
13905 return constant_boolean_node (true, type);
13906 }
13907 unsigned count = VECTOR_CST_NELTS (op0);
13908 tree *elts = XALLOCAVEC (tree, count);
13909 gcc_assert (VECTOR_CST_NELTS (op1) == count
13910 && TYPE_VECTOR_SUBPARTS (type) == count);
13911
13912 for (unsigned i = 0; i < count; i++)
13913 {
13914 tree elem_type = TREE_TYPE (type);
13915 tree elem0 = VECTOR_CST_ELT (op0, i);
13916 tree elem1 = VECTOR_CST_ELT (op1, i);
13917
13918 tree tem = fold_relational_const (code, elem_type,
13919 elem0, elem1);
13920
13921 if (tem == NULL_TREE)
13922 return NULL_TREE;
13923
13924 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13925 }
13926
13927 return build_vector (type, elts);
13928 }
13929
13930 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13931
13932 To compute GT, swap the arguments and do LT.
13933 To compute GE, do LT and invert the result.
13934 To compute LE, swap the arguments, do LT and invert the result.
13935 To compute NE, do EQ and invert the result.
13936
13937 Therefore, the code below must handle only EQ and LT. */
13938
13939 if (code == LE_EXPR || code == GT_EXPR)
13940 {
13941 std::swap (op0, op1);
13942 code = swap_tree_comparison (code);
13943 }
13944
13945 /* Note that it is safe to invert for real values here because we
13946 have already handled the one case in which it matters.  */
13947
13948 invert = 0;
13949 if (code == NE_EXPR || code == GE_EXPR)
13950 {
13951 invert = 1;
13952 code = invert_tree_comparison (code, false);
13953 }
13954
13955 /* Compute a result for LT or EQ if the args permit;
13956 otherwise return NULL_TREE.  */
13957 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13958 {
13959 if (code == EQ_EXPR)
13960 result = tree_int_cst_equal (op0, op1);
13961 else
13962 result = tree_int_cst_lt (op0, op1);
13963 }
13964 else
13965 return NULL_TREE;
13966
13967 if (invert)
13968 result ^= 1;
13969 return constant_boolean_node (result, type);
13970 }
13971
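/* Illustrative sketch (not part of the build) of the NaN handling
   above, assuming C0 is a NaN REAL_CST and C1 is any REAL_CST:

     fold_relational_const (EQ_EXPR, boolean_type_node, c0, c1)
       => boolean_false_node  (a NaN compares unequal to everything)
     fold_relational_const (NE_EXPR, boolean_type_node, c0, c1)
       => boolean_true_node
     fold_relational_const (LT_EXPR, boolean_type_node, c0, c1)
       => NULL_TREE when flag_trapping_math, since folding away the
          comparison would lose the invalid-operand exception.  */
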
13972 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13973 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13974 itself. */
13975
13976 tree
13977 fold_build_cleanup_point_expr (tree type, tree expr)
13978 {
13979 /* If the expression does not have side effects then we don't have to wrap
13980 it with a cleanup point expression. */
13981 if (!TREE_SIDE_EFFECTS (expr))
13982 return expr;
13983
13984 /* If the expression is a RETURN_EXPR, check whether the expression inside
13985 the return, or the right-hand side of the MODIFY_EXPR it contains, has
13986 side effects.  If neither has side effects, we do not need to wrap the
13987 expression in a cleanup point expression.  Note that we do not check the
13988 left-hand side of the MODIFY_EXPR because it should always be a return decl.  */
13989 if (TREE_CODE (expr) == RETURN_EXPR)
13990 {
13991 tree op = TREE_OPERAND (expr, 0);
13992 if (!op || !TREE_SIDE_EFFECTS (op))
13993 return expr;
13994 op = TREE_OPERAND (op, 1);
13995 if (!TREE_SIDE_EFFECTS (op))
13996 return expr;
13997 }
13998
13999 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14000 }
14001
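/* Illustrative sketch (not part of the build): for a statement like
   "return foo ();", the right-hand side of the contained MODIFY_EXPR
   is a call with side effects, so the result is

     CLEANUP_POINT_EXPR <RETURN_EXPR <retval = foo ()>>

   whereas "return x;" has a side-effect-free right-hand side and is
   returned unwrapped.  */
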
14002 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14003 of an indirection through OP0, or NULL_TREE if no simplification is
14004 possible. */
14005
14006 tree
14007 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14008 {
14009 tree sub = op0;
14010 tree subtype;
14011
14012 STRIP_NOPS (sub);
14013 subtype = TREE_TYPE (sub);
14014 if (!POINTER_TYPE_P (subtype)
14015 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14016 return NULL_TREE;
14017
14018 if (TREE_CODE (sub) == ADDR_EXPR)
14019 {
14020 tree op = TREE_OPERAND (sub, 0);
14021 tree optype = TREE_TYPE (op);
14022 /* *&CONST_DECL -> the value of the const decl.  */
14023 if (TREE_CODE (op) == CONST_DECL)
14024 return DECL_INITIAL (op);
14025 /* *&p => p; make sure to handle *&"str"[cst] here. */
14026 if (type == optype)
14027 {
14028 tree fop = fold_read_from_constant_string (op);
14029 if (fop)
14030 return fop;
14031 else
14032 return op;
14033 }
14034 /* *(foo *)&fooarray => fooarray[0] */
14035 else if (TREE_CODE (optype) == ARRAY_TYPE
14036 && type == TREE_TYPE (optype)
14037 && (!in_gimple_form
14038 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14039 {
14040 tree type_domain = TYPE_DOMAIN (optype);
14041 tree min_val = size_zero_node;
14042 if (type_domain && TYPE_MIN_VALUE (type_domain))
14043 min_val = TYPE_MIN_VALUE (type_domain);
14044 if (in_gimple_form
14045 && TREE_CODE (min_val) != INTEGER_CST)
14046 return NULL_TREE;
14047 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14048 NULL_TREE, NULL_TREE);
14049 }
14050 /* *(foo *)&complexfoo => __real__ complexfoo */
14051 else if (TREE_CODE (optype) == COMPLEX_TYPE
14052 && type == TREE_TYPE (optype))
14053 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14054 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14055 else if (TREE_CODE (optype) == VECTOR_TYPE
14056 && type == TREE_TYPE (optype))
14057 {
14058 tree part_width = TYPE_SIZE (type);
14059 tree index = bitsize_int (0);
14060 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14061 }
14062 }
14063
14064 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14065 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14066 {
14067 tree op00 = TREE_OPERAND (sub, 0);
14068 tree op01 = TREE_OPERAND (sub, 1);
14069
14070 STRIP_NOPS (op00);
14071 if (TREE_CODE (op00) == ADDR_EXPR)
14072 {
14073 tree op00type;
14074 op00 = TREE_OPERAND (op00, 0);
14075 op00type = TREE_TYPE (op00);
14076
14077 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14078 if (TREE_CODE (op00type) == VECTOR_TYPE
14079 && type == TREE_TYPE (op00type))
14080 {
14081 tree part_width = TYPE_SIZE (type);
14082 unsigned HOST_WIDE_INT max_offset
14083 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14084 * TYPE_VECTOR_SUBPARTS (op00type));
14085 if (tree_int_cst_sign_bit (op01) == 0
14086 && compare_tree_int (op01, max_offset) == -1)
14087 {
14088 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
14089 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14090 tree index = bitsize_int (indexi);
14091 return fold_build3_loc (loc,
14092 BIT_FIELD_REF, type, op00,
14093 part_width, index);
14094 }
14095 }
14096 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14097 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14098 && type == TREE_TYPE (op00type))
14099 {
14100 tree size = TYPE_SIZE_UNIT (type);
14101 if (tree_int_cst_equal (size, op01))
14102 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14103 }
14104 /* ((foo *)&fooarray)[1] => fooarray[1] */
14105 else if (TREE_CODE (op00type) == ARRAY_TYPE
14106 && type == TREE_TYPE (op00type))
14107 {
14108 tree type_domain = TYPE_DOMAIN (op00type);
14109 tree min_val = size_zero_node;
14110 if (type_domain && TYPE_MIN_VALUE (type_domain))
14111 min_val = TYPE_MIN_VALUE (type_domain);
14112 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14113 TYPE_SIZE_UNIT (type));
14114 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14115 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14116 NULL_TREE, NULL_TREE);
14117 }
14118 }
14119 }
14120
14121 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14122 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14123 && type == TREE_TYPE (TREE_TYPE (subtype))
14124 && (!in_gimple_form
14125 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14126 {
14127 tree type_domain;
14128 tree min_val = size_zero_node;
14129 sub = build_fold_indirect_ref_loc (loc, sub);
14130 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14131 if (type_domain && TYPE_MIN_VALUE (type_domain))
14132 min_val = TYPE_MIN_VALUE (type_domain);
14133 if (in_gimple_form
14134 && TREE_CODE (min_val) != INTEGER_CST)
14135 return NULL_TREE;
14136 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14137 NULL_TREE);
14138 }
14139
14140 return NULL_TREE;
14141 }
14142
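/* Illustrative sketch (not part of the build) of the POINTER_PLUS_EXPR
   handling above: for "int a[4];" (assuming a 4-byte int) the byte
   offset is divided back into an array index, so

     *(int *)((char *)&a + 8)   folds to   a[2]

   via the EXACT_DIV_EXPR of the offset (8) by TYPE_SIZE_UNIT (4),
   plus the minimum index of the array's domain.  */
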
14143 /* Build an expression for an indirection through T, simplifying some
14144 cases.  */
14145
14146 tree
14147 build_fold_indirect_ref_loc (location_t loc, tree t)
14148 {
14149 tree type = TREE_TYPE (TREE_TYPE (t));
14150 tree sub = fold_indirect_ref_1 (loc, type, t);
14151
14152 if (sub)
14153 return sub;
14154
14155 return build1_loc (loc, INDIRECT_REF, type, t);
14156 }
14157
14158 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14159
14160 tree
14161 fold_indirect_ref_loc (location_t loc, tree t)
14162 {
14163 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14164
14165 if (sub)
14166 return sub;
14167 else
14168 return t;
14169 }
14170
14171 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14172 whose result is ignored. The type of the returned tree need not be
14173 the same as the original expression. */
14174
14175 tree
14176 fold_ignored_result (tree t)
14177 {
14178 if (!TREE_SIDE_EFFECTS (t))
14179 return integer_zero_node;
14180
14181 for (;;)
14182 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14183 {
14184 case tcc_unary:
14185 t = TREE_OPERAND (t, 0);
14186 break;
14187
14188 case tcc_binary:
14189 case tcc_comparison:
14190 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14191 t = TREE_OPERAND (t, 0);
14192 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14193 t = TREE_OPERAND (t, 1);
14194 else
14195 return t;
14196 break;
14197
14198 case tcc_expression:
14199 switch (TREE_CODE (t))
14200 {
14201 case COMPOUND_EXPR:
14202 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14203 return t;
14204 t = TREE_OPERAND (t, 0);
14205 break;
14206
14207 case COND_EXPR:
14208 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14209 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14210 return t;
14211 t = TREE_OPERAND (t, 0);
14212 break;
14213
14214 default:
14215 return t;
14216 }
14217 break;
14218
14219 default:
14220 return t;
14221 }
14222 }
14223
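/* Illustrative sketch (not part of the build): with the result
   ignored, fold_ignored_result peels wrappers until a side effect
   blocks it:

     -(f () + 1)  =>  f ()               (unary and binary nodes stripped)
     (f (), x)    =>  f ()               (pure second COMPOUND_EXPR arm)
     x + y        =>  integer_zero_node  (no side effects at all)  */
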
14224 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14225
14226 tree
14227 round_up_loc (location_t loc, tree value, unsigned int divisor)
14228 {
14229 tree div = NULL_TREE;
14230
14231 if (divisor == 1)
14232 return value;
14233
14234 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14235 have to do anything.  Only do this check when VALUE is not a
14236 constant, because for a constant the check is more expensive than
14237 simply doing the rounding.  */
14238 if (TREE_CODE (value) != INTEGER_CST)
14239 {
14240 div = build_int_cst (TREE_TYPE (value), divisor);
14241
14242 if (multiple_of_p (TREE_TYPE (value), value, div))
14243 return value;
14244 }
14245
14246 /* If divisor is a power of two, simplify this to bit manipulation. */
14247 if (pow2_or_zerop (divisor))
14248 {
14249 if (TREE_CODE (value) == INTEGER_CST)
14250 {
14251 wide_int val = value;
14252 bool overflow_p;
14253
14254 if ((val & (divisor - 1)) == 0)
14255 return value;
14256
14257 overflow_p = TREE_OVERFLOW (value);
14258 val += divisor - 1;
14259 val &= (int) -divisor;
14260 if (val == 0)
14261 overflow_p = true;
14262
14263 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14264 }
14265 else
14266 {
14267 tree t;
14268
14269 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14270 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14271 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14272 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14273 }
14274 }
14275 else
14276 {
14277 if (!div)
14278 div = build_int_cst (TREE_TYPE (value), divisor);
14279 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14280 value = size_binop_loc (loc, MULT_EXPR, value, div);
14281 }
14282
14283 return value;
14284 }
14285
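/* Illustrative sketch (not part of the build) of the power-of-two
   path above: rounding 13 up to a multiple of 8 computes

     (13 + 7) & -8  ==  20 & ~7  ==  16

   while the general path computes CEIL_DIV_EXPR (13, 8) * 8.  */
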
14286 /* Likewise, but round down. */
14287
14288 tree
14289 round_down_loc (location_t loc, tree value, int divisor)
14290 {
14291 tree div = NULL_TREE;
14292
14293 gcc_assert (divisor > 0);
14294 if (divisor == 1)
14295 return value;
14296
14297 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14298 have to do anything.  Only do this check when VALUE is not a
14299 constant, because for a constant the check is more expensive than
14300 simply doing the rounding.  */
14301 if (TREE_CODE (value) != INTEGER_CST)
14302 {
14303 div = build_int_cst (TREE_TYPE (value), divisor);
14304
14305 if (multiple_of_p (TREE_TYPE (value), value, div))
14306 return value;
14307 }
14308
14309 /* If divisor is a power of two, simplify this to bit manipulation. */
14310 if (pow2_or_zerop (divisor))
14311 {
14312 tree t;
14313
14314 t = build_int_cst (TREE_TYPE (value), -divisor);
14315 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14316 }
14317 else
14318 {
14319 if (!div)
14320 div = build_int_cst (TREE_TYPE (value), divisor);
14321 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14322 value = size_binop_loc (loc, MULT_EXPR, value, div);
14323 }
14324
14325 return value;
14326 }
14327
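/* Illustrative sketch (not part of the build): rounding 13 down to a
   multiple of 8 is simply 13 & -8 == 8 on the power-of-two path, and
   FLOOR_DIV_EXPR (13, 8) * 8 otherwise.  */
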
14328 /* Return a pointer to the base of the object addressed by EXP, and
14329 extract the information about the offset of the access, storing it
14330 in *PBITPOS and *POFFSET.  */
14331
14332 static tree
14333 split_address_to_core_and_offset (tree exp,
14334 HOST_WIDE_INT *pbitpos, tree *poffset)
14335 {
14336 tree core;
14337 machine_mode mode;
14338 int unsignedp, reversep, volatilep;
14339 HOST_WIDE_INT bitsize;
14340 location_t loc = EXPR_LOCATION (exp);
14341
14342 if (TREE_CODE (exp) == ADDR_EXPR)
14343 {
14344 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14345 poffset, &mode, &unsignedp, &reversep,
14346 &volatilep);
14347 core = build_fold_addr_expr_loc (loc, core);
14348 }
14349 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14350 {
14351 core = TREE_OPERAND (exp, 0);
14352 STRIP_NOPS (core);
14353 *pbitpos = 0;
14354 *poffset = TREE_OPERAND (exp, 1);
14355 if (TREE_CODE (*poffset) == INTEGER_CST)
14356 {
14357 offset_int tem = wi::sext (wi::to_offset (*poffset),
14358 TYPE_PRECISION (TREE_TYPE (*poffset)));
14359 tem <<= LOG2_BITS_PER_UNIT;
14360 if (wi::fits_shwi_p (tem))
14361 {
14362 *pbitpos = tem.to_shwi ();
14363 *poffset = NULL_TREE;
14364 }
14365 }
14366 }
14367 else
14368 {
14369 core = exp;
14370 *pbitpos = 0;
14371 *poffset = NULL_TREE;
14372 }
14373
14374 return core;
14375 }
14376
14377 /* Return true if the addresses of E1 and E2 differ by a constant,
14378 false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
14379
14380 bool
14381 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14382 {
14383 tree core1, core2;
14384 HOST_WIDE_INT bitpos1, bitpos2;
14385 tree toffset1, toffset2, tdiff, type;
14386
14387 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14388 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14389
14390 if (bitpos1 % BITS_PER_UNIT != 0
14391 || bitpos2 % BITS_PER_UNIT != 0
14392 || !operand_equal_p (core1, core2, 0))
14393 return false;
14394
14395 if (toffset1 && toffset2)
14396 {
14397 type = TREE_TYPE (toffset1);
14398 if (type != TREE_TYPE (toffset2))
14399 toffset2 = fold_convert (type, toffset2);
14400
14401 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14402 if (!cst_and_fits_in_hwi (tdiff))
14403 return false;
14404
14405 *diff = int_cst_value (tdiff);
14406 }
14407 else if (toffset1 || toffset2)
14408 {
14409 /* If only one of the offsets is non-constant, the difference cannot
14410 be a constant. */
14411 return false;
14412 }
14413 else
14414 *diff = 0;
14415
14416 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14417 return true;
14418 }
14419
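/* Illustrative sketch (not part of the build): for "int a[8];" with a
   4-byte int, the addresses &a[5] and &a[2] share the core &a, so
   given trees E1 and E2 for those two addresses,

     HOST_WIDE_INT diff;
     ptr_difference_const (e1, e2, &diff);

   returns true with diff == 12 (three 4-byte elements).  Differing
   cores, or a variable offset on only one side, yield false.  */
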
14420 /* Return OFF converted to a pointer offset type suitable as offset for
14421 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14422 tree
14423 convert_to_ptrofftype_loc (location_t loc, tree off)
14424 {
14425 return fold_convert_loc (loc, sizetype, off);
14426 }
14427
14428 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14429 tree
14430 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14431 {
14432 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14433 ptr, convert_to_ptrofftype_loc (loc, off));
14434 }
14435
14436 /* Likewise, but with the offset given as a HOST_WIDE_INT.  */
14437 tree
14438 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14439 {
14440 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14441 ptr, size_int (off));
14442 }
14443
14444 /* Return a char pointer for a C string if SRC is a string constant
14445 or the sum of a string constant and an integer constant.  Only
14446 string constants properly terminated with a '\0' character are
14447 supported.  If STRLEN is a valid pointer, the length of the returned
14448 string (including the terminating character) is stored in *STRLEN.  */
14449
14450 const char *
14451 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14452 {
14453 tree offset_node;
14454
14455 if (strlen)
14456 *strlen = 0;
14457
14458 src = string_constant (src, &offset_node);
14459 if (src == 0)
14460 return NULL;
14461
14462 unsigned HOST_WIDE_INT offset = 0;
14463 if (offset_node != NULL_TREE)
14464 {
14465 if (!tree_fits_uhwi_p (offset_node))
14466 return NULL;
14467 else
14468 offset = tree_to_uhwi (offset_node);
14469 }
14470
14471 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14472 const char *string = TREE_STRING_POINTER (src);
14473
14474 /* Support only properly null-terminated strings. */
14475 if (string_length == 0
14476 || string[string_length - 1] != '\0'
14477 || offset >= string_length)
14478 return NULL;
14479
14480 if (strlen)
14481 *strlen = string_length - offset;
14482 return string + offset;
14483 }
14484
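/* Illustrative sketch (not part of the build): for SRC denoting the
   string constant "hello" plus the constant offset 1,

     unsigned HOST_WIDE_INT len;
     const char *p = c_getstr (src, &len);

   leaves p pointing at "ello" with len == 5, counting the terminating
   '\0'.  A missing terminator, or an offset at or past the end of the
   string, yields NULL.  */
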
14485 #if CHECKING_P
14486
14487 namespace selftest {
14488
14489 /* Helper functions for writing tests of folding trees. */
14490
14491 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
14492
14493 static void
14494 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14495 tree constant)
14496 {
14497 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14498 }
14499
14500 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14501 wrapping WRAPPED_EXPR.  */
14502
14503 static void
14504 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14505 tree wrapped_expr)
14506 {
14507 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14508 ASSERT_NE (wrapped_expr, result);
14509 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14510 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14511 }
14512
14513 /* Verify that various arithmetic binary operations are folded
14514 correctly. */
14515
14516 static void
14517 test_arithmetic_folding ()
14518 {
14519 tree type = integer_type_node;
14520 tree x = create_tmp_var_raw (type, "x");
14521 tree zero = build_zero_cst (type);
14522 tree one = build_int_cst (type, 1);
14523
14524 /* Addition. */
14525 /* 1 <-- (0 + 1) */
14526 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14527 one);
14528 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14529 one);
14530
14531 /* (nonlvalue)x <-- (x + 0) */
14532 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14533 x);
14534
14535 /* Subtraction. */
14536 /* 0 <-- (x - x) */
14537 assert_binop_folds_to_const (x, MINUS_EXPR, x,
14538 zero);
14539 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14540 x);
14541
14542 /* Multiplication. */
14543 /* 0 <-- (x * 0) */
14544 assert_binop_folds_to_const (x, MULT_EXPR, zero,
14545 zero);
14546
14547 /* (nonlvalue)x <-- (x * 1) */
14548 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14549 x);
14550 }
14551
14552 /* Verify that various binary operations on vectors are folded
14553 correctly. */
14554
14555 static void
14556 test_vector_folding ()
14557 {
14558 tree inner_type = integer_type_node;
14559 tree type = build_vector_type (inner_type, 4);
14560 tree zero = build_zero_cst (type);
14561 tree one = build_one_cst (type);
14562
14563 /* Verify equality tests that return a scalar boolean result. */
14564 tree res_type = boolean_type_node;
14565 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14566 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14567 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14568 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14569 }
14570
14571 /* Run all of the selftests within this file. */
14572
14573 void
14574 fold_const_c_tests ()
14575 {
14576 test_arithmetic_folding ();
14577 test_vector_folding ();
14578 }
14579
14580 } // namespace selftest
14581
14582 #endif /* CHECKING_P */