1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "tree-ssa-operands.h"
54 #include "optabs-query.h"
55 #include "cgraph.h"
56 #include "diagnostic-core.h"
57 #include "flags.h"
58 #include "alias.h"
59 #include "fold-const.h"
60 #include "fold-const-call.h"
61 #include "stor-layout.h"
62 #include "calls.h"
63 #include "tree-iterator.h"
64 #include "expr.h"
65 #include "intl.h"
66 #include "langhooks.h"
67 #include "tree-eh.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "builtins.h"
71 #include "generic-match.h"
72 #include "gimple-fold.h"
73 #include "params.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-ssanames.h"
79
80 #ifndef LOAD_EXTEND_OP
81 #define LOAD_EXTEND_OP(M) UNKNOWN
82 #endif
83
84 /* Nonzero if we are folding constants inside an initializer; zero
85 otherwise. */
86 int folding_initializer = 0;
87
88 /* The following constants represent a bit-based encoding of GCC's
89 comparison operators. This encoding simplifies transformations
90 on relational comparison operators, such as AND and OR. */
91 enum comparison_code {
92 COMPCODE_FALSE = 0,
93 COMPCODE_LT = 1,
94 COMPCODE_EQ = 2,
95 COMPCODE_LE = 3,
96 COMPCODE_GT = 4,
97 COMPCODE_LTGT = 5,
98 COMPCODE_GE = 6,
99 COMPCODE_ORD = 7,
100 COMPCODE_UNORD = 8,
101 COMPCODE_UNLT = 9,
102 COMPCODE_UNEQ = 10,
103 COMPCODE_UNLE = 11,
104 COMPCODE_UNGT = 12,
105 COMPCODE_NE = 13,
106 COMPCODE_UNGE = 14,
107 COMPCODE_TRUE = 15
108 };
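/* Each bit above stands for one elementary outcome: LT = 1, EQ = 2,
   GT = 4 and UNORD = 8.  ORing two codes therefore unions the relations
   they accept and ANDing intersects them, for example:

     COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE
     COMPCODE_LT | COMPCODE_GT == 1 | 4 == 5 == COMPCODE_LTGT
     COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ  */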
109
110 static bool negate_expr_p (tree);
111 static tree negate_expr (tree);
112 static tree split_tree (location_t, tree, tree, enum tree_code,
113 tree *, tree *, tree *, int);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static int operand_equal_for_comparison_p (tree, tree, tree);
118 static int twoval_comparison_p (tree, tree *, tree *, int *);
119 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
120 static tree make_bit_field_ref (location_t, tree, tree,
121 HOST_WIDE_INT, HOST_WIDE_INT, int, int);
122 static tree optimize_bit_field_compare (location_t, enum tree_code,
123 tree, tree, tree);
124 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
125 HOST_WIDE_INT *,
126 machine_mode *, int *, int *, int *,
127 tree *, tree *);
128 static int simple_operand_p (const_tree);
129 static bool simple_operand_p_2 (tree);
130 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
131 static tree range_predecessor (tree);
132 static tree range_successor (tree);
133 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
134 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
135 static tree unextend (tree, int, int, tree);
136 static tree optimize_minmax_comparison (location_t, enum tree_code,
137 tree, tree, tree);
138 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
139 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
140 static tree fold_binary_op_with_conditional_arg (location_t,
141 enum tree_code, tree,
142 tree, tree,
143 tree, tree, int);
144 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
145 static bool reorder_operands_p (const_tree, const_tree);
146 static tree fold_negate_const (tree, tree);
147 static tree fold_not_const (const_tree, tree);
148 static tree fold_relational_const (enum tree_code, tree, tree, tree);
149 static tree fold_convert_const (enum tree_code, tree, tree);
150 static tree fold_view_convert_expr (tree, tree);
151 static bool vec_cst_ctor_to_array (tree, tree *);
152
153
154 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
155 Otherwise, return LOC. */
156
157 static location_t
158 expr_location_or (tree t, location_t loc)
159 {
160 location_t tloc = EXPR_LOCATION (t);
161 return tloc == UNKNOWN_LOCATION ? loc : tloc;
162 }
163
164 /* Similar to protected_set_expr_location, but never modify X in place;
165 if the location can and needs to be set, unshare X.  */
166
167 static inline tree
168 protected_set_expr_location_unshare (tree x, location_t loc)
169 {
170 if (CAN_HAVE_LOCATION_P (x)
171 && EXPR_LOCATION (x) != loc
172 && !(TREE_CODE (x) == SAVE_EXPR
173 || TREE_CODE (x) == TARGET_EXPR
174 || TREE_CODE (x) == BIND_EXPR))
175 {
176 x = copy_node (x);
177 SET_EXPR_LOCATION (x, loc);
178 }
179 return x;
180 }
181 \f
182 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
183 division and returns the quotient. Otherwise returns
184 NULL_TREE. */
185
186 tree
187 div_if_zero_remainder (const_tree arg1, const_tree arg2)
188 {
189 widest_int quo;
190
191 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
192 SIGNED, &quo))
193 return wide_int_to_tree (TREE_TYPE (arg1), quo);
194
195 return NULL_TREE;
196 }
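/* For example, INTEGER_CST operands 12 and 4 fold to the INTEGER_CST 3,
   while 13 and 4 leave a remainder and yield NULL_TREE.  */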
197 \f
198 /* This is nonzero if we should defer warnings about undefined
199 overflow. This facility exists because these warnings are a
200 special case. The code to estimate loop iterations does not want
201 to issue any warnings, since it works with expressions which do not
202 occur in user code. Various bits of cleanup code call fold(), but
203 only use the result if it has certain characteristics (e.g., is a
204 constant); that code only wants to issue a warning if the result is
205 used. */
206
207 static int fold_deferring_overflow_warnings;
208
209 /* If a warning about undefined overflow is deferred, this is the
210 warning. Note that this may cause us to turn two warnings into
211 one, but that is fine since it is sufficient to only give one
212 warning per expression. */
213
214 static const char* fold_deferred_overflow_warning;
215
216 /* If a warning about undefined overflow is deferred, this is the
217 level at which the warning should be emitted. */
218
219 static enum warn_strict_overflow_code fold_deferred_overflow_code;
220
221 /* Start deferring overflow warnings. We could use a stack here to
222 permit nested calls, but at present it is not necessary. */
223
224 void
225 fold_defer_overflow_warnings (void)
226 {
227 ++fold_deferring_overflow_warnings;
228 }
229
230 /* Stop deferring overflow warnings. If there is a pending warning,
231 and ISSUE is true, then issue the warning if appropriate. STMT is
232 the statement with which the warning should be associated (used for
233 location information); STMT may be NULL. CODE is the level of the
234 warning--a warn_strict_overflow_code value. This function will use
235 the smaller of CODE and the deferred code when deciding whether to
236 issue the warning. CODE may be zero to mean to always use the
237 deferred code. */
238
239 void
240 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
241 {
242 const char *warnmsg;
243 location_t locus;
244
245 gcc_assert (fold_deferring_overflow_warnings > 0);
246 --fold_deferring_overflow_warnings;
247 if (fold_deferring_overflow_warnings > 0)
248 {
249 if (fold_deferred_overflow_warning != NULL
250 && code != 0
251 && code < (int) fold_deferred_overflow_code)
252 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
253 return;
254 }
255
256 warnmsg = fold_deferred_overflow_warning;
257 fold_deferred_overflow_warning = NULL;
258
259 if (!issue || warnmsg == NULL)
260 return;
261
262 if (gimple_no_warning_p (stmt))
263 return;
264
265 /* Use the smallest code level when deciding to issue the
266 warning. */
267 if (code == 0 || code > (int) fold_deferred_overflow_code)
268 code = fold_deferred_overflow_code;
269
270 if (!issue_strict_overflow_warning (code))
271 return;
272
273 if (stmt == NULL)
274 locus = input_location;
275 else
276 locus = gimple_location (stmt);
277 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
278 }
279
280 /* Stop deferring overflow warnings, ignoring any deferred
281 warnings. */
282
283 void
284 fold_undefer_and_ignore_overflow_warnings (void)
285 {
286 fold_undefer_overflow_warnings (false, NULL, 0);
287 }
288
289 /* Whether we are deferring overflow warnings. */
290
291 bool
292 fold_deferring_overflow_warnings_p (void)
293 {
294 return fold_deferring_overflow_warnings > 0;
295 }
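/* A typical caller brackets speculative folding with this pair, e.g.
   (illustrative sketch only; EXPR and STMT stand for the caller's data):

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     bool used = TREE_CODE (folded) == INTEGER_CST;
     fold_undefer_overflow_warnings (used, stmt, 0);

   so a -Wstrict-overflow diagnostic is emitted only if the folded
   result is actually used.  */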
296
297 /* This is called when we fold something based on the fact that signed
298 overflow is undefined. */
299
300 static void
301 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
302 {
303 if (fold_deferring_overflow_warnings > 0)
304 {
305 if (fold_deferred_overflow_warning == NULL
306 || wc < fold_deferred_overflow_code)
307 {
308 fold_deferred_overflow_warning = gmsgid;
309 fold_deferred_overflow_code = wc;
310 }
311 }
312 else if (issue_strict_overflow_warning (wc))
313 warning (OPT_Wstrict_overflow, gmsgid);
314 }
315 \f
316 /* Return true if the built-in mathematical function specified by FN
317 is odd, i.e. -f(x) == f(-x). */
318
319 bool
320 negate_mathfn_p (combined_fn fn)
321 {
322 switch (fn)
323 {
324 CASE_CFN_ASIN:
325 CASE_CFN_ASINH:
326 CASE_CFN_ATAN:
327 CASE_CFN_ATANH:
328 CASE_CFN_CASIN:
329 CASE_CFN_CASINH:
330 CASE_CFN_CATAN:
331 CASE_CFN_CATANH:
332 CASE_CFN_CBRT:
333 CASE_CFN_CPROJ:
334 CASE_CFN_CSIN:
335 CASE_CFN_CSINH:
336 CASE_CFN_CTAN:
337 CASE_CFN_CTANH:
338 CASE_CFN_ERF:
339 CASE_CFN_LLROUND:
340 CASE_CFN_LROUND:
341 CASE_CFN_ROUND:
342 CASE_CFN_SIN:
343 CASE_CFN_SINH:
344 CASE_CFN_TAN:
345 CASE_CFN_TANH:
346 CASE_CFN_TRUNC:
347 return true;
348
349 CASE_CFN_LLRINT:
350 CASE_CFN_LRINT:
351 CASE_CFN_NEARBYINT:
352 CASE_CFN_RINT:
353 return !flag_rounding_math;
354
355 default:
356 break;
357 }
358 return false;
359 }
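/* For example, negate_mathfn_p holds for sin, since sin(-x) == -sin(x),
   but would be false for the even function cos, where cos(-x) == cos(x).  */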
360
361 /* Check whether we may negate an integer constant T without causing
362 overflow. */
363
364 bool
365 may_negate_without_overflow_p (const_tree t)
366 {
367 tree type;
368
369 gcc_assert (TREE_CODE (t) == INTEGER_CST);
370
371 type = TREE_TYPE (t);
372 if (TYPE_UNSIGNED (type))
373 return false;
374
375 return !wi::only_sign_bit_p (t);
376 }
377
378 /* Determine whether an expression T can be cheaply negated using
379 the function negate_expr without introducing undefined overflow. */
380
381 static bool
382 negate_expr_p (tree t)
383 {
384 tree type;
385
386 if (t == 0)
387 return false;
388
389 type = TREE_TYPE (t);
390
391 STRIP_SIGN_NOPS (t);
392 switch (TREE_CODE (t))
393 {
394 case INTEGER_CST:
395 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
396 return true;
397
398 /* Check that -CST will not overflow type. */
399 return may_negate_without_overflow_p (t);
400 case BIT_NOT_EXPR:
401 return (INTEGRAL_TYPE_P (type)
402 && TYPE_OVERFLOW_WRAPS (type));
403
404 case FIXED_CST:
405 return true;
406
407 case NEGATE_EXPR:
408 return !TYPE_OVERFLOW_SANITIZED (type);
409
410 case REAL_CST:
411 /* We want to canonicalize to positive real constants. Pretend
412 that only negative ones can be easily negated. */
413 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
414
415 case COMPLEX_CST:
416 return negate_expr_p (TREE_REALPART (t))
417 && negate_expr_p (TREE_IMAGPART (t));
418
419 case VECTOR_CST:
420 {
421 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
422 return true;
423
424 int count = TYPE_VECTOR_SUBPARTS (type), i;
425
426 for (i = 0; i < count; i++)
427 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
428 return false;
429
430 return true;
431 }
432
433 case COMPLEX_EXPR:
434 return negate_expr_p (TREE_OPERAND (t, 0))
435 && negate_expr_p (TREE_OPERAND (t, 1));
436
437 case CONJ_EXPR:
438 return negate_expr_p (TREE_OPERAND (t, 0));
439
440 case PLUS_EXPR:
441 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
442 || HONOR_SIGNED_ZEROS (element_mode (type))
443 || (INTEGRAL_TYPE_P (type)
444 && ! TYPE_OVERFLOW_WRAPS (type)))
445 return false;
446 /* -(A + B) -> (-B) - A. */
447 if (negate_expr_p (TREE_OPERAND (t, 1))
448 && reorder_operands_p (TREE_OPERAND (t, 0),
449 TREE_OPERAND (t, 1)))
450 return true;
451 /* -(A + B) -> (-A) - B. */
452 return negate_expr_p (TREE_OPERAND (t, 0));
453
454 case MINUS_EXPR:
455 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
456 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
457 && !HONOR_SIGNED_ZEROS (element_mode (type))
458 && (! INTEGRAL_TYPE_P (type)
459 || TYPE_OVERFLOW_WRAPS (type))
460 && reorder_operands_p (TREE_OPERAND (t, 0),
461 TREE_OPERAND (t, 1));
462
463 case MULT_EXPR:
464 if (TYPE_UNSIGNED (type))
465 break;
466 /* INT_MIN/n * n doesn't overflow, but it does after negating one
467 operand if n is a power of two.  */
468 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
469 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
470 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
471 && ! integer_pow2p (TREE_OPERAND (t, 0)))
472 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
473 && ! integer_pow2p (TREE_OPERAND (t, 1)))))
474 break;
475
476 /* Fall through. */
477
478 case RDIV_EXPR:
479 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
480 return negate_expr_p (TREE_OPERAND (t, 1))
481 || negate_expr_p (TREE_OPERAND (t, 0));
482 break;
483
484 case TRUNC_DIV_EXPR:
485 case ROUND_DIV_EXPR:
486 case EXACT_DIV_EXPR:
487 if (TYPE_UNSIGNED (type))
488 break;
489 if (negate_expr_p (TREE_OPERAND (t, 0)))
490 return true;
491 /* In general we can't negate B in A / B, because if A is INT_MIN and
492 B is 1, we may turn this into INT_MIN / -1 which is undefined
493 and actually traps on some architectures. */
494 if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
495 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
496 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
497 && ! integer_onep (TREE_OPERAND (t, 1))))
498 return negate_expr_p (TREE_OPERAND (t, 1));
499 break;
500
501 case NOP_EXPR:
502 /* Negate -((double)float) as (double)(-float). */
503 if (TREE_CODE (type) == REAL_TYPE)
504 {
505 tree tem = strip_float_extensions (t);
506 if (tem != t)
507 return negate_expr_p (tem);
508 }
509 break;
510
511 case CALL_EXPR:
512 /* Negate -f(x) as f(-x). */
513 if (negate_mathfn_p (get_call_combined_fn (t)))
514 return negate_expr_p (CALL_EXPR_ARG (t, 0));
515 break;
516
517 case RSHIFT_EXPR:
518 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
519 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
520 {
521 tree op1 = TREE_OPERAND (t, 1);
522 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
523 return true;
524 }
525 break;
526
527 default:
528 break;
529 }
530 return false;
531 }
532
533 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
534 simplification is possible.
535 If negate_expr_p would return true for T, NULL_TREE will never be
536 returned. */
537
538 static tree
539 fold_negate_expr (location_t loc, tree t)
540 {
541 tree type = TREE_TYPE (t);
542 tree tem;
543
544 switch (TREE_CODE (t))
545 {
546 /* Convert - (~A) to A + 1. */
547 case BIT_NOT_EXPR:
548 if (INTEGRAL_TYPE_P (type))
549 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
550 build_one_cst (type));
551 break;
552
553 case INTEGER_CST:
554 tem = fold_negate_const (t, type);
555 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
556 || (ANY_INTEGRAL_TYPE_P (type)
557 && !TYPE_OVERFLOW_TRAPS (type)
558 && TYPE_OVERFLOW_WRAPS (type))
559 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
560 return tem;
561 break;
562
563 case REAL_CST:
564 tem = fold_negate_const (t, type);
565 return tem;
566
567 case FIXED_CST:
568 tem = fold_negate_const (t, type);
569 return tem;
570
571 case COMPLEX_CST:
572 {
573 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
574 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
575 if (rpart && ipart)
576 return build_complex (type, rpart, ipart);
577 }
578 break;
579
580 case VECTOR_CST:
581 {
582 int count = TYPE_VECTOR_SUBPARTS (type), i;
583 tree *elts = XALLOCAVEC (tree, count);
584
585 for (i = 0; i < count; i++)
586 {
587 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
588 if (elts[i] == NULL_TREE)
589 return NULL_TREE;
590 }
591
592 return build_vector (type, elts);
593 }
594
595 case COMPLEX_EXPR:
596 if (negate_expr_p (t))
597 return fold_build2_loc (loc, COMPLEX_EXPR, type,
598 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
599 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
600 break;
601
602 case CONJ_EXPR:
603 if (negate_expr_p (t))
604 return fold_build1_loc (loc, CONJ_EXPR, type,
605 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
606 break;
607
608 case NEGATE_EXPR:
609 if (!TYPE_OVERFLOW_SANITIZED (type))
610 return TREE_OPERAND (t, 0);
611 break;
612
613 case PLUS_EXPR:
614 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
615 && !HONOR_SIGNED_ZEROS (element_mode (type)))
616 {
617 /* -(A + B) -> (-B) - A. */
618 if (negate_expr_p (TREE_OPERAND (t, 1))
619 && reorder_operands_p (TREE_OPERAND (t, 0),
620 TREE_OPERAND (t, 1)))
621 {
622 tem = negate_expr (TREE_OPERAND (t, 1));
623 return fold_build2_loc (loc, MINUS_EXPR, type,
624 tem, TREE_OPERAND (t, 0));
625 }
626
627 /* -(A + B) -> (-A) - B. */
628 if (negate_expr_p (TREE_OPERAND (t, 0)))
629 {
630 tem = negate_expr (TREE_OPERAND (t, 0));
631 return fold_build2_loc (loc, MINUS_EXPR, type,
632 tem, TREE_OPERAND (t, 1));
633 }
634 }
635 break;
636
637 case MINUS_EXPR:
638 /* - (A - B) -> B - A */
639 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
640 && !HONOR_SIGNED_ZEROS (element_mode (type))
641 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
642 return fold_build2_loc (loc, MINUS_EXPR, type,
643 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
644 break;
645
646 case MULT_EXPR:
647 if (TYPE_UNSIGNED (type))
648 break;
649
650 /* Fall through. */
651
652 case RDIV_EXPR:
653 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
654 {
655 tem = TREE_OPERAND (t, 1);
656 if (negate_expr_p (tem))
657 return fold_build2_loc (loc, TREE_CODE (t), type,
658 TREE_OPERAND (t, 0), negate_expr (tem));
659 tem = TREE_OPERAND (t, 0);
660 if (negate_expr_p (tem))
661 return fold_build2_loc (loc, TREE_CODE (t), type,
662 negate_expr (tem), TREE_OPERAND (t, 1));
663 }
664 break;
665
666 case TRUNC_DIV_EXPR:
667 case ROUND_DIV_EXPR:
668 case EXACT_DIV_EXPR:
669 if (TYPE_UNSIGNED (type))
670 break;
671 if (negate_expr_p (TREE_OPERAND (t, 0)))
672 return fold_build2_loc (loc, TREE_CODE (t), type,
673 negate_expr (TREE_OPERAND (t, 0)),
674 TREE_OPERAND (t, 1));
675 /* In general we can't negate B in A / B, because if A is INT_MIN and
676 B is 1, we may turn this into INT_MIN / -1 which is undefined
677 and actually traps on some architectures. */
678 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
679 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
680 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
681 && ! integer_onep (TREE_OPERAND (t, 1))))
682 && negate_expr_p (TREE_OPERAND (t, 1)))
683 return fold_build2_loc (loc, TREE_CODE (t), type,
684 TREE_OPERAND (t, 0),
685 negate_expr (TREE_OPERAND (t, 1)));
686 break;
687
688 case NOP_EXPR:
689 /* Convert -((double)float) into (double)(-float). */
690 if (TREE_CODE (type) == REAL_TYPE)
691 {
692 tem = strip_float_extensions (t);
693 if (tem != t && negate_expr_p (tem))
694 return fold_convert_loc (loc, type, negate_expr (tem));
695 }
696 break;
697
698 case CALL_EXPR:
699 /* Negate -f(x) as f(-x). */
700 if (negate_mathfn_p (get_call_combined_fn (t))
701 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
702 {
703 tree fndecl, arg;
704
705 fndecl = get_callee_fndecl (t);
706 arg = negate_expr (CALL_EXPR_ARG (t, 0));
707 return build_call_expr_loc (loc, fndecl, 1, arg);
708 }
709 break;
710
711 case RSHIFT_EXPR:
712 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
713 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
714 {
715 tree op1 = TREE_OPERAND (t, 1);
716 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
717 {
718 tree ntype = TYPE_UNSIGNED (type)
719 ? signed_type_for (type)
720 : unsigned_type_for (type);
721 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
722 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
723 return fold_convert_loc (loc, type, temp);
724 }
725 }
726 break;
727
728 default:
729 break;
730 }
731
732 return NULL_TREE;
733 }
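/* For example, fold_negate_expr turns -(~A) into A + 1 for integral A,
   folds the INTEGER_CST 5 to -5, and rewrites -((int) x >> 31) as
   (int) ((unsigned int) x >> 31).  */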
734
735 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
736 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
737 return NULL_TREE. */
738
739 static tree
740 negate_expr (tree t)
741 {
742 tree type, tem;
743 location_t loc;
744
745 if (t == NULL_TREE)
746 return NULL_TREE;
747
748 loc = EXPR_LOCATION (t);
749 type = TREE_TYPE (t);
750 STRIP_SIGN_NOPS (t);
751
752 tem = fold_negate_expr (loc, t);
753 if (!tem)
754 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
755 return fold_convert_loc (loc, type, tem);
756 }
757 \f
758 /* Split a tree IN into a constant, literal and variable parts that could be
759 combined with CODE to make IN. "constant" means an expression with
760 TREE_CONSTANT but that isn't an actual constant. CODE must be a
761 commutative arithmetic operation. Store the constant part into *CONP,
762 the literal in *LITP and return the variable part. If a part isn't
763 present, set it to null. If the tree does not decompose in this way,
764 return the entire tree as the variable part and the other parts as null.
765
766 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
767 case, we negate an operand that was subtracted, except if it is a
768 literal, for which we use *MINUS_LITP instead.
769
770 If NEGATE_P is true, we are negating all of IN, again except a literal
771 for which we use *MINUS_LITP instead. If a variable part is of pointer
772 type, it is negated after converting to TYPE. This prevents us from
773 generating an illegal MINUS pointer expression.  LOC is the location of
774 the converted variable part.
775
776 If IN is itself a literal or constant, return it as appropriate.
777
778 Note that we do not guarantee that any of the three values will be the
779 same type as IN, but they will have the same signedness and mode. */
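/* For example, splitting IN = A - 5 with CODE == PLUS_EXPR returns the
   variable part A and sets *MINUS_LITP to the subtracted literal 5,
   leaving *CONP and *LITP null.  */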
780
781 static tree
782 split_tree (location_t loc, tree in, tree type, enum tree_code code,
783 tree *conp, tree *litp, tree *minus_litp, int negate_p)
784 {
785 tree var = 0;
786
787 *conp = 0;
788 *litp = 0;
789 *minus_litp = 0;
790
791 /* Strip any conversions that don't change the machine mode or signedness. */
792 STRIP_SIGN_NOPS (in);
793
794 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
795 || TREE_CODE (in) == FIXED_CST)
796 *litp = in;
797 else if (TREE_CODE (in) == code
798 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
799 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
800 /* We can associate addition and subtraction together (even
801 though the C standard doesn't say so) for integers because
802 the value is not affected. For reals, the value might be
803 affected, so we can't. */
804 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
805 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
806 {
807 tree op0 = TREE_OPERAND (in, 0);
808 tree op1 = TREE_OPERAND (in, 1);
809 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
810 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
811
812 /* First see if either of the operands is a literal, then a constant. */
813 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
814 || TREE_CODE (op0) == FIXED_CST)
815 *litp = op0, op0 = 0;
816 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
817 || TREE_CODE (op1) == FIXED_CST)
818 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
819
820 if (op0 != 0 && TREE_CONSTANT (op0))
821 *conp = op0, op0 = 0;
822 else if (op1 != 0 && TREE_CONSTANT (op1))
823 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
824
825 /* If we haven't dealt with either operand, this is not a case we can
826 decompose. Otherwise, VAR is either of the ones remaining, if any. */
827 if (op0 != 0 && op1 != 0)
828 var = in;
829 else if (op0 != 0)
830 var = op0;
831 else
832 var = op1, neg_var_p = neg1_p;
833
834 /* Now do any needed negations. */
835 if (neg_litp_p)
836 *minus_litp = *litp, *litp = 0;
837 if (neg_conp_p)
838 *conp = negate_expr (*conp);
839 if (neg_var_p)
840 {
841 /* Convert to TYPE before negating a pointer type expr. */
842 if (var && POINTER_TYPE_P (TREE_TYPE (var)))
843 var = fold_convert_loc (loc, type, var);
844 var = negate_expr (var);
845 }
846 }
847 else if (TREE_CODE (in) == BIT_NOT_EXPR
848 && code == PLUS_EXPR)
849 {
850 /* -X - 1 is folded to ~X, undo that here. */
851 *minus_litp = build_one_cst (TREE_TYPE (in));
852 var = negate_expr (TREE_OPERAND (in, 0));
853 }
854 else if (TREE_CONSTANT (in))
855 *conp = in;
856 else
857 var = in;
858
859 if (negate_p)
860 {
861 if (*litp)
862 *minus_litp = *litp, *litp = 0;
863 else if (*minus_litp)
864 *litp = *minus_litp, *minus_litp = 0;
865 *conp = negate_expr (*conp);
866 /* Convert to TYPE before negating a pointer type expr. */
867 if (var && POINTER_TYPE_P (TREE_TYPE (var)))
868 var = fold_convert_loc (loc, type, var);
869 var = negate_expr (var);
870 }
871
872 return var;
873 }
874
875 /* Re-associate trees split by the above function. T1 and T2 are
876 either expressions to associate or null. Return the new
877 expression, if any. LOC is the location of the new expression. If
878 we build an operation, do it in TYPE and with CODE. */
879
880 static tree
881 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
882 {
883 if (t1 == 0)
884 return t2;
885 else if (t2 == 0)
886 return t1;
887
888 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
889 try to fold this since we will have infinite recursion. But do
890 deal with any NEGATE_EXPRs. */
891 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
892 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
893 {
894 if (code == PLUS_EXPR)
895 {
896 if (TREE_CODE (t1) == NEGATE_EXPR)
897 return build2_loc (loc, MINUS_EXPR, type,
898 fold_convert_loc (loc, type, t2),
899 fold_convert_loc (loc, type,
900 TREE_OPERAND (t1, 0)));
901 else if (TREE_CODE (t2) == NEGATE_EXPR)
902 return build2_loc (loc, MINUS_EXPR, type,
903 fold_convert_loc (loc, type, t1),
904 fold_convert_loc (loc, type,
905 TREE_OPERAND (t2, 0)));
906 else if (integer_zerop (t2))
907 return fold_convert_loc (loc, type, t1);
908 }
909 else if (code == MINUS_EXPR)
910 {
911 if (integer_zerop (t2))
912 return fold_convert_loc (loc, type, t1);
913 }
914
915 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
916 fold_convert_loc (loc, type, t2));
917 }
918
919 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
920 fold_convert_loc (loc, type, t2));
921 }
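/* For example, with CODE == PLUS_EXPR, T1 = A + B and T2 = -C this
   builds (A + B) - C directly instead of re-folding (A + B) + (-C).  */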
922 \f
923 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
924 for use in int_const_binop, size_binop and size_diffop. */
925
926 static bool
927 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
928 {
929 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
930 return false;
931 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
932 return false;
933
934 switch (code)
935 {
936 case LSHIFT_EXPR:
937 case RSHIFT_EXPR:
938 case LROTATE_EXPR:
939 case RROTATE_EXPR:
940 return true;
941
942 default:
943 break;
944 }
945
946 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
947 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
948 && TYPE_MODE (type1) == TYPE_MODE (type2);
949 }
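/* For example, PLUS_EXPR operands match only if signedness, precision
   and mode all agree, while a shift or rotate count may have a type
   unrelated to that of the shifted operand.  */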
950
951
952 /* Combine two integer constants ARG1 and ARG2 under operation CODE
953 to produce a new constant. Return NULL_TREE if we don't know how
954 to evaluate CODE at compile-time. */
955
956 static tree
957 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
958 int overflowable)
959 {
960 wide_int res;
961 tree t;
962 tree type = TREE_TYPE (arg1);
963 signop sign = TYPE_SIGN (type);
964 bool overflow = false;
965
966 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
967 TYPE_SIGN (TREE_TYPE (parg2)));
968
969 switch (code)
970 {
971 case BIT_IOR_EXPR:
972 res = wi::bit_or (arg1, arg2);
973 break;
974
975 case BIT_XOR_EXPR:
976 res = wi::bit_xor (arg1, arg2);
977 break;
978
979 case BIT_AND_EXPR:
980 res = wi::bit_and (arg1, arg2);
981 break;
982
983 case RSHIFT_EXPR:
984 case LSHIFT_EXPR:
985 if (wi::neg_p (arg2))
986 {
987 arg2 = -arg2;
988 if (code == RSHIFT_EXPR)
989 code = LSHIFT_EXPR;
990 else
991 code = RSHIFT_EXPR;
992 }
993
994 if (code == RSHIFT_EXPR)
995 /* It's unclear from the C standard whether shifts can overflow.
996 The following code ignores overflow; perhaps a C standard
997 interpretation ruling is needed. */
998 res = wi::rshift (arg1, arg2, sign);
999 else
1000 res = wi::lshift (arg1, arg2);
1001 break;
1002
1003 case RROTATE_EXPR:
1004 case LROTATE_EXPR:
1005 if (wi::neg_p (arg2))
1006 {
1007 arg2 = -arg2;
1008 if (code == RROTATE_EXPR)
1009 code = LROTATE_EXPR;
1010 else
1011 code = RROTATE_EXPR;
1012 }
1013
1014 if (code == RROTATE_EXPR)
1015 res = wi::rrotate (arg1, arg2);
1016 else
1017 res = wi::lrotate (arg1, arg2);
1018 break;
1019
1020 case PLUS_EXPR:
1021 res = wi::add (arg1, arg2, sign, &overflow);
1022 break;
1023
1024 case MINUS_EXPR:
1025 res = wi::sub (arg1, arg2, sign, &overflow);
1026 break;
1027
1028 case MULT_EXPR:
1029 res = wi::mul (arg1, arg2, sign, &overflow);
1030 break;
1031
1032 case MULT_HIGHPART_EXPR:
1033 res = wi::mul_high (arg1, arg2, sign);
1034 break;
1035
1036 case TRUNC_DIV_EXPR:
1037 case EXACT_DIV_EXPR:
1038 if (arg2 == 0)
1039 return NULL_TREE;
1040 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1041 break;
1042
1043 case FLOOR_DIV_EXPR:
1044 if (arg2 == 0)
1045 return NULL_TREE;
1046 res = wi::div_floor (arg1, arg2, sign, &overflow);
1047 break;
1048
1049 case CEIL_DIV_EXPR:
1050 if (arg2 == 0)
1051 return NULL_TREE;
1052 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1053 break;
1054
1055 case ROUND_DIV_EXPR:
1056 if (arg2 == 0)
1057 return NULL_TREE;
1058 res = wi::div_round (arg1, arg2, sign, &overflow);
1059 break;
1060
1061 case TRUNC_MOD_EXPR:
1062 if (arg2 == 0)
1063 return NULL_TREE;
1064 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1065 break;
1066
1067 case FLOOR_MOD_EXPR:
1068 if (arg2 == 0)
1069 return NULL_TREE;
1070 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1071 break;
1072
1073 case CEIL_MOD_EXPR:
1074 if (arg2 == 0)
1075 return NULL_TREE;
1076 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1077 break;
1078
1079 case ROUND_MOD_EXPR:
1080 if (arg2 == 0)
1081 return NULL_TREE;
1082 res = wi::mod_round (arg1, arg2, sign, &overflow);
1083 break;
1084
1085 case MIN_EXPR:
1086 res = wi::min (arg1, arg2, sign);
1087 break;
1088
1089 case MAX_EXPR:
1090 res = wi::max (arg1, arg2, sign);
1091 break;
1092
1093 default:
1094 return NULL_TREE;
1095 }
1096
1097 t = force_fit_type (type, res, overflowable,
1098 (((sign == SIGNED || overflowable == -1)
1099 && overflow)
1100 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1101
1102 return t;
1103 }
1104
1105 tree
1106 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1107 {
1108 return int_const_binop_1 (code, arg1, arg2, 1);
1109 }
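/* For example, int_const_binop (TRUNC_DIV_EXPR, 7, 2) folds to the
   INTEGER_CST 3, while a zero divisor makes the worker return NULL_TREE
   instead of folding.  */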
1110
1111 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1112 constant. We assume ARG1 and ARG2 have the same data type, or at least
1113 are the same kind of constant and the same machine mode. Return zero if
1114 combining the constants is not allowed in the current operating mode. */
1115
1116 static tree
1117 const_binop (enum tree_code code, tree arg1, tree arg2)
1118 {
1119 /* Sanity check for the recursive cases. */
1120 if (!arg1 || !arg2)
1121 return NULL_TREE;
1122
1123 STRIP_NOPS (arg1);
1124 STRIP_NOPS (arg2);
1125
1126 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1127 {
1128 if (code == POINTER_PLUS_EXPR)
1129 return int_const_binop (PLUS_EXPR,
1130 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1131
1132 return int_const_binop (code, arg1, arg2);
1133 }
1134
1135 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1136 {
1137 machine_mode mode;
1138 REAL_VALUE_TYPE d1;
1139 REAL_VALUE_TYPE d2;
1140 REAL_VALUE_TYPE value;
1141 REAL_VALUE_TYPE result;
1142 bool inexact;
1143 tree t, type;
1144
1145 /* The following codes are handled by real_arithmetic. */
1146 switch (code)
1147 {
1148 case PLUS_EXPR:
1149 case MINUS_EXPR:
1150 case MULT_EXPR:
1151 case RDIV_EXPR:
1152 case MIN_EXPR:
1153 case MAX_EXPR:
1154 break;
1155
1156 default:
1157 return NULL_TREE;
1158 }
1159
1160 d1 = TREE_REAL_CST (arg1);
1161 d2 = TREE_REAL_CST (arg2);
1162
1163 type = TREE_TYPE (arg1);
1164 mode = TYPE_MODE (type);
1165
1166 /* Don't perform operation if we honor signaling NaNs and
1167 either operand is a signaling NaN. */
1168 if (HONOR_SNANS (mode)
1169 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1170 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1171 return NULL_TREE;
1172
1173 /* Don't perform operation if it would raise a division
1174 by zero exception. */
1175 if (code == RDIV_EXPR
1176 && real_equal (&d2, &dconst0)
1177 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1178 return NULL_TREE;
1179
1180 /* If either operand is a NaN, just return it. Otherwise, set up
1181 for floating-point trap; we return an overflow. */
1182 if (REAL_VALUE_ISNAN (d1))
1183 {
1184 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1185 is off. */
1186 d1.signalling = 0;
1187 t = build_real (type, d1);
1188 return t;
1189 }
1190 else if (REAL_VALUE_ISNAN (d2))
1191 {
1192 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1193 is off. */
1194 d2.signalling = 0;
1195 t = build_real (type, d2);
1196 return t;
1197 }
1198
1199 inexact = real_arithmetic (&value, code, &d1, &d2);
1200 real_convert (&result, mode, &value);
1201
1202 /* Don't constant fold this floating point operation if
1203 the result has overflowed and flag_trapping_math. */
1204 if (flag_trapping_math
1205 && MODE_HAS_INFINITIES (mode)
1206 && REAL_VALUE_ISINF (result)
1207 && !REAL_VALUE_ISINF (d1)
1208 && !REAL_VALUE_ISINF (d2))
1209 return NULL_TREE;
1210
1211 /* Don't constant fold this floating point operation if the
1212 result may depend upon the run-time rounding mode and
1213 flag_rounding_math is set, or if GCC's software emulation
1214 is unable to accurately represent the result. */
1215 if ((flag_rounding_math
1216 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1217 && (inexact || !real_identical (&result, &value)))
1218 return NULL_TREE;
1219
1220 t = build_real (type, result);
1221
1222 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1223 return t;
1224 }
1225
1226 if (TREE_CODE (arg1) == FIXED_CST)
1227 {
1228 FIXED_VALUE_TYPE f1;
1229 FIXED_VALUE_TYPE f2;
1230 FIXED_VALUE_TYPE result;
1231 tree t, type;
1232 int sat_p;
1233 bool overflow_p;
1234
1235 /* The following codes are handled by fixed_arithmetic. */
1236 switch (code)
1237 {
1238 case PLUS_EXPR:
1239 case MINUS_EXPR:
1240 case MULT_EXPR:
1241 case TRUNC_DIV_EXPR:
1242 if (TREE_CODE (arg2) != FIXED_CST)
1243 return NULL_TREE;
1244 f2 = TREE_FIXED_CST (arg2);
1245 break;
1246
1247 case LSHIFT_EXPR:
1248 case RSHIFT_EXPR:
1249 {
1250 if (TREE_CODE (arg2) != INTEGER_CST)
1251 return NULL_TREE;
1252 wide_int w2 = arg2;
1253 f2.data.high = w2.elt (1);
1254 f2.data.low = w2.elt (0);
1255 f2.mode = SImode;
1256 }
1257 break;
1258
1259 default:
1260 return NULL_TREE;
1261 }
1262
1263 f1 = TREE_FIXED_CST (arg1);
1264 type = TREE_TYPE (arg1);
1265 sat_p = TYPE_SATURATING (type);
1266 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1267 t = build_fixed (type, result);
1268 /* Propagate overflow flags. */
1269 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1270 TREE_OVERFLOW (t) = 1;
1271 return t;
1272 }
1273
1274 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1275 {
1276 tree type = TREE_TYPE (arg1);
1277 tree r1 = TREE_REALPART (arg1);
1278 tree i1 = TREE_IMAGPART (arg1);
1279 tree r2 = TREE_REALPART (arg2);
1280 tree i2 = TREE_IMAGPART (arg2);
1281 tree real, imag;
1282
1283 switch (code)
1284 {
1285 case PLUS_EXPR:
1286 case MINUS_EXPR:
1287 real = const_binop (code, r1, r2);
1288 imag = const_binop (code, i1, i2);
1289 break;
1290
1291 case MULT_EXPR:
1292 if (COMPLEX_FLOAT_TYPE_P (type))
1293 return do_mpc_arg2 (arg1, arg2, type,
1294 /* do_nonfinite= */ folding_initializer,
1295 mpc_mul);
1296
1297 real = const_binop (MINUS_EXPR,
1298 const_binop (MULT_EXPR, r1, r2),
1299 const_binop (MULT_EXPR, i1, i2));
1300 imag = const_binop (PLUS_EXPR,
1301 const_binop (MULT_EXPR, r1, i2),
1302 const_binop (MULT_EXPR, i1, r2));
1303 break;
1304
1305 case RDIV_EXPR:
1306 if (COMPLEX_FLOAT_TYPE_P (type))
1307 return do_mpc_arg2 (arg1, arg2, type,
1308 /* do_nonfinite= */ folding_initializer,
1309 mpc_div);
1310 /* Fall through.  */
1311 case TRUNC_DIV_EXPR:
1312 case CEIL_DIV_EXPR:
1313 case FLOOR_DIV_EXPR:
1314 case ROUND_DIV_EXPR:
1315 if (flag_complex_method == 0)
1316 {
1317 /* Keep this algorithm in sync with
1318 tree-complex.c:expand_complex_div_straight().
1319
1320 Expand complex division to scalars, straightforward algorithm.
1321 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1322 t = br*br + bi*bi
1323 */
1324 tree magsquared
1325 = const_binop (PLUS_EXPR,
1326 const_binop (MULT_EXPR, r2, r2),
1327 const_binop (MULT_EXPR, i2, i2));
1328 tree t1
1329 = const_binop (PLUS_EXPR,
1330 const_binop (MULT_EXPR, r1, r2),
1331 const_binop (MULT_EXPR, i1, i2));
1332 tree t2
1333 = const_binop (MINUS_EXPR,
1334 const_binop (MULT_EXPR, i1, r2),
1335 const_binop (MULT_EXPR, r1, i2));
1336
1337 real = const_binop (code, t1, magsquared);
1338 imag = const_binop (code, t2, magsquared);
1339 }
1340 else
1341 {
1342 /* Keep this algorithm in sync with
1343 tree-complex.c:expand_complex_div_wide().
1344
1345 Expand complex division to scalars, modified algorithm to minimize
1346 overflow with wide input ranges. */
1347 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1348 fold_abs_const (r2, TREE_TYPE (type)),
1349 fold_abs_const (i2, TREE_TYPE (type)));
1350
1351 if (integer_nonzerop (compare))
1352 {
1353 /* In the TRUE branch, we compute
1354 ratio = br/bi;
1355 div = (br * ratio) + bi;
1356 tr = (ar * ratio) + ai;
1357 ti = (ai * ratio) - ar;
1358 tr = tr / div;
1359 ti = ti / div; */
1360 tree ratio = const_binop (code, r2, i2);
1361 tree div = const_binop (PLUS_EXPR, i2,
1362 const_binop (MULT_EXPR, r2, ratio));
1363 real = const_binop (MULT_EXPR, r1, ratio);
1364 real = const_binop (PLUS_EXPR, real, i1);
1365 real = const_binop (code, real, div);
1366
1367 imag = const_binop (MULT_EXPR, i1, ratio);
1368 imag = const_binop (MINUS_EXPR, imag, r1);
1369 imag = const_binop (code, imag, div);
1370 }
1371 else
1372 {
1373 /* In the FALSE branch, we compute
1374 ratio = d/c;
1375 divisor = (d * ratio) + c;
1376 tr = (b * ratio) + a;
1377 ti = b - (a * ratio);
1378 tr = tr / div;
1379 ti = ti / div; */
1380 tree ratio = const_binop (code, i2, r2);
1381 tree div = const_binop (PLUS_EXPR, r2,
1382 const_binop (MULT_EXPR, i2, ratio));
1383
1384 real = const_binop (MULT_EXPR, i1, ratio);
1385 real = const_binop (PLUS_EXPR, real, r1);
1386 real = const_binop (code, real, div);
1387
1388 imag = const_binop (MULT_EXPR, r1, ratio);
1389 imag = const_binop (MINUS_EXPR, i1, imag);
1390 imag = const_binop (code, imag, div);
1391 }
1392 }
1393 break;
1394
1395 default:
1396 return NULL_TREE;
1397 }
1398
1399 if (real && imag)
1400 return build_complex (type, real, imag);
1401 }
1402
1403 if (TREE_CODE (arg1) == VECTOR_CST
1404 && TREE_CODE (arg2) == VECTOR_CST)
1405 {
1406 tree type = TREE_TYPE (arg1);
1407 int count = TYPE_VECTOR_SUBPARTS (type), i;
1408 tree *elts = XALLOCAVEC (tree, count);
1409
1410 for (i = 0; i < count; i++)
1411 {
1412 tree elem1 = VECTOR_CST_ELT (arg1, i);
1413 tree elem2 = VECTOR_CST_ELT (arg2, i);
1414
1415 elts[i] = const_binop (code, elem1, elem2);
1416
1417 /* It is possible that const_binop cannot handle the given
1418 code and returns NULL_TREE.  */
1419 if (elts[i] == NULL_TREE)
1420 return NULL_TREE;
1421 }
1422
1423 return build_vector (type, elts);
1424 }
1425
1426 /* Shifts allow a scalar offset for a vector. */
1427 if (TREE_CODE (arg1) == VECTOR_CST
1428 && TREE_CODE (arg2) == INTEGER_CST)
1429 {
1430 tree type = TREE_TYPE (arg1);
1431 int count = TYPE_VECTOR_SUBPARTS (type), i;
1432 tree *elts = XALLOCAVEC (tree, count);
1433
1434 for (i = 0; i < count; i++)
1435 {
1436 tree elem1 = VECTOR_CST_ELT (arg1, i);
1437
1438 elts[i] = const_binop (code, elem1, arg2);
1439
1440 /* It is possible that const_binop cannot handle the given
1441 code and returns NULL_TREE.  */
1442 if (elts[i] == NULL_TREE)
1443 return NULL_TREE;
1444 }
1445
1446 return build_vector (type, elts);
1447 }
1448 return NULL_TREE;
1449 }
1450
1451 /* Overload that adds a TYPE parameter to be able to dispatch
1452 to fold_relational_const. */
1453
1454 tree
1455 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1456 {
1457 if (TREE_CODE_CLASS (code) == tcc_comparison)
1458 return fold_relational_const (code, type, arg1, arg2);
1459
1460 /* ??? Until we make the const_binop worker take the type of the
1461 result as argument, put those cases that need it here.  */
1462 switch (code)
1463 {
1464 case COMPLEX_EXPR:
1465 if ((TREE_CODE (arg1) == REAL_CST
1466 && TREE_CODE (arg2) == REAL_CST)
1467 || (TREE_CODE (arg1) == INTEGER_CST
1468 && TREE_CODE (arg2) == INTEGER_CST))
1469 return build_complex (type, arg1, arg2);
1470 return NULL_TREE;
1471
1472 case VEC_PACK_TRUNC_EXPR:
1473 case VEC_PACK_FIX_TRUNC_EXPR:
1474 {
1475 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1476 tree *elts;
1477
1478 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1479 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1480 if (TREE_CODE (arg1) != VECTOR_CST
1481 || TREE_CODE (arg2) != VECTOR_CST)
1482 return NULL_TREE;
1483
1484 elts = XALLOCAVEC (tree, nelts);
1485 if (!vec_cst_ctor_to_array (arg1, elts)
1486 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1487 return NULL_TREE;
1488
1489 for (i = 0; i < nelts; i++)
1490 {
1491 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1492 ? NOP_EXPR : FIX_TRUNC_EXPR,
1493 TREE_TYPE (type), elts[i]);
1494 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1495 return NULL_TREE;
1496 }
1497
1498 return build_vector (type, elts);
1499 }
1500
1501 case VEC_WIDEN_MULT_LO_EXPR:
1502 case VEC_WIDEN_MULT_HI_EXPR:
1503 case VEC_WIDEN_MULT_EVEN_EXPR:
1504 case VEC_WIDEN_MULT_ODD_EXPR:
1505 {
1506 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1507 unsigned int out, ofs, scale;
1508 tree *elts;
1509
1510 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1511 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1512 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1513 return NULL_TREE;
1514
1515 elts = XALLOCAVEC (tree, nelts * 4);
1516 if (!vec_cst_ctor_to_array (arg1, elts)
1517 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1518 return NULL_TREE;
1519
1520 if (code == VEC_WIDEN_MULT_LO_EXPR)
1521 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1522 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1523 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1524 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1525 scale = 1, ofs = 0;
1526 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1527 scale = 1, ofs = 1;
1528
1529 for (out = 0; out < nelts; out++)
1530 {
1531 unsigned int in1 = (out << scale) + ofs;
1532 unsigned int in2 = in1 + nelts * 2;
1533 tree t1, t2;
1534
1535 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1536 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1537
1538 if (t1 == NULL_TREE || t2 == NULL_TREE)
1539 return NULL_TREE;
1540 elts[out] = const_binop (MULT_EXPR, t1, t2);
1541 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1542 return NULL_TREE;
1543 }
1544
1545 return build_vector (type, elts);
1546 }
1547
1548 default:;
1549 }
1550
1551 if (TREE_CODE_CLASS (code) != tcc_binary)
1552 return NULL_TREE;
1553
1554 /* Make sure TYPE and ARG1 have the same saturating flag.  */
1555 gcc_checking_assert (TYPE_SATURATING (type)
1556 == TYPE_SATURATING (TREE_TYPE (arg1)));
1557
1558 return const_binop (code, arg1, arg2);
1559 }
1560
1561 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1562 Return zero if computing the constant is not possible.  */
1563
1564 tree
1565 const_unop (enum tree_code code, tree type, tree arg0)
1566 {
1567 /* Don't perform the operation, other than NEGATE and ABS, if
1568 flag_signaling_nans is on and the operand is a signaling NaN. */
1569 if (TREE_CODE (arg0) == REAL_CST
1570 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1571 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1572 && code != NEGATE_EXPR
1573 && code != ABS_EXPR)
1574 return NULL_TREE;
1575
1576 switch (code)
1577 {
1578 CASE_CONVERT:
1579 case FLOAT_EXPR:
1580 case FIX_TRUNC_EXPR:
1581 case FIXED_CONVERT_EXPR:
1582 return fold_convert_const (code, type, arg0);
1583
1584 case ADDR_SPACE_CONVERT_EXPR:
1585 /* If the source address is 0, and the source address space
1586 cannot have a valid object at 0, fold to dest type null. */
1587 if (integer_zerop (arg0)
1588 && !(targetm.addr_space.zero_address_valid
1589 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1590 return fold_convert_const (code, type, arg0);
1591 break;
1592
1593 case VIEW_CONVERT_EXPR:
1594 return fold_view_convert_expr (type, arg0);
1595
1596 case NEGATE_EXPR:
1597 {
1598 /* Can't call fold_negate_const directly here as that doesn't
1599 handle all cases and we might not be able to negate some
1600 constants. */
1601 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1602 if (tem && CONSTANT_CLASS_P (tem))
1603 return tem;
1604 break;
1605 }
1606
1607 case ABS_EXPR:
1608 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1609 return fold_abs_const (arg0, type);
1610 break;
1611
1612 case CONJ_EXPR:
1613 if (TREE_CODE (arg0) == COMPLEX_CST)
1614 {
1615 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1616 TREE_TYPE (type));
1617 return build_complex (type, TREE_REALPART (arg0), ipart);
1618 }
1619 break;
1620
1621 case BIT_NOT_EXPR:
1622 if (TREE_CODE (arg0) == INTEGER_CST)
1623 return fold_not_const (arg0, type);
1624 /* Perform BIT_NOT_EXPR on each element individually. */
1625 else if (TREE_CODE (arg0) == VECTOR_CST)
1626 {
1627 tree *elements;
1628 tree elem;
1629 unsigned count = VECTOR_CST_NELTS (arg0), i;
1630
1631 elements = XALLOCAVEC (tree, count);
1632 for (i = 0; i < count; i++)
1633 {
1634 elem = VECTOR_CST_ELT (arg0, i);
1635 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1636 if (elem == NULL_TREE)
1637 break;
1638 elements[i] = elem;
1639 }
1640 if (i == count)
1641 return build_vector (type, elements);
1642 }
1643 break;
1644
1645 case TRUTH_NOT_EXPR:
1646 if (TREE_CODE (arg0) == INTEGER_CST)
1647 return constant_boolean_node (integer_zerop (arg0), type);
1648 break;
1649
1650 case REALPART_EXPR:
1651 if (TREE_CODE (arg0) == COMPLEX_CST)
1652 return fold_convert (type, TREE_REALPART (arg0));
1653 break;
1654
1655 case IMAGPART_EXPR:
1656 if (TREE_CODE (arg0) == COMPLEX_CST)
1657 return fold_convert (type, TREE_IMAGPART (arg0));
1658 break;
1659
1660 case VEC_UNPACK_LO_EXPR:
1661 case VEC_UNPACK_HI_EXPR:
1662 case VEC_UNPACK_FLOAT_LO_EXPR:
1663 case VEC_UNPACK_FLOAT_HI_EXPR:
1664 {
1665 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1666 tree *elts;
1667 enum tree_code subcode;
1668
1669 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1670 if (TREE_CODE (arg0) != VECTOR_CST)
1671 return NULL_TREE;
1672
1673 elts = XALLOCAVEC (tree, nelts * 2);
1674 if (!vec_cst_ctor_to_array (arg0, elts))
1675 return NULL_TREE;
1676
1677 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1678 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1679 elts += nelts;
1680
1681 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1682 subcode = NOP_EXPR;
1683 else
1684 subcode = FLOAT_EXPR;
1685
1686 for (i = 0; i < nelts; i++)
1687 {
1688 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1689 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1690 return NULL_TREE;
1691 }
1692
1693 return build_vector (type, elts);
1694 }
1695
1696 case REDUC_MIN_EXPR:
1697 case REDUC_MAX_EXPR:
1698 case REDUC_PLUS_EXPR:
1699 {
1700 unsigned int nelts, i;
1701 tree *elts;
1702 enum tree_code subcode;
1703
1704 if (TREE_CODE (arg0) != VECTOR_CST)
1705 return NULL_TREE;
1706 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1707
1708 elts = XALLOCAVEC (tree, nelts);
1709 if (!vec_cst_ctor_to_array (arg0, elts))
1710 return NULL_TREE;
1711
1712 switch (code)
1713 {
1714 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1715 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1716 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1717 default: gcc_unreachable ();
1718 }
1719
1720 for (i = 1; i < nelts; i++)
1721 {
1722 elts[0] = const_binop (subcode, elts[0], elts[i]);
1723 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1724 return NULL_TREE;
1725 }
1726
1727 return elts[0];
1728 }
1729
1730 default:
1731 break;
1732 }
1733
1734 return NULL_TREE;
1735 }
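/* For example, const_unop (BIT_NOT_EXPR, type, 5) folds to -6 in a
   signed type, and REALPART_EXPR of the COMPLEX_CST 3 + 4i folds
   to 3.  */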
1736
1737 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1738 indicates which particular sizetype to create. */
1739
1740 tree
1741 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1742 {
1743 return build_int_cst (sizetype_tab[(int) kind], number);
1744 }
1745 \f
1746 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1747 is a tree code. The type of the result is taken from the operands.
1748 Both must be equivalent integer types, ala int_binop_types_match_p.
1749 If the operands are constant, so is the result. */
1750
1751 tree
1752 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1753 {
1754 tree type = TREE_TYPE (arg0);
1755
1756 if (arg0 == error_mark_node || arg1 == error_mark_node)
1757 return error_mark_node;
1758
1759 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1760 TREE_TYPE (arg1)));
1761
1762 /* Handle the special case of two integer constants faster. */
1763 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1764 {
1765 /* And some specific cases even faster than that. */
1766 if (code == PLUS_EXPR)
1767 {
1768 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1769 return arg1;
1770 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1771 return arg0;
1772 }
1773 else if (code == MINUS_EXPR)
1774 {
1775 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1776 return arg0;
1777 }
1778 else if (code == MULT_EXPR)
1779 {
1780 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1781 return arg1;
1782 }
1783
1784 /* Handle general case of two integer constants. For sizetype
1785 constant calculations we always want to know about overflow,
1786 even in the unsigned case. */
1787 return int_const_binop_1 (code, arg0, arg1, -1);
1788 }
1789
1790 return fold_build2_loc (loc, code, type, arg0, arg1);
1791 }
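/* For example, size_binop (PLUS_EXPR, size_int (3), size_int (4)) folds
   immediately to the sizetype constant 7; for constant operands overflow
   is always tracked, even though sizetype is unsigned.  */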
1792
1793 /* Given two values, either both of sizetype or both of bitsizetype,
1794 compute the difference between the two values. Return the value
1795 in the signed type corresponding to the type of the operands.  */
1796
1797 tree
1798 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1799 {
1800 tree type = TREE_TYPE (arg0);
1801 tree ctype;
1802
1803 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1804 TREE_TYPE (arg1)));
1805
1806 /* If the type is already signed, just do the simple thing. */
1807 if (!TYPE_UNSIGNED (type))
1808 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1809
1810 if (type == sizetype)
1811 ctype = ssizetype;
1812 else if (type == bitsizetype)
1813 ctype = sbitsizetype;
1814 else
1815 ctype = signed_type_for (type);
1816
1817 /* If either operand is not a constant, do the conversions to the signed
1818 type and subtract. The hardware will do the right thing with any
1819 overflow in the subtraction. */
1820 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1821 return size_binop_loc (loc, MINUS_EXPR,
1822 fold_convert_loc (loc, ctype, arg0),
1823 fold_convert_loc (loc, ctype, arg1));
1824
1825 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1826 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1827 overflow) and negate (which can't either). Special-case a result
1828 of zero while we're here. */
1829 if (tree_int_cst_equal (arg0, arg1))
1830 return build_int_cst (ctype, 0);
1831 else if (tree_int_cst_lt (arg1, arg0))
1832 return fold_convert_loc (loc, ctype,
1833 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1834 else
1835 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1836 fold_convert_loc (loc, ctype,
1837 size_binop_loc (loc,
1838 MINUS_EXPR,
1839 arg1, arg0)));
1840 }
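/* For example, for sizetype constants 4 and 12 this returns the
   ssizetype constant -8, computed as 0 - (12 - 4) so that the unsigned
   subtraction never wraps.  */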
1841 \f
1842 /* A subroutine of fold_convert_const handling conversions of an
1843 INTEGER_CST to another integer type. */
1844
1845 static tree
1846 fold_convert_const_int_from_int (tree type, const_tree arg1)
1847 {
1848 /* Given an integer constant, make a new constant with the new type,
1849 appropriately sign-extended or truncated.  Use widest_int
1850 so that any extension is done according to ARG1's type.  */
1851 return force_fit_type (type, wi::to_widest (arg1),
1852 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1853 TREE_OVERFLOW (arg1));
1854 }
1855
1856 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1857 to an integer type. */
1858
1859 static tree
1860 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1861 {
1862 bool overflow = false;
1863 tree t;
1864
1865 /* The following code implements the floating-point to integer
1866 conversion rules required by the Java Language Specification:
1867 IEEE NaNs are mapped to zero and values that overflow
1868 the target precision saturate, i.e. values greater than
1869 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1870 are mapped to INT_MIN. These semantics are allowed by the
1871 C and C++ standards that simply state that the behavior of
1872 FP-to-integer conversion is unspecified upon overflow. */
1873
1874 wide_int val;
1875 REAL_VALUE_TYPE r;
1876 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1877
1878 switch (code)
1879 {
1880 case FIX_TRUNC_EXPR:
1881 real_trunc (&r, VOIDmode, &x);
1882 break;
1883
1884 default:
1885 gcc_unreachable ();
1886 }
1887
1888 /* If R is NaN, return zero and show we have an overflow. */
1889 if (REAL_VALUE_ISNAN (r))
1890 {
1891 overflow = true;
1892 val = wi::zero (TYPE_PRECISION (type));
1893 }
1894
1895 /* See if R is less than the lower bound or greater than the
1896 upper bound. */
1897
1898 if (! overflow)
1899 {
1900 tree lt = TYPE_MIN_VALUE (type);
1901 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1902 if (real_less (&r, &l))
1903 {
1904 overflow = true;
1905 val = lt;
1906 }
1907 }
1908
1909 if (! overflow)
1910 {
1911 tree ut = TYPE_MAX_VALUE (type);
1912 if (ut)
1913 {
1914 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1915 if (real_less (&u, &r))
1916 {
1917 overflow = true;
1918 val = ut;
1919 }
1920 }
1921 }
1922
1923 if (! overflow)
1924 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1925
1926 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1927 return t;
1928 }
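/* Worked example (illustrative, not part of the original source):
   FIX_TRUNC_EXPR of the REAL_CST 3.7 to a 32-bit int folds to 3.
   An out-of-range value such as 1e30 saturates to INT_MAX and a NaN
   folds to 0, in both cases with TREE_OVERFLOW set on the result so
   callers can diagnose the out-of-range conversion.  */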
1929
1930 /* A subroutine of fold_convert_const handling conversions of a
1931 FIXED_CST to an integer type. */
1932
1933 static tree
1934 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1935 {
1936 tree t;
1937 double_int temp, temp_trunc;
1938	  machine_mode mode;
1939
1940 /* Right shift FIXED_CST to temp by fbit. */
1941 temp = TREE_FIXED_CST (arg1).data;
1942 mode = TREE_FIXED_CST (arg1).mode;
1943 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1944 {
1945 temp = temp.rshift (GET_MODE_FBIT (mode),
1946 HOST_BITS_PER_DOUBLE_INT,
1947 SIGNED_FIXED_POINT_MODE_P (mode));
1948
1949 /* Left shift temp to temp_trunc by fbit. */
1950 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1951 HOST_BITS_PER_DOUBLE_INT,
1952 SIGNED_FIXED_POINT_MODE_P (mode));
1953 }
1954 else
1955 {
1956 temp = double_int_zero;
1957 temp_trunc = double_int_zero;
1958 }
1959
1960	  /* If FIXED_CST is negative, we need to round the value toward 0
1961	     by adding 1 to temp when the discarded fractional bits are not all zero.  */
1962 if (SIGNED_FIXED_POINT_MODE_P (mode)
1963 && temp_trunc.is_negative ()
1964 && TREE_FIXED_CST (arg1).data != temp_trunc)
1965 temp += double_int_one;
1966
1967 /* Given a fixed-point constant, make new constant with new type,
1968 appropriately sign-extended or truncated. */
1969 t = force_fit_type (type, temp, -1,
1970 (temp.is_negative ()
1971 && (TYPE_UNSIGNED (type)
1972 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1973 | TREE_OVERFLOW (arg1));
1974
1975 return t;
1976 }
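/* Worked example of the rounding above (illustrative, not part of the
   original source): for a signed fixed-point value -2.5 with fbit == 1
   the payload is -5.  The arithmetic right shift gives temp == -3
   (rounded toward negative infinity); shifting back gives
   temp_trunc == -6 != -5, so fractional bits were discarded and 1 is
   added, yielding -2, i.e. truncation toward zero.  */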
1977
1978	/* A subroutine of fold_convert_const handling conversions of a REAL_CST
1979	   to another floating-point type.  */
1980
1981 static tree
1982 fold_convert_const_real_from_real (tree type, const_tree arg1)
1983 {
1984 REAL_VALUE_TYPE value;
1985 tree t;
1986
1987 /* Don't perform the operation if flag_signaling_nans is on
1988 and the operand is a signaling NaN. */
1989 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
1990 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
1991 return NULL_TREE;
1992
1993 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1994 t = build_real (type, value);
1995
1996 /* If converting an infinity or NAN to a representation that doesn't
1997 have one, set the overflow bit so that we can produce some kind of
1998 error message at the appropriate point if necessary. It's not the
1999 most user-friendly message, but it's better than nothing. */
2000 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2001 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2002 TREE_OVERFLOW (t) = 1;
2003 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2004 && !MODE_HAS_NANS (TYPE_MODE (type)))
2005 TREE_OVERFLOW (t) = 1;
2006 /* Regular overflow, conversion produced an infinity in a mode that
2007 can't represent them. */
2008 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2009 && REAL_VALUE_ISINF (value)
2010 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2011 TREE_OVERFLOW (t) = 1;
2012 else
2013 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2014 return t;
2015 }
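/* Illustrative note (not part of the original source): an ordinary
   value that merely loses precision is silently rounded by
   real_convert and keeps TREE_OVERFLOW clear; only an infinity or NaN
   landing in a mode that cannot represent it, or a finite value that
   newly overflows to infinity, gets TREE_OVERFLOW set (or the flag is
   inherited from the operand).  */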
2016
2017	/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2018	   to a floating-point type.  */
2019
2020 static tree
2021 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2022 {
2023 REAL_VALUE_TYPE value;
2024 tree t;
2025
2026 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2027 t = build_real (type, value);
2028
2029 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2030 return t;
2031 }
2032
2033	/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2034 to another fixed-point type. */
2035
2036 static tree
2037 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2038 {
2039 FIXED_VALUE_TYPE value;
2040 tree t;
2041 bool overflow_p;
2042
2043 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2044 TYPE_SATURATING (type));
2045 t = build_fixed (type, value);
2046
2047 /* Propagate overflow flags. */
2048 if (overflow_p | TREE_OVERFLOW (arg1))
2049 TREE_OVERFLOW (t) = 1;
2050 return t;
2051 }
2052
2053	/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2054 to a fixed-point type. */
2055
2056 static tree
2057 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2058 {
2059 FIXED_VALUE_TYPE value;
2060 tree t;
2061 bool overflow_p;
2062 double_int di;
2063
2064 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2065
2066 di.low = TREE_INT_CST_ELT (arg1, 0);
2067 if (TREE_INT_CST_NUNITS (arg1) == 1)
2068 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2069 else
2070 di.high = TREE_INT_CST_ELT (arg1, 1);
2071
2072 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2073 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2074 TYPE_SATURATING (type));
2075 t = build_fixed (type, value);
2076
2077 /* Propagate overflow flags. */
2078 if (overflow_p | TREE_OVERFLOW (arg1))
2079 TREE_OVERFLOW (t) = 1;
2080 return t;
2081 }
2082
2083	/* A subroutine of fold_convert_const handling conversions of a REAL_CST
2084 to a fixed-point type. */
2085
2086 static tree
2087 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2088 {
2089 FIXED_VALUE_TYPE value;
2090 tree t;
2091 bool overflow_p;
2092
2093 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2094 &TREE_REAL_CST (arg1),
2095 TYPE_SATURATING (type));
2096 t = build_fixed (type, value);
2097
2098 /* Propagate overflow flags. */
2099 if (overflow_p | TREE_OVERFLOW (arg1))
2100 TREE_OVERFLOW (t) = 1;
2101 return t;
2102 }
2103
2104 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2105 type TYPE. If no simplification can be done return NULL_TREE. */
2106
2107 static tree
2108 fold_convert_const (enum tree_code code, tree type, tree arg1)
2109 {
2110 if (TREE_TYPE (arg1) == type)
2111 return arg1;
2112
2113 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2114 || TREE_CODE (type) == OFFSET_TYPE)
2115 {
2116 if (TREE_CODE (arg1) == INTEGER_CST)
2117 return fold_convert_const_int_from_int (type, arg1);
2118 else if (TREE_CODE (arg1) == REAL_CST)
2119 return fold_convert_const_int_from_real (code, type, arg1);
2120 else if (TREE_CODE (arg1) == FIXED_CST)
2121 return fold_convert_const_int_from_fixed (type, arg1);
2122 }
2123 else if (TREE_CODE (type) == REAL_TYPE)
2124 {
2125 if (TREE_CODE (arg1) == INTEGER_CST)
2126 return build_real_from_int_cst (type, arg1);
2127 else if (TREE_CODE (arg1) == REAL_CST)
2128 return fold_convert_const_real_from_real (type, arg1);
2129 else if (TREE_CODE (arg1) == FIXED_CST)
2130 return fold_convert_const_real_from_fixed (type, arg1);
2131 }
2132 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2133 {
2134 if (TREE_CODE (arg1) == FIXED_CST)
2135 return fold_convert_const_fixed_from_fixed (type, arg1);
2136 else if (TREE_CODE (arg1) == INTEGER_CST)
2137 return fold_convert_const_fixed_from_int (type, arg1);
2138 else if (TREE_CODE (arg1) == REAL_CST)
2139 return fold_convert_const_fixed_from_real (type, arg1);
2140 }
2141 else if (TREE_CODE (type) == VECTOR_TYPE)
2142 {
2143 if (TREE_CODE (arg1) == VECTOR_CST
2144 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2145 {
2146 int len = TYPE_VECTOR_SUBPARTS (type);
2147 tree elttype = TREE_TYPE (type);
2148 tree *v = XALLOCAVEC (tree, len);
2149 for (int i = 0; i < len; ++i)
2150 {
2151 tree elt = VECTOR_CST_ELT (arg1, i);
2152 tree cvt = fold_convert_const (code, elttype, elt);
2153 if (cvt == NULL_TREE)
2154 return NULL_TREE;
2155 v[i] = cvt;
2156 }
2157 return build_vector (type, v);
2158 }
2159 }
2160 return NULL_TREE;
2161 }
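/* Illustrative sketch (not part of the original source): callers use
   this as a pure constant-folding query, e.g.

     tree t = fold_convert_const (NOP_EXPR, type, arg);
     if (t == NULL_TREE)
       ... ARG was not a foldable constant for TYPE ...

   and the VECTOR_TYPE case above likewise gives up, returning
   NULL_TREE, as soon as a single element fails to fold.  */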
2162
2163 /* Construct a vector of zero elements of vector type TYPE. */
2164
2165 static tree
2166 build_zero_vector (tree type)
2167 {
2168 tree t;
2169
2170 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2171 return build_vector_from_val (type, t);
2172 }
2173
2174	/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2175
2176 bool
2177 fold_convertible_p (const_tree type, const_tree arg)
2178 {
2179 tree orig = TREE_TYPE (arg);
2180
2181 if (type == orig)
2182 return true;
2183
2184 if (TREE_CODE (arg) == ERROR_MARK
2185 || TREE_CODE (type) == ERROR_MARK
2186 || TREE_CODE (orig) == ERROR_MARK)
2187 return false;
2188
2189 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2190 return true;
2191
2192 switch (TREE_CODE (type))
2193 {
2194 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2195 case POINTER_TYPE: case REFERENCE_TYPE:
2196 case OFFSET_TYPE:
2197 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2198 || TREE_CODE (orig) == OFFSET_TYPE);
2199
2200 case REAL_TYPE:
2201 case FIXED_POINT_TYPE:
2202 case COMPLEX_TYPE:
2203 case VECTOR_TYPE:
2204 case VOID_TYPE:
2205 return TREE_CODE (type) == TREE_CODE (orig);
2206
2207 default:
2208 return false;
2209 }
2210 }
2211
2212 /* Convert expression ARG to type TYPE. Used by the middle-end for
2213 simple conversions in preference to calling the front-end's convert. */
2214
2215 tree
2216 fold_convert_loc (location_t loc, tree type, tree arg)
2217 {
2218 tree orig = TREE_TYPE (arg);
2219 tree tem;
2220
2221 if (type == orig)
2222 return arg;
2223
2224 if (TREE_CODE (arg) == ERROR_MARK
2225 || TREE_CODE (type) == ERROR_MARK
2226 || TREE_CODE (orig) == ERROR_MARK)
2227 return error_mark_node;
2228
2229 switch (TREE_CODE (type))
2230 {
2231 case POINTER_TYPE:
2232 case REFERENCE_TYPE:
2233 /* Handle conversions between pointers to different address spaces. */
2234 if (POINTER_TYPE_P (orig)
2235 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2236 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2237 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2238 /* fall through */
2239
2240 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2241 case OFFSET_TYPE:
2242 if (TREE_CODE (arg) == INTEGER_CST)
2243 {
2244 tem = fold_convert_const (NOP_EXPR, type, arg);
2245 if (tem != NULL_TREE)
2246 return tem;
2247 }
2248 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2249 || TREE_CODE (orig) == OFFSET_TYPE)
2250 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2251 if (TREE_CODE (orig) == COMPLEX_TYPE)
2252 return fold_convert_loc (loc, type,
2253 fold_build1_loc (loc, REALPART_EXPR,
2254 TREE_TYPE (orig), arg));
2255 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2256 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2257 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2258
2259 case REAL_TYPE:
2260 if (TREE_CODE (arg) == INTEGER_CST)
2261 {
2262 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2263 if (tem != NULL_TREE)
2264 return tem;
2265 }
2266 else if (TREE_CODE (arg) == REAL_CST)
2267 {
2268 tem = fold_convert_const (NOP_EXPR, type, arg);
2269 if (tem != NULL_TREE)
2270 return tem;
2271 }
2272 else if (TREE_CODE (arg) == FIXED_CST)
2273 {
2274 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2275 if (tem != NULL_TREE)
2276 return tem;
2277 }
2278
2279 switch (TREE_CODE (orig))
2280 {
2281 case INTEGER_TYPE:
2282 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2283 case POINTER_TYPE: case REFERENCE_TYPE:
2284 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2285
2286 case REAL_TYPE:
2287 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2288
2289 case FIXED_POINT_TYPE:
2290 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2291
2292 case COMPLEX_TYPE:
2293 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2294 return fold_convert_loc (loc, type, tem);
2295
2296 default:
2297 gcc_unreachable ();
2298 }
2299
2300 case FIXED_POINT_TYPE:
2301 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2302 || TREE_CODE (arg) == REAL_CST)
2303 {
2304 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2305 if (tem != NULL_TREE)
2306 goto fold_convert_exit;
2307 }
2308
2309 switch (TREE_CODE (orig))
2310 {
2311 case FIXED_POINT_TYPE:
2312 case INTEGER_TYPE:
2313 case ENUMERAL_TYPE:
2314 case BOOLEAN_TYPE:
2315 case REAL_TYPE:
2316 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2317
2318 case COMPLEX_TYPE:
2319 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2320 return fold_convert_loc (loc, type, tem);
2321
2322 default:
2323 gcc_unreachable ();
2324 }
2325
2326 case COMPLEX_TYPE:
2327 switch (TREE_CODE (orig))
2328 {
2329 case INTEGER_TYPE:
2330 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2331 case POINTER_TYPE: case REFERENCE_TYPE:
2332 case REAL_TYPE:
2333 case FIXED_POINT_TYPE:
2334 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2335 fold_convert_loc (loc, TREE_TYPE (type), arg),
2336 fold_convert_loc (loc, TREE_TYPE (type),
2337 integer_zero_node));
2338 case COMPLEX_TYPE:
2339 {
2340 tree rpart, ipart;
2341
2342 if (TREE_CODE (arg) == COMPLEX_EXPR)
2343 {
2344 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2345 TREE_OPERAND (arg, 0));
2346 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2347 TREE_OPERAND (arg, 1));
2348 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2349 }
2350
2351 arg = save_expr (arg);
2352 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2353 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2354 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2355 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2356 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2357 }
2358
2359 default:
2360 gcc_unreachable ();
2361 }
2362
2363 case VECTOR_TYPE:
2364 if (integer_zerop (arg))
2365 return build_zero_vector (type);
2366 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2367 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2368 || TREE_CODE (orig) == VECTOR_TYPE);
2369 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2370
2371 case VOID_TYPE:
2372 tem = fold_ignored_result (arg);
2373 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2374
2375 default:
2376 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2377 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2378 gcc_unreachable ();
2379 }
2380 fold_convert_exit:
2381 protected_set_expr_location_unshare (tem, loc);
2382 return tem;
2383 }
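/* Illustrative sketch (not part of the original source): converting a
   scalar X to a COMPLEX_TYPE synthesizes a zero imaginary part, e.g.

     tree c = fold_convert_loc (loc, complex_double_type_node, x);

   builds COMPLEX_EXPR <(double) X, 0.0> for an integer-valued X;
   X and LOC are assumed here purely for illustration.  */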
2384 \f
2385 /* Return false if expr can be assumed not to be an lvalue, true
2386 otherwise. */
2387
2388 static bool
2389 maybe_lvalue_p (const_tree x)
2390 {
2391 /* We only need to wrap lvalue tree codes. */
2392 switch (TREE_CODE (x))
2393 {
2394 case VAR_DECL:
2395 case PARM_DECL:
2396 case RESULT_DECL:
2397 case LABEL_DECL:
2398 case FUNCTION_DECL:
2399 case SSA_NAME:
2400
2401 case COMPONENT_REF:
2402 case MEM_REF:
2403 case INDIRECT_REF:
2404 case ARRAY_REF:
2405 case ARRAY_RANGE_REF:
2406 case BIT_FIELD_REF:
2407 case OBJ_TYPE_REF:
2408
2409 case REALPART_EXPR:
2410 case IMAGPART_EXPR:
2411 case PREINCREMENT_EXPR:
2412 case PREDECREMENT_EXPR:
2413 case SAVE_EXPR:
2414 case TRY_CATCH_EXPR:
2415 case WITH_CLEANUP_EXPR:
2416 case COMPOUND_EXPR:
2417 case MODIFY_EXPR:
2418 case TARGET_EXPR:
2419 case COND_EXPR:
2420 case BIND_EXPR:
2421 break;
2422
2423 default:
2424 /* Assume the worst for front-end tree codes. */
2425 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2426 break;
2427 return false;
2428 }
2429
2430 return true;
2431 }
2432
2433 /* Return an expr equal to X but certainly not valid as an lvalue. */
2434
2435 tree
2436 non_lvalue_loc (location_t loc, tree x)
2437 {
2438 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2439 us. */
2440 if (in_gimple_form)
2441 return x;
2442
2443 if (! maybe_lvalue_p (x))
2444 return x;
2445 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2446 }
2447
2448 /* When pedantic, return an expr equal to X but certainly not valid as a
2449 pedantic lvalue. Otherwise, return X. */
2450
2451 static tree
2452 pedantic_non_lvalue_loc (location_t loc, tree x)
2453 {
2454 return protected_set_expr_location_unshare (x, loc);
2455 }
2456 \f
2457 /* Given a tree comparison code, return the code that is the logical inverse.
2458 It is generally not safe to do this for floating-point comparisons, except
2459 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2460 ERROR_MARK in this case. */
2461
2462 enum tree_code
2463 invert_tree_comparison (enum tree_code code, bool honor_nans)
2464 {
2465 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2466 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2467 return ERROR_MARK;
2468
2469 switch (code)
2470 {
2471 case EQ_EXPR:
2472 return NE_EXPR;
2473 case NE_EXPR:
2474 return EQ_EXPR;
2475 case GT_EXPR:
2476 return honor_nans ? UNLE_EXPR : LE_EXPR;
2477 case GE_EXPR:
2478 return honor_nans ? UNLT_EXPR : LT_EXPR;
2479 case LT_EXPR:
2480 return honor_nans ? UNGE_EXPR : GE_EXPR;
2481 case LE_EXPR:
2482 return honor_nans ? UNGT_EXPR : GT_EXPR;
2483 case LTGT_EXPR:
2484 return UNEQ_EXPR;
2485 case UNEQ_EXPR:
2486 return LTGT_EXPR;
2487 case UNGT_EXPR:
2488 return LE_EXPR;
2489 case UNGE_EXPR:
2490 return LT_EXPR;
2491 case UNLT_EXPR:
2492 return GE_EXPR;
2493 case UNLE_EXPR:
2494 return GT_EXPR;
2495 case ORDERED_EXPR:
2496 return UNORDERED_EXPR;
2497 case UNORDERED_EXPR:
2498 return ORDERED_EXPR;
2499 default:
2500 gcc_unreachable ();
2501 }
2502 }
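/* Illustrative sketch (not part of the original source): with NaNs
   honored the inverse of x < y must also cover the unordered case,
   since the two tests have to disagree even when an operand is NaN:

     invert_tree_comparison (LT_EXPR, true)   ==  UNGE_EXPR
     invert_tree_comparison (LT_EXPR, false)  ==  GE_EXPR

   and when flag_trapping_math is also set the function returns
   ERROR_MARK for LT_EXPR, because inverting it would change which
   inputs trap.  */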
2503
2504 /* Similar, but return the comparison that results if the operands are
2505 swapped. This is safe for floating-point. */
2506
2507 enum tree_code
2508 swap_tree_comparison (enum tree_code code)
2509 {
2510 switch (code)
2511 {
2512 case EQ_EXPR:
2513 case NE_EXPR:
2514 case ORDERED_EXPR:
2515 case UNORDERED_EXPR:
2516 case LTGT_EXPR:
2517 case UNEQ_EXPR:
2518 return code;
2519 case GT_EXPR:
2520 return LT_EXPR;
2521 case GE_EXPR:
2522 return LE_EXPR;
2523 case LT_EXPR:
2524 return GT_EXPR;
2525 case LE_EXPR:
2526 return GE_EXPR;
2527 case UNGT_EXPR:
2528 return UNLT_EXPR;
2529 case UNGE_EXPR:
2530 return UNLE_EXPR;
2531 case UNLT_EXPR:
2532 return UNGT_EXPR;
2533 case UNLE_EXPR:
2534 return UNGE_EXPR;
2535 default:
2536 gcc_unreachable ();
2537 }
2538 }
2539
2540
2541 /* Convert a comparison tree code from an enum tree_code representation
2542 into a compcode bit-based encoding. This function is the inverse of
2543 compcode_to_comparison. */
2544
2545 static enum comparison_code
2546 comparison_to_compcode (enum tree_code code)
2547 {
2548 switch (code)
2549 {
2550 case LT_EXPR:
2551 return COMPCODE_LT;
2552 case EQ_EXPR:
2553 return COMPCODE_EQ;
2554 case LE_EXPR:
2555 return COMPCODE_LE;
2556 case GT_EXPR:
2557 return COMPCODE_GT;
2558 case NE_EXPR:
2559 return COMPCODE_NE;
2560 case GE_EXPR:
2561 return COMPCODE_GE;
2562 case ORDERED_EXPR:
2563 return COMPCODE_ORD;
2564 case UNORDERED_EXPR:
2565 return COMPCODE_UNORD;
2566 case UNLT_EXPR:
2567 return COMPCODE_UNLT;
2568 case UNEQ_EXPR:
2569 return COMPCODE_UNEQ;
2570 case UNLE_EXPR:
2571 return COMPCODE_UNLE;
2572 case UNGT_EXPR:
2573 return COMPCODE_UNGT;
2574 case LTGT_EXPR:
2575 return COMPCODE_LTGT;
2576 case UNGE_EXPR:
2577 return COMPCODE_UNGE;
2578 default:
2579 gcc_unreachable ();
2580 }
2581 }
2582
2583 /* Convert a compcode bit-based encoding of a comparison operator back
2584 to GCC's enum tree_code representation. This function is the
2585 inverse of comparison_to_compcode. */
2586
2587 static enum tree_code
2588 compcode_to_comparison (enum comparison_code code)
2589 {
2590 switch (code)
2591 {
2592 case COMPCODE_LT:
2593 return LT_EXPR;
2594 case COMPCODE_EQ:
2595 return EQ_EXPR;
2596 case COMPCODE_LE:
2597 return LE_EXPR;
2598 case COMPCODE_GT:
2599 return GT_EXPR;
2600 case COMPCODE_NE:
2601 return NE_EXPR;
2602 case COMPCODE_GE:
2603 return GE_EXPR;
2604 case COMPCODE_ORD:
2605 return ORDERED_EXPR;
2606 case COMPCODE_UNORD:
2607 return UNORDERED_EXPR;
2608 case COMPCODE_UNLT:
2609 return UNLT_EXPR;
2610 case COMPCODE_UNEQ:
2611 return UNEQ_EXPR;
2612 case COMPCODE_UNLE:
2613 return UNLE_EXPR;
2614 case COMPCODE_UNGT:
2615 return UNGT_EXPR;
2616 case COMPCODE_LTGT:
2617 return LTGT_EXPR;
2618 case COMPCODE_UNGE:
2619 return UNGE_EXPR;
2620 default:
2621 gcc_unreachable ();
2622 }
2623 }
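/* Worked example of the bit encoding (illustrative, not part of the
   original source): the low three bits stand for LT, EQ and GT, with
   the UNORD bit above them, so set operations on comparisons become
   bitwise operations on compcodes:

     COMPCODE_LT | COMPCODE_EQ  ==  1 | 2  ==  3  ==  COMPCODE_LE
     COMPCODE_LE & COMPCODE_GE  ==  3 & 6  ==  2  ==  COMPCODE_EQ
*/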
2624
2625 /* Return a tree for the comparison which is the combination of
2626 doing the AND or OR (depending on CODE) of the two operations LCODE
2627 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2628 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2629 if this makes the transformation invalid. */
2630
2631 tree
2632 combine_comparisons (location_t loc,
2633 enum tree_code code, enum tree_code lcode,
2634 enum tree_code rcode, tree truth_type,
2635 tree ll_arg, tree lr_arg)
2636 {
2637 bool honor_nans = HONOR_NANS (ll_arg);
2638 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2639 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2640 int compcode;
2641
2642 switch (code)
2643 {
2644 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2645 compcode = lcompcode & rcompcode;
2646 break;
2647
2648 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2649 compcode = lcompcode | rcompcode;
2650 break;
2651
2652 default:
2653 return NULL_TREE;
2654 }
2655
2656 if (!honor_nans)
2657 {
2658 /* Eliminate unordered comparisons, as well as LTGT and ORD
2659 which are not used unless the mode has NaNs. */
2660 compcode &= ~COMPCODE_UNORD;
2661 if (compcode == COMPCODE_LTGT)
2662 compcode = COMPCODE_NE;
2663 else if (compcode == COMPCODE_ORD)
2664 compcode = COMPCODE_TRUE;
2665 }
2666 else if (flag_trapping_math)
2667 {
2668 /* Check that the original operation and the optimized ones will trap
2669 under the same condition. */
2670 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2671 && (lcompcode != COMPCODE_EQ)
2672 && (lcompcode != COMPCODE_ORD);
2673 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2674 && (rcompcode != COMPCODE_EQ)
2675 && (rcompcode != COMPCODE_ORD);
2676 bool trap = (compcode & COMPCODE_UNORD) == 0
2677 && (compcode != COMPCODE_EQ)
2678 && (compcode != COMPCODE_ORD);
2679
2680 /* In a short-circuited boolean expression the LHS might be
2681 such that the RHS, if evaluated, will never trap. For
2682 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2683 if neither x nor y is NaN. (This is a mixed blessing: for
2684 example, the expression above will never trap, hence
2685 optimizing it to x < y would be invalid). */
2686 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2687 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2688 rtrap = false;
2689
2690 /* If the comparison was short-circuited, and only the RHS
2691 trapped, we may now generate a spurious trap. */
2692 if (rtrap && !ltrap
2693 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2694 return NULL_TREE;
2695
2696 /* If we changed the conditions that cause a trap, we lose. */
2697 if ((ltrap || rtrap) != trap)
2698 return NULL_TREE;
2699 }
2700
2701 if (compcode == COMPCODE_TRUE)
2702 return constant_boolean_node (true, truth_type);
2703 else if (compcode == COMPCODE_FALSE)
2704 return constant_boolean_node (false, truth_type);
2705 else
2706 {
2707 enum tree_code tcode;
2708
2709 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2710 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2711 }
2712 }
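/* Illustrative sketch (not part of the original source): for integral
   operands (no NaNs) the compcode arithmetic above folds

     (x <= y) && (x >= y)   into   x == y
     (x < y)  || (x == y)   into   x <= y

   e.g. combine_comparisons (loc, TRUTH_ANDIF_EXPR, LE_EXPR, GE_EXPR,
   boolean_type_node, x, y) returns the tree x == y.  With NaNs and
   flag_trapping_math the same call instead returns NULL_TREE, since
   folding would change the set of trapping inputs.  */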
2713 \f
2714 /* Return nonzero if two operands (typically of the same tree node)
2715 are necessarily equal. FLAGS modifies behavior as follows:
2716
2717 If OEP_ONLY_CONST is set, only return nonzero for constants.
2718 This function tests whether the operands are indistinguishable;
2719 it does not test whether they are equal using C's == operation.
2720 The distinction is important for IEEE floating point, because
2721 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2722 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2723
2724 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2725 even though it may hold multiple values during a function.
2726 This is because a GCC tree node guarantees that nothing else is
2727 executed between the evaluation of its "operands" (which may often
2728 be evaluated in arbitrary order). Hence if the operands themselves
2729 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2730 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2731 unset means assuming isochronic (or instantaneous) tree equivalence.
2732 Unless comparing arbitrary expression trees, such as from different
2733 statements, this flag can usually be left unset.
2734
2735 If OEP_PURE_SAME is set, then pure functions with identical arguments
2736 are considered the same. It is used when the caller has other ways
2737 to ensure that global memory is unchanged in between.
2738
2739 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2740 not values of expressions.
2741
2742 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2743	   any operand with side effects.  This is unnecessarily conservative in the
2744	   case where we know that arg0 and arg1 are in disjoint code paths (such as
2745	   in the ?: operator).  In addition, OEP_MATCH_SIDE_EFFECTS is used when comparing
2746 addresses with TREE_CONSTANT flag set so we know that &var == &var
2747 even if var is volatile. */
2748
2749 int
2750 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2751 {
2752 /* If either is ERROR_MARK, they aren't equal. */
2753 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2754 || TREE_TYPE (arg0) == error_mark_node
2755 || TREE_TYPE (arg1) == error_mark_node)
2756 return 0;
2757
2758 /* Similar, if either does not have a type (like a released SSA name),
2759 they aren't equal. */
2760 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2761 return 0;
2762
2763 /* We cannot consider pointers to different address space equal. */
2764 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2765 && POINTER_TYPE_P (TREE_TYPE (arg1))
2766 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2767 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2768 return 0;
2769
2770 /* Check equality of integer constants before bailing out due to
2771 precision differences. */
2772 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2773 {
2774 /* Address of INTEGER_CST is not defined; check that we did not forget
2775 to drop the OEP_ADDRESS_OF flags. */
2776 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2777 return tree_int_cst_equal (arg0, arg1);
2778 }
2779
2780 if (!(flags & OEP_ADDRESS_OF))
2781 {
2782 /* If both types don't have the same signedness, then we can't consider
2783 them equal. We must check this before the STRIP_NOPS calls
2784 because they may change the signedness of the arguments. As pointers
2785 strictly don't have a signedness, require either two pointers or
2786 two non-pointers as well. */
2787 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2788 || POINTER_TYPE_P (TREE_TYPE (arg0))
2789 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2790 return 0;
2791
2792 /* If both types don't have the same precision, then it is not safe
2793 to strip NOPs. */
2794 if (element_precision (TREE_TYPE (arg0))
2795 != element_precision (TREE_TYPE (arg1)))
2796 return 0;
2797
2798 STRIP_NOPS (arg0);
2799 STRIP_NOPS (arg1);
2800 }
2801 #if 0
2802	  /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR.  Enable the
2803 sanity check once the issue is solved. */
2804 else
2805 /* Addresses of conversions and SSA_NAMEs (and many other things)
2806 are not defined. Check that we did not forget to drop the
2807 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2808 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2809 && TREE_CODE (arg0) != SSA_NAME);
2810 #endif
2811
2812 /* In case both args are comparisons but with different comparison
2813 code, try to swap the comparison operands of one arg to produce
2814 a match and compare that variant. */
2815 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2816 && COMPARISON_CLASS_P (arg0)
2817 && COMPARISON_CLASS_P (arg1))
2818 {
2819 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2820
2821 if (TREE_CODE (arg0) == swap_code)
2822 return operand_equal_p (TREE_OPERAND (arg0, 0),
2823 TREE_OPERAND (arg1, 1), flags)
2824 && operand_equal_p (TREE_OPERAND (arg0, 1),
2825 TREE_OPERAND (arg1, 0), flags);
2826 }
2827
2828 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2829 {
2830 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2831 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2832 ;
2833 else if (flags & OEP_ADDRESS_OF)
2834 {
2835	  /* If we are interested in comparing addresses, ignore
2836 MEM_REF wrappings of the base that can appear just for
2837 TBAA reasons. */
2838 if (TREE_CODE (arg0) == MEM_REF
2839 && DECL_P (arg1)
2840 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2841 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2842 && integer_zerop (TREE_OPERAND (arg0, 1)))
2843 return 1;
2844 else if (TREE_CODE (arg1) == MEM_REF
2845 && DECL_P (arg0)
2846 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2847 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2848 && integer_zerop (TREE_OPERAND (arg1, 1)))
2849 return 1;
2850 return 0;
2851 }
2852 else
2853 return 0;
2854 }
2855
2856	  /* When not checking addresses, this is needed for conversions and for
2857 COMPONENT_REF. Might as well play it safe and always test this. */
2858 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2859 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2860 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2861 && !(flags & OEP_ADDRESS_OF)))
2862 return 0;
2863
2864 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2865 We don't care about side effects in that case because the SAVE_EXPR
2866 takes care of that for us. In all other cases, two expressions are
2867 equal if they have no side effects. If we have two identical
2868 expressions with side effects that should be treated the same due
2869 to the only side effects being identical SAVE_EXPR's, that will
2870 be detected in the recursive calls below.
2871 If we are taking an invariant address of two identical objects
2872 they are necessarily equal as well. */
2873 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2874 && (TREE_CODE (arg0) == SAVE_EXPR
2875 || (flags & OEP_MATCH_SIDE_EFFECTS)
2876 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2877 return 1;
2878
2879 /* Next handle constant cases, those for which we can return 1 even
2880 if ONLY_CONST is set. */
2881 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2882 switch (TREE_CODE (arg0))
2883 {
2884 case INTEGER_CST:
2885 return tree_int_cst_equal (arg0, arg1);
2886
2887 case FIXED_CST:
2888 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2889 TREE_FIXED_CST (arg1));
2890
2891 case REAL_CST:
2892 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2893 return 1;
2894
2895
2896 if (!HONOR_SIGNED_ZEROS (arg0))
2897 {
2898 /* If we do not distinguish between signed and unsigned zero,
2899 consider them equal. */
2900 if (real_zerop (arg0) && real_zerop (arg1))
2901 return 1;
2902 }
2903 return 0;
2904
2905 case VECTOR_CST:
2906 {
2907 unsigned i;
2908
2909 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2910 return 0;
2911
2912 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2913 {
2914 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2915 VECTOR_CST_ELT (arg1, i), flags))
2916 return 0;
2917 }
2918 return 1;
2919 }
2920
2921 case COMPLEX_CST:
2922 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2923 flags)
2924 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2925 flags));
2926
2927 case STRING_CST:
2928 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2929 && ! memcmp (TREE_STRING_POINTER (arg0),
2930 TREE_STRING_POINTER (arg1),
2931 TREE_STRING_LENGTH (arg0)));
2932
2933 case ADDR_EXPR:
2934 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2935 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2936 flags | OEP_ADDRESS_OF
2937 | OEP_MATCH_SIDE_EFFECTS);
2938 case CONSTRUCTOR:
2939 /* In GIMPLE empty constructors are allowed in initializers of
2940 aggregates. */
2941 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2942 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2943 default:
2944 break;
2945 }
2946
2947 if (flags & OEP_ONLY_CONST)
2948 return 0;
2949
2950 /* Define macros to test an operand from arg0 and arg1 for equality and a
2951 variant that allows null and views null as being different from any
2952	   non-null value.  In the latter case, if either is null, they both
2953 must be; otherwise, do the normal comparison. */
2954 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2955 TREE_OPERAND (arg1, N), flags)
2956
2957 #define OP_SAME_WITH_NULL(N) \
2958 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2959 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2960
2961 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2962 {
2963 case tcc_unary:
2964 /* Two conversions are equal only if signedness and modes match. */
2965 switch (TREE_CODE (arg0))
2966 {
2967 CASE_CONVERT:
2968 case FIX_TRUNC_EXPR:
2969 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2970 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2971 return 0;
2972 break;
2973 default:
2974 break;
2975 }
2976
2977 return OP_SAME (0);
2978
2979
2980 case tcc_comparison:
2981 case tcc_binary:
2982 if (OP_SAME (0) && OP_SAME (1))
2983 return 1;
2984
2985 /* For commutative ops, allow the other order. */
2986 return (commutative_tree_code (TREE_CODE (arg0))
2987 && operand_equal_p (TREE_OPERAND (arg0, 0),
2988 TREE_OPERAND (arg1, 1), flags)
2989 && operand_equal_p (TREE_OPERAND (arg0, 1),
2990 TREE_OPERAND (arg1, 0), flags));
2991
2992 case tcc_reference:
2993 /* If either of the pointer (or reference) expressions we are
2994 dereferencing contain a side effect, these cannot be equal,
2995 but their addresses can be. */
2996 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
2997 && (TREE_SIDE_EFFECTS (arg0)
2998 || TREE_SIDE_EFFECTS (arg1)))
2999 return 0;
3000
3001 switch (TREE_CODE (arg0))
3002 {
3003 case INDIRECT_REF:
3004 if (!(flags & OEP_ADDRESS_OF)
3005 && (TYPE_ALIGN (TREE_TYPE (arg0))
3006 != TYPE_ALIGN (TREE_TYPE (arg1))))
3007 return 0;
3008 flags &= ~OEP_ADDRESS_OF;
3009 return OP_SAME (0);
3010
3011 case IMAGPART_EXPR:
3012 /* Require the same offset. */
3013 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3014 TYPE_SIZE (TREE_TYPE (arg1)),
3015 flags & ~OEP_ADDRESS_OF))
3016 return 0;
3017
3018 /* Fallthru. */
3019 case REALPART_EXPR:
3020 case VIEW_CONVERT_EXPR:
3021 return OP_SAME (0);
3022
3023 case TARGET_MEM_REF:
3024 case MEM_REF:
3025 if (!(flags & OEP_ADDRESS_OF))
3026 {
3027 /* Require equal access sizes */
3028 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3029 && (!TYPE_SIZE (TREE_TYPE (arg0))
3030 || !TYPE_SIZE (TREE_TYPE (arg1))
3031 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3032 TYPE_SIZE (TREE_TYPE (arg1)),
3033 flags)))
3034 return 0;
3035 /* Verify that access happens in similar types. */
3036 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3037 return 0;
3038 /* Verify that accesses are TBAA compatible. */
3039 if (!alias_ptr_types_compatible_p
3040 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3041 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3042 || (MR_DEPENDENCE_CLIQUE (arg0)
3043 != MR_DEPENDENCE_CLIQUE (arg1))
3044 || (MR_DEPENDENCE_BASE (arg0)
3045 != MR_DEPENDENCE_BASE (arg1)))
3046 return 0;
3047 /* Verify that alignment is compatible. */
3048 if (TYPE_ALIGN (TREE_TYPE (arg0))
3049 != TYPE_ALIGN (TREE_TYPE (arg1)))
3050 return 0;
3051 }
3052 flags &= ~OEP_ADDRESS_OF;
3053 return (OP_SAME (0) && OP_SAME (1)
3054	      /* TARGET_MEM_REFs require equal extra operands.  */
3055 && (TREE_CODE (arg0) != TARGET_MEM_REF
3056 || (OP_SAME_WITH_NULL (2)
3057 && OP_SAME_WITH_NULL (3)
3058 && OP_SAME_WITH_NULL (4))));
3059
3060 case ARRAY_REF:
3061 case ARRAY_RANGE_REF:
3062 if (!OP_SAME (0))
3063 return 0;
3064 flags &= ~OEP_ADDRESS_OF;
3065	      /* Compare the array index by value first if it is constant, as we
3066	         may have different types but the same value here.  */
3067 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3068 TREE_OPERAND (arg1, 1))
3069 || OP_SAME (1))
3070 && OP_SAME_WITH_NULL (2)
3071 && OP_SAME_WITH_NULL (3)
3072 /* Compare low bound and element size as with OEP_ADDRESS_OF
3073 we have to account for the offset of the ref. */
3074 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3075 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3076 || (operand_equal_p (array_ref_low_bound
3077 (CONST_CAST_TREE (arg0)),
3078 array_ref_low_bound
3079 (CONST_CAST_TREE (arg1)), flags)
3080 && operand_equal_p (array_ref_element_size
3081 (CONST_CAST_TREE (arg0)),
3082 array_ref_element_size
3083 (CONST_CAST_TREE (arg1)),
3084 flags))));
3085
3086 case COMPONENT_REF:
3087 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3088 may be NULL when we're called to compare MEM_EXPRs. */
3089 if (!OP_SAME_WITH_NULL (0)
3090 || !OP_SAME (1))
3091 return 0;
3092 flags &= ~OEP_ADDRESS_OF;
3093 return OP_SAME_WITH_NULL (2);
3094
3095 case BIT_FIELD_REF:
3096 if (!OP_SAME (0))
3097 return 0;
3098 flags &= ~OEP_ADDRESS_OF;
3099 return OP_SAME (1) && OP_SAME (2);
3100
3101 default:
3102 return 0;
3103 }
3104
3105 case tcc_expression:
3106 switch (TREE_CODE (arg0))
3107 {
3108 case ADDR_EXPR:
3109 /* Be sure we pass right ADDRESS_OF flag. */
3110 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3111 return operand_equal_p (TREE_OPERAND (arg0, 0),
3112 TREE_OPERAND (arg1, 0),
3113 flags | OEP_ADDRESS_OF);
3114
3115 case TRUTH_NOT_EXPR:
3116 return OP_SAME (0);
3117
3118 case TRUTH_ANDIF_EXPR:
3119 case TRUTH_ORIF_EXPR:
3120 return OP_SAME (0) && OP_SAME (1);
3121
3122 case FMA_EXPR:
3123 case WIDEN_MULT_PLUS_EXPR:
3124 case WIDEN_MULT_MINUS_EXPR:
3125 if (!OP_SAME (2))
3126 return 0;
3127	  /* The multiplication operands are commutative.  */
3128 /* FALLTHRU */
3129
3130 case TRUTH_AND_EXPR:
3131 case TRUTH_OR_EXPR:
3132 case TRUTH_XOR_EXPR:
3133 if (OP_SAME (0) && OP_SAME (1))
3134 return 1;
3135
3136 /* Otherwise take into account this is a commutative operation. */
3137 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3138 TREE_OPERAND (arg1, 1), flags)
3139 && operand_equal_p (TREE_OPERAND (arg0, 1),
3140 TREE_OPERAND (arg1, 0), flags));
3141
3142 case COND_EXPR:
3143 if (! OP_SAME (1) || ! OP_SAME (2))
3144 return 0;
3145 flags &= ~OEP_ADDRESS_OF;
3146 return OP_SAME (0);
3147
3148 case VEC_COND_EXPR:
3149 case DOT_PROD_EXPR:
3150 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3151
3152 default:
3153 return 0;
3154 }
3155
3156 case tcc_vl_exp:
3157 switch (TREE_CODE (arg0))
3158 {
3159 case CALL_EXPR:
3160 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3161 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3162	  /* If the two CALL_EXPRs are not both internal or both normal
3163	     function calls, then they are not equal.  */
3164 return 0;
3165 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3166 {
3167 /* If the CALL_EXPRs call different internal functions, then they
3168 are not equal. */
3169 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3170 return 0;
3171 }
3172 else
3173 {
3174 /* If the CALL_EXPRs call different functions, then they are not
3175 equal. */
3176 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3177 flags))
3178 return 0;
3179 }
3180
3181 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3182 {
3183 unsigned int cef = call_expr_flags (arg0);
3184 if (flags & OEP_PURE_SAME)
3185 cef &= ECF_CONST | ECF_PURE;
3186 else
3187 cef &= ECF_CONST;
3188 if (!cef)
3189 return 0;
3190 }
3191
3192 /* Now see if all the arguments are the same. */
3193 {
3194 const_call_expr_arg_iterator iter0, iter1;
3195 const_tree a0, a1;
3196 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3197 a1 = first_const_call_expr_arg (arg1, &iter1);
3198 a0 && a1;
3199 a0 = next_const_call_expr_arg (&iter0),
3200 a1 = next_const_call_expr_arg (&iter1))
3201 if (! operand_equal_p (a0, a1, flags))
3202 return 0;
3203
3204 /* If we get here and both argument lists are exhausted
3205 then the CALL_EXPRs are equal. */
3206 return ! (a0 || a1);
3207 }
3208 default:
3209 return 0;
3210 }
3211
3212 case tcc_declaration:
3213 /* Consider __builtin_sqrt equal to sqrt. */
3214 return (TREE_CODE (arg0) == FUNCTION_DECL
3215 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3216 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3217 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3218
3219 case tcc_exceptional:
3220 if (TREE_CODE (arg0) == CONSTRUCTOR)
3221 {
3222 /* In GIMPLE constructors are used only to build vectors from
3223 elements. Individual elements in the constructor must be
3224 indexed in increasing order and form an initial sequence.
3225
3226	     We make no effort to compare constructors in GENERIC
3227	     (see sem_variable::equals in ipa-icf, which can do so for
3228	     constants).  */
3229 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3230 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3231 return 0;
3232
3233	  /* Be sure that vectors constructed have the same representation.
3234	     We only checked that element precision and modes match; vectors
3235	     may be BLKmode, so also check that the number of parts
3236	     matches.  */
3237 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3238 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3239 return 0;
3240
3241 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3242 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3243 unsigned int len = vec_safe_length (v0);
3244
3245 if (len != vec_safe_length (v1))
3246 return 0;
3247
3248 for (unsigned int i = 0; i < len; i++)
3249 {
3250 constructor_elt *c0 = &(*v0)[i];
3251 constructor_elt *c1 = &(*v1)[i];
3252
3253 if (!operand_equal_p (c0->value, c1->value, flags)
3254 /* In GIMPLE the indexes can be either NULL or matching i.
3255 Double check this so we won't get false
3256 positives for GENERIC. */
3257 || (c0->index
3258 && (TREE_CODE (c0->index) != INTEGER_CST
3259 || !compare_tree_int (c0->index, i)))
3260 || (c1->index
3261 && (TREE_CODE (c1->index) != INTEGER_CST
3262 || !compare_tree_int (c1->index, i))))
3263 return 0;
3264 }
3265 return 1;
3266 }
3267 return 0;
3268
3269 default:
3270 return 0;
3271 }
3272
3273 #undef OP_SAME
3274 #undef OP_SAME_WITH_NULL
3275 }
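/* Usage sketch (illustrative, not part of the original source):

     if (operand_equal_p (arg0, arg1, 0))
       ... arg0 and arg1 are interchangeable as values ...

   Passing OEP_ONLY_CONST restricts the answer to constants, while
   OEP_ADDRESS_OF (usually together with OEP_MATCH_SIDE_EFFECTS)
   compares the expressions as addresses, so that &var == &var holds
   even when var is volatile.  */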
3276 \f
3277 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3278 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3279
3280 When in doubt, return 0. */
3281
3282 static int
3283 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3284 {
3285 int unsignedp1, unsignedpo;
3286 tree primarg0, primarg1, primother;
3287 unsigned int correct_width;
3288
3289 if (operand_equal_p (arg0, arg1, 0))
3290 return 1;
3291
3292 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3293 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3294 return 0;
3295
3296 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3297 and see if the inner values are the same. This removes any
3298 signedness comparison, which doesn't matter here. */
3299 primarg0 = arg0, primarg1 = arg1;
3300 STRIP_NOPS (primarg0);
3301 STRIP_NOPS (primarg1);
3302 if (operand_equal_p (primarg0, primarg1, 0))
3303 return 1;
3304
3305 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3306 actual comparison operand, ARG0.
3307
3308 First throw away any conversions to wider types
3309 already present in the operands. */
3310
3311 primarg1 = get_narrower (arg1, &unsignedp1);
3312 primother = get_narrower (other, &unsignedpo);
3313
3314 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3315 if (unsignedp1 == unsignedpo
3316 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3317 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3318 {
3319 tree type = TREE_TYPE (arg0);
3320
3321	      /* Make sure the shorter operand is extended the right way
3322 to match the longer operand. */
3323 primarg1 = fold_convert (signed_or_unsigned_type_for
3324 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3325
3326 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3327 return 1;
3328 }
3329
3330 return 0;
3331 }
3332 \f
3333 /* See if ARG is an expression that is either a comparison or is performing
3334 arithmetic on comparisons. The comparisons must only be comparing
3335 two different values, which will be stored in *CVAL1 and *CVAL2; if
3336 they are nonzero it means that some operands have already been found.
3337 No variables may be used anywhere else in the expression except in the
3338 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3339 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3340
3341 If this is true, return 1. Otherwise, return zero. */
3342
3343 static int
3344 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3345 {
3346 enum tree_code code = TREE_CODE (arg);
3347 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3348
3349 /* We can handle some of the tcc_expression cases here. */
3350 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3351 tclass = tcc_unary;
3352 else if (tclass == tcc_expression
3353 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3354 || code == COMPOUND_EXPR))
3355 tclass = tcc_binary;
3356
3357 else if (tclass == tcc_expression && code == SAVE_EXPR
3358 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3359 {
3360 /* If we've already found a CVAL1 or CVAL2, this expression is
3361	 too complex to handle.  */
3362 if (*cval1 || *cval2)
3363 return 0;
3364
3365 tclass = tcc_unary;
3366 *save_p = 1;
3367 }
3368
3369 switch (tclass)
3370 {
3371 case tcc_unary:
3372 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3373
3374 case tcc_binary:
3375 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3376 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3377 cval1, cval2, save_p));
3378
3379 case tcc_constant:
3380 return 1;
3381
3382 case tcc_expression:
3383 if (code == COND_EXPR)
3384 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3385 cval1, cval2, save_p)
3386 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3387 cval1, cval2, save_p)
3388 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3389 cval1, cval2, save_p));
3390 return 0;
3391
3392 case tcc_comparison:
3393 /* First see if we can handle the first operand, then the second. For
3394 the second operand, we know *CVAL1 can't be zero. It must be that
3395 one side of the comparison is each of the values; test for the
3396 case where this isn't true by failing if the two operands
3397 are the same. */
3398
3399 if (operand_equal_p (TREE_OPERAND (arg, 0),
3400 TREE_OPERAND (arg, 1), 0))
3401 return 0;
3402
3403 if (*cval1 == 0)
3404 *cval1 = TREE_OPERAND (arg, 0);
3405 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3406 ;
3407 else if (*cval2 == 0)
3408 *cval2 = TREE_OPERAND (arg, 0);
3409 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3410 ;
3411 else
3412 return 0;
3413
3414 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3415 ;
3416 else if (*cval2 == 0)
3417 *cval2 = TREE_OPERAND (arg, 1);
3418 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3419 ;
3420 else
3421 return 0;
3422
3423 return 1;
3424
3425 default:
3426 return 0;
3427 }
3428 }
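/* Illustrative sketch (not part of the original source): for
   ARG == (x < y) || (y < x) the walk records *CVAL1 == x and
   *CVAL2 == y from the first comparison, matches both operands of the
   second comparison against the recorded values, and returns 1;
   (x < y) || (z < x) fails because z matches neither recorded
   value.  */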
3429 \f
3430 /* ARG is a tree that is known to contain just arithmetic operations and
3431 comparisons. Evaluate the operations in the tree substituting NEW0 for
3432 any occurrence of OLD0 as an operand of a comparison and likewise for
3433 NEW1 and OLD1. */
3434
3435 static tree
3436 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3437 tree old1, tree new1)
3438 {
3439 tree type = TREE_TYPE (arg);
3440 enum tree_code code = TREE_CODE (arg);
3441 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3442
3443 /* We can handle some of the tcc_expression cases here. */
3444 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3445 tclass = tcc_unary;
3446 else if (tclass == tcc_expression
3447 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3448 tclass = tcc_binary;
3449
3450 switch (tclass)
3451 {
3452 case tcc_unary:
3453 return fold_build1_loc (loc, code, type,
3454 eval_subst (loc, TREE_OPERAND (arg, 0),
3455 old0, new0, old1, new1));
3456
3457 case tcc_binary:
3458 return fold_build2_loc (loc, code, type,
3459 eval_subst (loc, TREE_OPERAND (arg, 0),
3460 old0, new0, old1, new1),
3461 eval_subst (loc, TREE_OPERAND (arg, 1),
3462 old0, new0, old1, new1));
3463
3464 case tcc_expression:
3465 switch (code)
3466 {
3467 case SAVE_EXPR:
3468 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3469 old1, new1);
3470
3471 case COMPOUND_EXPR:
3472 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3473 old1, new1);
3474
3475 case COND_EXPR:
3476 return fold_build3_loc (loc, code, type,
3477 eval_subst (loc, TREE_OPERAND (arg, 0),
3478 old0, new0, old1, new1),
3479 eval_subst (loc, TREE_OPERAND (arg, 1),
3480 old0, new0, old1, new1),
3481 eval_subst (loc, TREE_OPERAND (arg, 2),
3482 old0, new0, old1, new1));
3483 default:
3484 break;
3485 }
3486 /* Fall through - ??? */
3487
3488 case tcc_comparison:
3489 {
3490 tree arg0 = TREE_OPERAND (arg, 0);
3491 tree arg1 = TREE_OPERAND (arg, 1);
3492
3493 /* We need to check both for exact equality and tree equality. The
3494 former will be true if the operand has a side-effect. In that
3495 case, we know the operand occurred exactly once. */
3496
3497 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3498 arg0 = new0;
3499 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3500 arg0 = new1;
3501
3502 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3503 arg1 = new0;
3504 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3505 arg1 = new1;
3506
3507 return fold_build2_loc (loc, code, type, arg0, arg1);
3508 }
3509
3510 default:
3511 return arg;
3512 }
3513 }
3514 \f
3515 /* Return a tree for the case when the result of an expression is RESULT
3516 converted to TYPE and OMITTED was previously an operand of the expression
3517 but is now not needed (e.g., we folded OMITTED * 0).
3518
3519 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3520 the conversion of RESULT to TYPE. */
3521
3522 tree
3523 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3524 {
3525 tree t = fold_convert_loc (loc, type, result);
3526
3527 /* If the resulting operand is an empty statement, just return the omitted
3528	     statement cast to void.  */
3529 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3530 return build1_loc (loc, NOP_EXPR, void_type_node,
3531 fold_ignored_result (omitted));
3532
3533 if (TREE_SIDE_EFFECTS (omitted))
3534 return build2_loc (loc, COMPOUND_EXPR, type,
3535 fold_ignored_result (omitted), t);
3536
3537 return non_lvalue_loc (loc, t);
3538 }
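/* Illustrative sketch (not part of the original source): when folding
   f () * 0 the product is 0 but the call must still be evaluated for
   its side effects, so

     omit_one_operand_loc (loc, type, integer_zero_node, call);

   produces the COMPOUND_EXPR (f (), 0), whereas a side-effect-free
   OMITTED is simply dropped; CALL and LOC are assumed here for
   illustration.  */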
3539
3540 /* Return a tree for the case when the result of an expression is RESULT
3541 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3542 of the expression but are now not needed.
3543
3544 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3545 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3546 evaluated before OMITTED2. Otherwise, if neither has side effects,
3547 just do the conversion of RESULT to TYPE. */
3548
3549 tree
3550 omit_two_operands_loc (location_t loc, tree type, tree result,
3551 tree omitted1, tree omitted2)
3552 {
3553 tree t = fold_convert_loc (loc, type, result);
3554
3555 if (TREE_SIDE_EFFECTS (omitted2))
3556 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3557 if (TREE_SIDE_EFFECTS (omitted1))
3558 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3559
3560 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3561 }
3562
3563 \f
3564 /* Return a simplified tree node for the truth-negation of ARG. This
3565 never alters ARG itself. We assume that ARG is an operation that
3566 returns a truth value (0 or 1).
3567
3568 FIXME: one would think we would fold the result, but it causes
3569 problems with the dominator optimizer. */
3570
3571 static tree
3572 fold_truth_not_expr (location_t loc, tree arg)
3573 {
3574 tree type = TREE_TYPE (arg);
3575 enum tree_code code = TREE_CODE (arg);
3576 location_t loc1, loc2;
3577
3578 /* If this is a comparison, we can simply invert it, except for
3579 floating-point non-equality comparisons, in which case we just
3580 enclose a TRUTH_NOT_EXPR around what we have. */
3581
3582 if (TREE_CODE_CLASS (code) == tcc_comparison)
3583 {
3584 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3585 if (FLOAT_TYPE_P (op_type)
3586 && flag_trapping_math
3587 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3588 && code != NE_EXPR && code != EQ_EXPR)
3589 return NULL_TREE;
3590
3591 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3592 if (code == ERROR_MARK)
3593 return NULL_TREE;
3594
3595 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3596 TREE_OPERAND (arg, 1));
3597 if (TREE_NO_WARNING (arg))
3598 TREE_NO_WARNING (ret) = 1;
3599 return ret;
3600 }
3601
3602 switch (code)
3603 {
3604 case INTEGER_CST:
3605 return constant_boolean_node (integer_zerop (arg), type);
3606
3607 case TRUTH_AND_EXPR:
3608 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3609 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3610 return build2_loc (loc, TRUTH_OR_EXPR, type,
3611 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3612 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3613
3614 case TRUTH_OR_EXPR:
3615 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3616 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3617 return build2_loc (loc, TRUTH_AND_EXPR, type,
3618 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3619 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3620
3621 case TRUTH_XOR_EXPR:
3622 /* Here we can invert either operand. We invert the first operand
3623 unless the second operand is a TRUTH_NOT_EXPR in which case our
3624 result is the XOR of the first operand with the inside of the
3625 negation of the second operand. */
3626
3627 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3628 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3629 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3630 else
3631 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3632 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3633 TREE_OPERAND (arg, 1));
3634
3635 case TRUTH_ANDIF_EXPR:
3636 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3637 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3638 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3639 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3640 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3641
3642 case TRUTH_ORIF_EXPR:
3643 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3644 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3645 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3646 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3647 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3648
3649 case TRUTH_NOT_EXPR:
3650 return TREE_OPERAND (arg, 0);
3651
3652 case COND_EXPR:
3653 {
3654 tree arg1 = TREE_OPERAND (arg, 1);
3655 tree arg2 = TREE_OPERAND (arg, 2);
3656
3657 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3658 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3659
3660 /* A COND_EXPR may have a throw as one operand, which
3661 then has void type. Just leave void operands
3662 as they are. */
3663 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3664 VOID_TYPE_P (TREE_TYPE (arg1))
3665 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3666 VOID_TYPE_P (TREE_TYPE (arg2))
3667 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3668 }
3669
3670 case COMPOUND_EXPR:
3671 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3672 return build2_loc (loc, COMPOUND_EXPR, type,
3673 TREE_OPERAND (arg, 0),
3674 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3675
3676 case NON_LVALUE_EXPR:
3677 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3678 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3679
3680 CASE_CONVERT:
3681 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3682 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3683
3684 /* ... fall through ... */
3685
3686 case FLOAT_EXPR:
3687 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3688 return build1_loc (loc, TREE_CODE (arg), type,
3689 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3690
3691 case BIT_AND_EXPR:
3692 if (!integer_onep (TREE_OPERAND (arg, 1)))
3693 return NULL_TREE;
3694 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3695
3696 case SAVE_EXPR:
3697 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3698
3699 case CLEANUP_POINT_EXPR:
3700 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3701 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3702 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3703
3704 default:
3705 return NULL_TREE;
3706 }
3707 }
3708
3709 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3710 assume that ARG is an operation that returns a truth value (0 or 1
3711 for scalars, 0 or -1 for vectors). Return the folded expression if
3712 folding is successful. Otherwise, return NULL_TREE. */
3713
3714 static tree
3715 fold_invert_truthvalue (location_t loc, tree arg)
3716 {
3717 tree type = TREE_TYPE (arg);
3718 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3719 ? BIT_NOT_EXPR
3720 : TRUTH_NOT_EXPR,
3721 type, arg);
3722 }
3723
3724 /* Return a simplified tree node for the truth-negation of ARG. This
3725 never alters ARG itself. We assume that ARG is an operation that
3726 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3727
3728 tree
3729 invert_truthvalue_loc (location_t loc, tree arg)
3730 {
3731 if (TREE_CODE (arg) == ERROR_MARK)
3732 return arg;
3733
3734 tree type = TREE_TYPE (arg);
3735 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3736 ? BIT_NOT_EXPR
3737 : TRUTH_NOT_EXPR,
3738 type, arg);
3739 }
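/* Illustrative sketch, kept under #if 0 so it is not compiled: the
   source-level equivalences that the truth-negation folding above
   implements, shown on plain ints (the function name is made up).  */
#if 0
int demorgan_sketch (int a, int b)
{
  /* !(a && b) folds to !a || !b, !(a || b) folds to !a && !b, and
     !(a < b) folds to a >= b when NaNs need not be honored.  */
  return !(a && b);
}
#endif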
3740
3741 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3742 with code CODE.  This optimization is unsafe because it can change rounding, so it is only valid when unsafe math optimizations are enabled.  */
3743 static tree
3744 distribute_real_division (location_t loc, enum tree_code code, tree type,
3745 tree arg0, tree arg1)
3746 {
3747 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3748 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3749
3750 /* (A / C) +- (B / C) -> (A +- B) / C. */
3751 if (mul0 == mul1
3752 && operand_equal_p (TREE_OPERAND (arg0, 1),
3753 TREE_OPERAND (arg1, 1), 0))
3754 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3755 fold_build2_loc (loc, code, type,
3756 TREE_OPERAND (arg0, 0),
3757 TREE_OPERAND (arg1, 0)),
3758 TREE_OPERAND (arg0, 1));
3759
3760 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3761 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3762 TREE_OPERAND (arg1, 0), 0)
3763 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3764 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3765 {
3766 REAL_VALUE_TYPE r0, r1;
3767 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3768 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3769 if (!mul0)
3770 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3771 if (!mul1)
3772 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3773 real_arithmetic (&r0, code, &r0, &r1);
3774 return fold_build2_loc (loc, MULT_EXPR, type,
3775 TREE_OPERAND (arg0, 0),
3776 build_real (type, r0));
3777 }
3778
3779 return NULL_TREE;
3780 }
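/* Illustrative sketch, under #if 0 and with made-up names: the two
   rewrites distribute_real_division performs, on doubles.  They are
   unsafe because each real division rounds separately, so the
   rewritten forms can differ from the originals in the last ulp.  */
#if 0
double distribute_sketch (double a, double b, double c)
{
  return (a + b) / c;              /* replaces a/c + b/c */
}
double merge_constants_sketch (double a)
{
  return a * (1.0/3.0 - 1.0/5.0);  /* replaces a/3.0 - a/5.0 */
}
#endif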
3781 \f
3782 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3783 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3784 and uses reverse storage order if REVERSEP is nonzero. */
3785
3786 static tree
3787 make_bit_field_ref (location_t loc, tree inner, tree type,
3788 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3789 int unsignedp, int reversep)
3790 {
3791 tree result, bftype;
3792
3793 if (bitpos == 0 && !reversep)
3794 {
3795 tree size = TYPE_SIZE (TREE_TYPE (inner));
3796 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3797 || POINTER_TYPE_P (TREE_TYPE (inner)))
3798 && tree_fits_shwi_p (size)
3799 && tree_to_shwi (size) == bitsize)
3800 return fold_convert_loc (loc, type, inner);
3801 }
3802
3803 bftype = type;
3804 if (TYPE_PRECISION (bftype) != bitsize
3805 || TYPE_UNSIGNED (bftype) == !unsignedp)
3806 bftype = build_nonstandard_integer_type (bitsize, 0);
3807
3808 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3809 size_int (bitsize), bitsize_int (bitpos));
3810 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3811
3812 if (bftype != type)
3813 result = fold_convert_loc (loc, type, result);
3814
3815 return result;
3816 }
3817
3818 /* Optimize a bit-field compare.
3819
3820 There are two cases: First is a compare against a constant and the
3821 second is a comparison of two items where the fields are at the same
3822 bit position relative to the start of a chunk (byte, halfword, word)
3823 large enough to contain it. In these cases we can avoid the shift
3824 implicit in bitfield extractions.
3825
3826 For constants, we emit a compare of the shifted constant with the
3827 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3828 compared. For two fields at the same position, we do the ANDs with the
3829 similar mask and compare the result of the ANDs.
3830
3831 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3832 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3833 are the left and right operands of the comparison, respectively.
3834
3835 If the optimization described above can be done, we return the resulting
3836 tree. Otherwise we return zero. */
3837
3838 static tree
3839 optimize_bit_field_compare (location_t loc, enum tree_code code,
3840 tree compare_type, tree lhs, tree rhs)
3841 {
3842 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3843 tree type = TREE_TYPE (lhs);
3844 tree unsigned_type;
3845 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3846 machine_mode lmode, rmode, nmode;
3847 int lunsignedp, runsignedp;
3848 int lreversep, rreversep;
3849 int lvolatilep = 0, rvolatilep = 0;
3850 tree linner, rinner = NULL_TREE;
3851 tree mask;
3852 tree offset;
3853
3854 /* Get all the information about the extractions being done. If the bit size
3855 is the same as the size of the underlying object, we aren't doing an
3856 extraction at all and so can do nothing. We also don't want to
3857 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3858 then will no longer be able to replace it. */
3859 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3860 &lunsignedp, &lreversep, &lvolatilep, false);
3861 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3862 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3863 return 0;
3864
3865 if (const_p)
3866 rreversep = lreversep;
3867 else
3868 {
3869 /* If this is not a constant, we can only do something if bit positions,
3870 sizes, signedness and storage order are the same. */
3871 rinner
3872 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3873 &runsignedp, &rreversep, &rvolatilep, false);
3874
3875 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3876 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3877 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3878 return 0;
3879 }
3880
3881 /* See if we can find a mode to refer to this field. We should be able to,
3882 but fail if we can't. */
3883 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3884 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3885 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3886 TYPE_ALIGN (TREE_TYPE (rinner))),
3887 word_mode, false);
3888 if (nmode == VOIDmode)
3889 return 0;
3890
3891 /* Set the unsigned type of the precision of this mode for the
3892 shifts below. */
3893 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3894
3895 /* Compute the bit position and size for the new reference and our offset
3896 within it. If the new reference is the same size as the original, we
3897 won't optimize anything, so return zero. */
3898 nbitsize = GET_MODE_BITSIZE (nmode);
3899 nbitpos = lbitpos & ~ (nbitsize - 1);
3900 lbitpos -= nbitpos;
3901 if (nbitsize == lbitsize)
3902 return 0;
3903
3904 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3905 lbitpos = nbitsize - lbitsize - lbitpos;
3906
3907 /* Make the mask to be used against the extracted field. */
3908 mask = build_int_cst_type (unsigned_type, -1);
3909 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3910 mask = const_binop (RSHIFT_EXPR, mask,
3911 size_int (nbitsize - lbitsize - lbitpos));
3912
3913 if (! const_p)
3914 /* If not comparing with constant, just rework the comparison
3915 and return. */
3916 return fold_build2_loc (loc, code, compare_type,
3917 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3918 make_bit_field_ref (loc, linner,
3919 unsigned_type,
3920 nbitsize, nbitpos,
3921 1, lreversep),
3922 mask),
3923 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3924 make_bit_field_ref (loc, rinner,
3925 unsigned_type,
3926 nbitsize, nbitpos,
3927 1, rreversep),
3928 mask));
3929
3930 /* Otherwise, we are handling the constant case. See if the constant is too
3931 big for the field.  Warn and return the constant result if so.  We do
3932 this not only for its own sake, but to avoid having to test for this
3933 error case below. If we didn't, we might generate wrong code.
3934
3935 For unsigned fields, the constant shifted right by the field length should
3936 be all zero. For signed fields, the high-order bits should agree with
3937 the sign bit. */
3938
3939 if (lunsignedp)
3940 {
3941 if (wi::lrshift (rhs, lbitsize) != 0)
3942 {
3943 warning (0, "comparison is always %d due to width of bit-field",
3944 code == NE_EXPR);
3945 return constant_boolean_node (code == NE_EXPR, compare_type);
3946 }
3947 }
3948 else
3949 {
3950 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3951 if (tem != 0 && tem != -1)
3952 {
3953 warning (0, "comparison is always %d due to width of bit-field",
3954 code == NE_EXPR);
3955 return constant_boolean_node (code == NE_EXPR, compare_type);
3956 }
3957 }
3958
3959 /* Single-bit compares should always be against zero. */
3960 if (lbitsize == 1 && ! integer_zerop (rhs))
3961 {
3962 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3963 rhs = build_int_cst (type, 0);
3964 }
3965
3966 /* Make a new bitfield reference, shift the constant over the
3967 appropriate number of bits and mask it with the computed mask
3968 (in case this was a signed field). If we changed it, make a new one. */
3969 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3970 lreversep);
3971
3972 rhs = const_binop (BIT_AND_EXPR,
3973 const_binop (LSHIFT_EXPR,
3974 fold_convert_loc (loc, unsigned_type, rhs),
3975 size_int (lbitpos)),
3976 mask);
3977
3978 lhs = build2_loc (loc, code, compare_type,
3979 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3980 return lhs;
3981 }
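/* Illustrative sketch, under #if 0 with a made-up layout: roughly
   what optimize_bit_field_compare produces at the source level for a
   compare against a constant.  The exact mode, mask and shift come
   from get_best_mode and depend on endianness.  */
#if 0
struct s { unsigned int f : 3; unsigned int rest : 29; };
int bit_field_cmp_sketch (struct s *p)
{
  /* p->f == 5 becomes, approximately,
       ((*(const unsigned int *) p) & 7u) == 5u
     on a little-endian target: one word load, a mask and a compare,
     with no extraction shift.  */
  return p->f == 5;
}
#endif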
3982 \f
3983 /* Subroutine for fold_truth_andor_1: decode a field reference.
3984
3985 If EXP is a comparison reference, we return the innermost reference.
3986
3987 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3988 set to the starting bit number.
3989
3990 If the innermost field can be completely contained in a mode-sized
3991 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3992
3993 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3994 otherwise it is not changed.
3995
3996 *PUNSIGNEDP is set to the signedness of the field.
3997
3998 *PREVERSEP is set to the storage order of the field.
3999
4000 *PMASK is set to the mask used. This is either contained in a
4001 BIT_AND_EXPR or derived from the width of the field.
4002
4003 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4004
4005 Return 0 if this is not a component reference or is one that we can't
4006 do anything with. */
4007
4008 static tree
4009 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
4010 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4011 int *punsignedp, int *preversep, int *pvolatilep,
4012 tree *pmask, tree *pand_mask)
4013 {
4014 tree outer_type = 0;
4015 tree and_mask = 0;
4016 tree mask, inner, offset;
4017 tree unsigned_type;
4018 unsigned int precision;
4019
4020 /* All the optimizations using this function assume integer fields.
4021 There are problems with FP fields since the type_for_size call
4022 below can fail for, e.g., XFmode. */
4023 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4024 return 0;
4025
4026 /* We are interested in the bare arrangement of bits, so strip everything
4027 that doesn't affect the machine mode. However, record the type of the
4028 outermost expression if it may matter below. */
4029 if (CONVERT_EXPR_P (exp)
4030 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4031 outer_type = TREE_TYPE (exp);
4032 STRIP_NOPS (exp);
4033
4034 if (TREE_CODE (exp) == BIT_AND_EXPR)
4035 {
4036 and_mask = TREE_OPERAND (exp, 1);
4037 exp = TREE_OPERAND (exp, 0);
4038 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4039 if (TREE_CODE (and_mask) != INTEGER_CST)
4040 return 0;
4041 }
4042
4043 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4044 punsignedp, preversep, pvolatilep, false);
4045 if ((inner == exp && and_mask == 0)
4046 || *pbitsize < 0 || offset != 0
4047 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4048 return 0;
4049
4050 /* If the number of bits in the reference is the same as the bitsize of
4051 the outer type, then the outer type gives the signedness. Otherwise
4052 (in case of a small bitfield) the signedness is unchanged. */
4053 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4054 *punsignedp = TYPE_UNSIGNED (outer_type);
4055
4056 /* Compute the mask to access the bitfield. */
4057 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4058 precision = TYPE_PRECISION (unsigned_type);
4059
4060 mask = build_int_cst_type (unsigned_type, -1);
4061
4062 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4063 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4064
4065 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4066 if (and_mask != 0)
4067 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4068 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4069
4070 *pmask = mask;
4071 *pand_mask = and_mask;
4072 return inner;
4073 }
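/* Illustrative sketch, under #if 0 with a made-up layout: a
   comparison operand that decode_field_reference can decode.  */
#if 0
struct t { unsigned int lo : 4; unsigned int hi : 4; };
int field_ref_sketch (struct t *p)
{
  /* For the operand (p->hi & 3), the inner reference is the
     underlying word, *PBITSIZE is 4, *PBITPOS is endian-dependent,
     and *PMASK is the AND mask 3 merged with the 4-bit field mask.  */
  return (p->hi & 3) == 2;
}
#endif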
4074
4075 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4076 bit positions and the type of MASK is signed. */
4077
4078 static int
4079 all_ones_mask_p (const_tree mask, unsigned int size)
4080 {
4081 tree type = TREE_TYPE (mask);
4082 unsigned int precision = TYPE_PRECISION (type);
4083
4084 /* If this function returns true when the type of the mask is
4085 UNSIGNED, then there will be errors. In particular see
4086 gcc.c-torture/execute/990326-1.c. There does not appear to be
4087 any documentation paper trail as to why this is so. But the pre
4088 wide-int worked with that restriction and it has been preserved
4089 here. */
4090 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4091 return false;
4092
4093 return wi::mask (size, false, precision) == mask;
4094 }
4095
4096 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4097 represents the sign bit of EXP's type. If EXP represents a sign
4098 or zero extension, also test VAL against the unextended type.
4099 The return value is the (sub)expression whose sign bit is VAL,
4100 or NULL_TREE otherwise. */
4101
4102 tree
4103 sign_bit_p (tree exp, const_tree val)
4104 {
4105 int width;
4106 tree t;
4107
4108 /* Tree EXP must have an integral type. */
4109 t = TREE_TYPE (exp);
4110 if (! INTEGRAL_TYPE_P (t))
4111 return NULL_TREE;
4112
4113 /* Tree VAL must be an integer constant. */
4114 if (TREE_CODE (val) != INTEGER_CST
4115 || TREE_OVERFLOW (val))
4116 return NULL_TREE;
4117
4118 width = TYPE_PRECISION (t);
4119 if (wi::only_sign_bit_p (val, width))
4120 return exp;
4121
4122 /* Handle extension from a narrower type. */
4123 if (TREE_CODE (exp) == NOP_EXPR
4124 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4125 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4126
4127 return NULL_TREE;
4128 }
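/* Illustrative sketch, under #if 0 with a made-up name: for a
   32-bit int only INT_MIN has just the sign bit set, so sign_bit_p
   lets callers fold a mask test like this one into a sign compare.  */
#if 0
int sign_bit_sketch (int x)
{
  return (x & 0x80000000) != 0;    /* equivalent to x < 0 */
}
#endif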
4129
4130 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4131 to be evaluated unconditionally. */
4132
4133 static int
4134 simple_operand_p (const_tree exp)
4135 {
4136 /* Strip any conversions that don't change the machine mode. */
4137 STRIP_NOPS (exp);
4138
4139 return (CONSTANT_CLASS_P (exp)
4140 || TREE_CODE (exp) == SSA_NAME
4141 || (DECL_P (exp)
4142 && ! TREE_ADDRESSABLE (exp)
4143 && ! TREE_THIS_VOLATILE (exp)
4144 && ! DECL_NONLOCAL (exp)
4145 /* Don't regard global variables as simple. They may be
4146 allocated in ways unknown to the compiler (shared memory,
4147 #pragma weak, etc). */
4148 && ! TREE_PUBLIC (exp)
4149 && ! DECL_EXTERNAL (exp)
4150 /* Weakrefs are not safe to be read, since they can be NULL.
4151 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4152 have DECL_WEAK flag set. */
4153 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4154 /* Loading a static variable is unduly expensive, but global
4155 registers aren't expensive. */
4156 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4157 }
4158
4159 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4160 to be evaluated unconditionally.
4161 In addition to simple_operand_p, we assume that comparisons, conversions,
4162 and logic-not operations are simple, if their operands are simple, too. */
4163
4164 static bool
4165 simple_operand_p_2 (tree exp)
4166 {
4167 enum tree_code code;
4168
4169 if (TREE_SIDE_EFFECTS (exp)
4170 || tree_could_trap_p (exp))
4171 return false;
4172
4173 while (CONVERT_EXPR_P (exp))
4174 exp = TREE_OPERAND (exp, 0);
4175
4176 code = TREE_CODE (exp);
4177
4178 if (TREE_CODE_CLASS (code) == tcc_comparison)
4179 return (simple_operand_p (TREE_OPERAND (exp, 0))
4180 && simple_operand_p (TREE_OPERAND (exp, 1)));
4181
4182 if (code == TRUTH_NOT_EXPR)
4183 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4184
4185 return simple_operand_p (exp);
4186 }
4187
4188 \f
4189 /* The following functions are subroutines to fold_range_test and allow it to
4190 try to change a logical combination of comparisons into a range test.
4191
4192 For example, both
4193 X == 2 || X == 3 || X == 4 || X == 5
4194 and
4195 X >= 2 && X <= 5
4196 are converted to
4197 (unsigned) (X - 2) <= 3
4198
4199 We describe each set of comparisons as being either inside or outside
4200 a range, using a variable named like IN_P, and then describe the
4201 range with a lower and upper bound. If one of the bounds is omitted,
4202 it represents either the highest or lowest value of the type.
4203
4204 In the comments below, we represent a range by two numbers in brackets
4205 preceded by a "+" to designate being inside that range, or a "-" to
4206 designate being outside that range, so the condition can be inverted by
4207 flipping the prefix. An omitted bound is represented by a "-". For
4208 example, "- [-, 10]" means being outside the range starting at the lowest
4209 possible value and ending at 10, in other words, being greater than 10.
4210 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4211 always false.
4212
4213 We set up things so that the missing bounds are handled in a consistent
4214 manner so neither a missing bound nor "true" and "false" need to be
4215 handled using a special case. */
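/* Concrete sketch of the rewrite described above, under #if 0 with a
   made-up name.  */
#if 0
int range_test_sketch (unsigned int x)
{
  /* Both "x == 2 || x == 3 || x == 4 || x == 5" and
     "x >= 2 && x <= 5" fold to the single unsigned compare:  */
  return x - 2u <= 3u;
}
#endif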
4216
4217 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4218 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4219 and UPPER1_P are nonzero if the respective argument is an upper bound
4220 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4221 must be specified for a comparison. ARG1 will be converted to ARG0's
4222 type if both are specified. */
4223
4224 static tree
4225 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4226 tree arg1, int upper1_p)
4227 {
4228 tree tem;
4229 int result;
4230 int sgn0, sgn1;
4231
4232 /* If neither arg represents infinity, do the normal operation.
4233 Else, if not a comparison, return infinity. Else handle the special
4234 comparison rules. Note that most of the cases below won't occur, but
4235 are handled for consistency. */
4236
4237 if (arg0 != 0 && arg1 != 0)
4238 {
4239 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4240 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4241 STRIP_NOPS (tem);
4242 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4243 }
4244
4245 if (TREE_CODE_CLASS (code) != tcc_comparison)
4246 return 0;
4247
4248 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4249 for neither.  In real mathematics, we cannot assume open-ended ranges
4250 compare equal.  But this is computer arithmetic, where numbers are
4251 finite, so we can model any missing bound as a value Z greater than
4252 any representable number.  This permits us to treat unbounded ranges
4253 as equal. */
4254 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4255 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4256 switch (code)
4257 {
4258 case EQ_EXPR:
4259 result = sgn0 == sgn1;
4260 break;
4261 case NE_EXPR:
4262 result = sgn0 != sgn1;
4263 break;
4264 case LT_EXPR:
4265 result = sgn0 < sgn1;
4266 break;
4267 case LE_EXPR:
4268 result = sgn0 <= sgn1;
4269 break;
4270 case GT_EXPR:
4271 result = sgn0 > sgn1;
4272 break;
4273 case GE_EXPR:
4274 result = sgn0 >= sgn1;
4275 break;
4276 default:
4277 gcc_unreachable ();
4278 }
4279
4280 return constant_boolean_node (result, type);
4281 }
4282 \f
4283 /* Helper routine for make_range. Perform one step for it, return
4284 new expression if the loop should continue or NULL_TREE if it should
4285 stop. */
4286
4287 tree
4288 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4289 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4290 bool *strict_overflow_p)
4291 {
4292 tree arg0_type = TREE_TYPE (arg0);
4293 tree n_low, n_high, low = *p_low, high = *p_high;
4294 int in_p = *p_in_p, n_in_p;
4295
4296 switch (code)
4297 {
4298 case TRUTH_NOT_EXPR:
4299 /* We can only do something if the range is testing for zero. */
4300 if (low == NULL_TREE || high == NULL_TREE
4301 || ! integer_zerop (low) || ! integer_zerop (high))
4302 return NULL_TREE;
4303 *p_in_p = ! in_p;
4304 return arg0;
4305
4306 case EQ_EXPR: case NE_EXPR:
4307 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4308 /* We can only do something if the range is testing for zero
4309 and if the second operand is an integer constant. Note that
4310 saying something is "in" the range we make is done by
4311 complementing IN_P since it will set in the initial case of
4312 being not equal to zero; "out" is leaving it alone. */
4313 if (low == NULL_TREE || high == NULL_TREE
4314 || ! integer_zerop (low) || ! integer_zerop (high)
4315 || TREE_CODE (arg1) != INTEGER_CST)
4316 return NULL_TREE;
4317
4318 switch (code)
4319 {
4320 case NE_EXPR: /* - [c, c] */
4321 low = high = arg1;
4322 break;
4323 case EQ_EXPR: /* + [c, c] */
4324 in_p = ! in_p, low = high = arg1;
4325 break;
4326 case GT_EXPR: /* - [-, c] */
4327 low = 0, high = arg1;
4328 break;
4329 case GE_EXPR: /* + [c, -] */
4330 in_p = ! in_p, low = arg1, high = 0;
4331 break;
4332 case LT_EXPR: /* - [c, -] */
4333 low = arg1, high = 0;
4334 break;
4335 case LE_EXPR: /* + [-, c] */
4336 in_p = ! in_p, low = 0, high = arg1;
4337 break;
4338 default:
4339 gcc_unreachable ();
4340 }
4341
4342 /* If this is an unsigned comparison, we also know that EXP is
4343 greater than or equal to zero. We base the range tests we make
4344 on that fact, so we record it here so we can parse existing
4345 range tests. We test arg0_type since often the return type
4346 of, e.g. EQ_EXPR, is boolean. */
4347 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4348 {
4349 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4350 in_p, low, high, 1,
4351 build_int_cst (arg0_type, 0),
4352 NULL_TREE))
4353 return NULL_TREE;
4354
4355 in_p = n_in_p, low = n_low, high = n_high;
4356
4357 /* If the high bound is missing, but we have a nonzero low
4358 bound, reverse the range so it goes from zero to the low bound
4359 minus 1. */
4360 if (high == 0 && low && ! integer_zerop (low))
4361 {
4362 in_p = ! in_p;
4363 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4364 build_int_cst (TREE_TYPE (low), 1), 0);
4365 low = build_int_cst (arg0_type, 0);
4366 }
4367 }
4368
4369 *p_low = low;
4370 *p_high = high;
4371 *p_in_p = in_p;
4372 return arg0;
4373
4374 case NEGATE_EXPR:
4375 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4376 low and high are non-NULL, then normalize will DTRT. */
4377 if (!TYPE_UNSIGNED (arg0_type)
4378 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4379 {
4380 if (low == NULL_TREE)
4381 low = TYPE_MIN_VALUE (arg0_type);
4382 if (high == NULL_TREE)
4383 high = TYPE_MAX_VALUE (arg0_type);
4384 }
4385
4386 /* (-x) IN [a,b] -> x in [-b, -a] */
4387 n_low = range_binop (MINUS_EXPR, exp_type,
4388 build_int_cst (exp_type, 0),
4389 0, high, 1);
4390 n_high = range_binop (MINUS_EXPR, exp_type,
4391 build_int_cst (exp_type, 0),
4392 0, low, 0);
4393 if (n_high != 0 && TREE_OVERFLOW (n_high))
4394 return NULL_TREE;
4395 goto normalize;
4396
4397 case BIT_NOT_EXPR:
4398 /* ~ X -> -X - 1 */
4399 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4400 build_int_cst (exp_type, 1));
4401
4402 case PLUS_EXPR:
4403 case MINUS_EXPR:
4404 if (TREE_CODE (arg1) != INTEGER_CST)
4405 return NULL_TREE;
4406
4407 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4408 move a constant to the other side. */
4409 if (!TYPE_UNSIGNED (arg0_type)
4410 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4411 return NULL_TREE;
4412
4413 /* If EXP is signed, any overflow in the computation is undefined,
4414 so we don't worry about it so long as our computations on
4415 the bounds don't overflow. For unsigned, overflow is defined
4416 and this is exactly the right thing. */
4417 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4418 arg0_type, low, 0, arg1, 0);
4419 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4420 arg0_type, high, 1, arg1, 0);
4421 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4422 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4423 return NULL_TREE;
4424
4425 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4426 *strict_overflow_p = true;
4427
4428 normalize:
4429 /* Check for an unsigned range which has wrapped around the maximum
4430 value thus making n_high < n_low, and normalize it. */
4431 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4432 {
4433 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4434 build_int_cst (TREE_TYPE (n_high), 1), 0);
4435 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4436 build_int_cst (TREE_TYPE (n_low), 1), 0);
4437
4438 /* If the range is of the form +/- [ x+1, x ], we won't
4439 be able to normalize it. But then, it represents the
4440 whole range or the empty set, so make it
4441 +/- [ -, - ]. */
4442 if (tree_int_cst_equal (n_low, low)
4443 && tree_int_cst_equal (n_high, high))
4444 low = high = 0;
4445 else
4446 in_p = ! in_p;
4447 }
4448 else
4449 low = n_low, high = n_high;
4450
4451 *p_low = low;
4452 *p_high = high;
4453 *p_in_p = in_p;
4454 return arg0;
4455
4456 CASE_CONVERT:
4457 case NON_LVALUE_EXPR:
4458 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4459 return NULL_TREE;
4460
4461 if (! INTEGRAL_TYPE_P (arg0_type)
4462 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4463 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4464 return NULL_TREE;
4465
4466 n_low = low, n_high = high;
4467
4468 if (n_low != 0)
4469 n_low = fold_convert_loc (loc, arg0_type, n_low);
4470
4471 if (n_high != 0)
4472 n_high = fold_convert_loc (loc, arg0_type, n_high);
4473
4474 /* If we're converting arg0 from an unsigned type to the signed
4475 type of exp, we will be doing the comparison as unsigned.
4476 The tests above have already verified that LOW and HIGH
4477 are both positive.
4478
4479 So we have to ensure that we will handle large unsigned
4480 values the same way that the current signed bounds treat
4481 negative values. */
4482
4483 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4484 {
4485 tree high_positive;
4486 tree equiv_type;
4487 /* For fixed-point modes, we need to pass the saturating flag
4488 as the 2nd parameter. */
4489 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4490 equiv_type
4491 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4492 TYPE_SATURATING (arg0_type));
4493 else
4494 equiv_type
4495 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4496
4497 /* A range without an upper bound is, naturally, unbounded.
4498 Since convert would have cropped a very large value, use
4499 the max value for the destination type. */
4500 high_positive
4501 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4502 : TYPE_MAX_VALUE (arg0_type);
4503
4504 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4505 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4506 fold_convert_loc (loc, arg0_type,
4507 high_positive),
4508 build_int_cst (arg0_type, 1));
4509
4510 /* If the low bound is specified, "and" the range with the
4511 range for which the original unsigned value will be
4512 positive. */
4513 if (low != 0)
4514 {
4515 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4516 1, fold_convert_loc (loc, arg0_type,
4517 integer_zero_node),
4518 high_positive))
4519 return NULL_TREE;
4520
4521 in_p = (n_in_p == in_p);
4522 }
4523 else
4524 {
4525 /* Otherwise, "or" the range with the range of the input
4526 that will be interpreted as negative. */
4527 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4528 1, fold_convert_loc (loc, arg0_type,
4529 integer_zero_node),
4530 high_positive))
4531 return NULL_TREE;
4532
4533 in_p = (in_p != n_in_p);
4534 }
4535 }
4536
4537 *p_low = n_low;
4538 *p_high = n_high;
4539 *p_in_p = in_p;
4540 return arg0;
4541
4542 default:
4543 return NULL_TREE;
4544 }
4545 }
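/* Illustrative sketch, under #if 0 with a made-up name, of one
   PLUS_EXPR step followed by the normalization above, on unsigned
   arithmetic.  */
#if 0
int plus_step_sketch (unsigned int x)
{
  /* "x + 10u <= 19u" puts x + 10 in [0, 19], i.e. x in [-10, 9]
     modulo 2**32.  The bounds wrap (n_high < n_low), so the range is
     normalized to "x not in [10, 0xfffffff5]".  */
  return x + 10u <= 19u;
}
#endif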
4546
4547 /* Given EXP, a logical expression, set the range it is testing into
4548 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4549 actually being tested. *PLOW and *PHIGH will be made of the same
4550 type as the returned expression. If EXP is not a comparison, we
4551 will most likely not be returning a useful value and range. Set
4552 *STRICT_OVERFLOW_P to true if the return value is only valid
4553 because signed overflow is undefined; otherwise, do not change
4554 *STRICT_OVERFLOW_P. */
4555
4556 tree
4557 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4558 bool *strict_overflow_p)
4559 {
4560 enum tree_code code;
4561 tree arg0, arg1 = NULL_TREE;
4562 tree exp_type, nexp;
4563 int in_p;
4564 tree low, high;
4565 location_t loc = EXPR_LOCATION (exp);
4566
4567 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4568 and see if we can refine the range. Some of the cases below may not
4569 happen, but it doesn't seem worth worrying about this. We "continue"
4570 the outer loop when we've changed something; otherwise we "break"
4571 the switch, which will "break" the while. */
4572
4573 in_p = 0;
4574 low = high = build_int_cst (TREE_TYPE (exp), 0);
4575
4576 while (1)
4577 {
4578 code = TREE_CODE (exp);
4579 exp_type = TREE_TYPE (exp);
4580 arg0 = NULL_TREE;
4581
4582 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4583 {
4584 if (TREE_OPERAND_LENGTH (exp) > 0)
4585 arg0 = TREE_OPERAND (exp, 0);
4586 if (TREE_CODE_CLASS (code) == tcc_binary
4587 || TREE_CODE_CLASS (code) == tcc_comparison
4588 || (TREE_CODE_CLASS (code) == tcc_expression
4589 && TREE_OPERAND_LENGTH (exp) > 1))
4590 arg1 = TREE_OPERAND (exp, 1);
4591 }
4592 if (arg0 == NULL_TREE)
4593 break;
4594
4595 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4596 &high, &in_p, strict_overflow_p);
4597 if (nexp == NULL_TREE)
4598 break;
4599 exp = nexp;
4600 }
4601
4602 /* If EXP is a constant, we can evaluate whether this is true or false. */
4603 if (TREE_CODE (exp) == INTEGER_CST)
4604 {
4605 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4606 exp, 0, low, 0))
4607 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4608 exp, 1, high, 1)));
4609 low = high = 0;
4610 exp = 0;
4611 }
4612
4613 *pin_p = in_p, *plow = low, *phigh = high;
4614 return exp;
4615 }
4616 \f
4617 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4618 type, TYPE, return an expression to test if EXP is in (or out of, depending
4619 on IN_P) the range. Return 0 if the test couldn't be created. */
4620
4621 tree
4622 build_range_check (location_t loc, tree type, tree exp, int in_p,
4623 tree low, tree high)
4624 {
4625 tree etype = TREE_TYPE (exp), value;
4626
4627 /* Disable this optimization for function pointer expressions
4628 on targets that require function pointer canonicalization. */
4629 if (targetm.have_canonicalize_funcptr_for_compare ()
4630 && TREE_CODE (etype) == POINTER_TYPE
4631 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4632 return NULL_TREE;
4633
4634 if (! in_p)
4635 {
4636 value = build_range_check (loc, type, exp, 1, low, high);
4637 if (value != 0)
4638 return invert_truthvalue_loc (loc, value);
4639
4640 return 0;
4641 }
4642
4643 if (low == 0 && high == 0)
4644 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4645
4646 if (low == 0)
4647 return fold_build2_loc (loc, LE_EXPR, type, exp,
4648 fold_convert_loc (loc, etype, high));
4649
4650 if (high == 0)
4651 return fold_build2_loc (loc, GE_EXPR, type, exp,
4652 fold_convert_loc (loc, etype, low));
4653
4654 if (operand_equal_p (low, high, 0))
4655 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4656 fold_convert_loc (loc, etype, low));
4657
4658 if (integer_zerop (low))
4659 {
4660 if (! TYPE_UNSIGNED (etype))
4661 {
4662 etype = unsigned_type_for (etype);
4663 high = fold_convert_loc (loc, etype, high);
4664 exp = fold_convert_loc (loc, etype, exp);
4665 }
4666 return build_range_check (loc, type, exp, 1, 0, high);
4667 }
4668
4669 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4670 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4671 {
4672 int prec = TYPE_PRECISION (etype);
4673
4674 if (wi::mask (prec - 1, false, prec) == high)
4675 {
4676 if (TYPE_UNSIGNED (etype))
4677 {
4678 tree signed_etype = signed_type_for (etype);
4679 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4680 etype
4681 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4682 else
4683 etype = signed_etype;
4684 exp = fold_convert_loc (loc, etype, exp);
4685 }
4686 return fold_build2_loc (loc, GT_EXPR, type, exp,
4687 build_int_cst (etype, 0));
4688 }
4689 }
4690
4691 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4692 This requires wrap-around arithmetic for the type of the expression.
4693 First make sure that arithmetic in this type is valid, then make sure
4694 that it wraps around. */
4695 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4696 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4697 TYPE_UNSIGNED (etype));
4698
4699 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4700 {
4701 tree utype, minv, maxv;
4702
4703 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4704 for the type in question, as we rely on this here. */
4705 utype = unsigned_type_for (etype);
4706 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4707 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4708 build_int_cst (TREE_TYPE (maxv), 1), 1);
4709 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4710
4711 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4712 minv, 1, maxv, 1)))
4713 etype = utype;
4714 else
4715 return 0;
4716 }
4717
4718 high = fold_convert_loc (loc, etype, high);
4719 low = fold_convert_loc (loc, etype, low);
4720 exp = fold_convert_loc (loc, etype, exp);
4721
4722 value = const_binop (MINUS_EXPR, high, low);
4723
4725 if (POINTER_TYPE_P (etype))
4726 {
4727 if (value != 0 && !TREE_OVERFLOW (value))
4728 {
4729 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4730 return build_range_check (loc, type,
4731 fold_build_pointer_plus_loc (loc, exp, low),
4732 1, build_int_cst (etype, 0), value);
4733 }
4734 return 0;
4735 }
4736
4737 if (value != 0 && !TREE_OVERFLOW (value))
4738 return build_range_check (loc, type,
4739 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4740 1, build_int_cst (etype, 0), value);
4741
4742 return 0;
4743 }
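/* Illustrative sketches, under #if 0 with made-up names, of two
   checks built above.  */
#if 0
int signed_char_check_sketch (unsigned char c)
{
  return (signed char) c > 0;      /* from c >= 1 && c <= 127 */
}
int subtract_check_sketch (unsigned int x)
{
  return x - 20u <= 79u;           /* from x >= 20 && x <= 99 */
}
#endif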
4744 \f
4745 /* Return the predecessor of VAL in its type, handling the infinite case. */
4746
4747 static tree
4748 range_predecessor (tree val)
4749 {
4750 tree type = TREE_TYPE (val);
4751
4752 if (INTEGRAL_TYPE_P (type)
4753 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4754 return 0;
4755 else
4756 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4757 build_int_cst (TREE_TYPE (val), 1), 0);
4758 }
4759
4760 /* Return the successor of VAL in its type, handling the infinite case. */
4761
4762 static tree
4763 range_successor (tree val)
4764 {
4765 tree type = TREE_TYPE (val);
4766
4767 if (INTEGRAL_TYPE_P (type)
4768 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4769 return 0;
4770 else
4771 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4772 build_int_cst (TREE_TYPE (val), 1), 0);
4773 }
4774
4775 /* Given two ranges, see if we can merge them into one. Return 1 if we
4776 can, 0 if we can't. Set the output range into the specified parameters. */
4777
4778 bool
4779 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4780 tree high0, int in1_p, tree low1, tree high1)
4781 {
4782 int no_overlap;
4783 int subset;
4784 int temp;
4785 tree tem;
4786 int in_p;
4787 tree low, high;
4788 int lowequal = ((low0 == 0 && low1 == 0)
4789 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4790 low0, 0, low1, 0)));
4791 int highequal = ((high0 == 0 && high1 == 0)
4792 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4793 high0, 1, high1, 1)));
4794
4795 /* Make range 0 be the range that starts first, or ends last if they
4796 start at the same value. Swap them if it isn't. */
4797 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4798 low0, 0, low1, 0))
4799 || (lowequal
4800 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4801 high1, 1, high0, 1))))
4802 {
4803 temp = in0_p, in0_p = in1_p, in1_p = temp;
4804 tem = low0, low0 = low1, low1 = tem;
4805 tem = high0, high0 = high1, high1 = tem;
4806 }
4807
4808 /* Now flag two cases, whether the ranges are disjoint or whether the
4809 second range is totally subsumed in the first. Note that the tests
4810 below are simplified by the ones above. */
4811 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4812 high0, 1, low1, 0));
4813 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4814 high1, 1, high0, 1));
4815
4816 /* We now have four cases, depending on whether we are including or
4817 excluding the two ranges. */
4818 if (in0_p && in1_p)
4819 {
4820 /* If they don't overlap, the result is false. If the second range
4821 is a subset it is the result. Otherwise, the range is from the start
4822 of the second to the end of the first. */
4823 if (no_overlap)
4824 in_p = 0, low = high = 0;
4825 else if (subset)
4826 in_p = 1, low = low1, high = high1;
4827 else
4828 in_p = 1, low = low1, high = high0;
4829 }
4830
4831 else if (in0_p && ! in1_p)
4832 {
4833 /* If they don't overlap, the result is the first range. If they are
4834 equal, the result is false. If the second range is a subset of the
4835 first, and the ranges begin at the same place, we go from just after
4836 the end of the second range to the end of the first. If the second
4837 range is not a subset of the first, or if it is a subset and both
4838 ranges end at the same place, the range starts at the start of the
4839 first range and ends just before the second range.
4840 Otherwise, we can't describe this as a single range. */
4841 if (no_overlap)
4842 in_p = 1, low = low0, high = high0;
4843 else if (lowequal && highequal)
4844 in_p = 0, low = high = 0;
4845 else if (subset && lowequal)
4846 {
4847 low = range_successor (high1);
4848 high = high0;
4849 in_p = 1;
4850 if (low == 0)
4851 {
4852 /* We are in the weird situation where high0 > high1 but
4853 high1 has no successor. Punt. */
4854 return 0;
4855 }
4856 }
4857 else if (! subset || highequal)
4858 {
4859 low = low0;
4860 high = range_predecessor (low1);
4861 in_p = 1;
4862 if (high == 0)
4863 {
4864 /* low0 < low1 but low1 has no predecessor. Punt. */
4865 return 0;
4866 }
4867 }
4868 else
4869 return 0;
4870 }
4871
4872 else if (! in0_p && in1_p)
4873 {
4874 /* If they don't overlap, the result is the second range. If the second
4875 is a subset of the first, the result is false. Otherwise,
4876 the range starts just after the first range and ends at the
4877 end of the second. */
4878 if (no_overlap)
4879 in_p = 1, low = low1, high = high1;
4880 else if (subset || highequal)
4881 in_p = 0, low = high = 0;
4882 else
4883 {
4884 low = range_successor (high0);
4885 high = high1;
4886 in_p = 1;
4887 if (low == 0)
4888 {
4889 /* high1 > high0 but high0 has no successor. Punt. */
4890 return 0;
4891 }
4892 }
4893 }
4894
4895 else
4896 {
4897 /* The case where we are excluding both ranges. Here the complex case
4898 is if they don't overlap. In that case, the only time we have a
4899 range is if they are adjacent. If the second is a subset of the
4900 first, the result is the first. Otherwise, the range to exclude
4901 starts at the beginning of the first range and ends at the end of the
4902 second. */
4903 if (no_overlap)
4904 {
4905 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4906 range_successor (high0),
4907 1, low1, 0)))
4908 in_p = 0, low = low0, high = high1;
4909 else
4910 {
4911 /* Canonicalize - [min, x] into - [-, x]. */
4912 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4913 switch (TREE_CODE (TREE_TYPE (low0)))
4914 {
4915 case ENUMERAL_TYPE:
4916 if (TYPE_PRECISION (TREE_TYPE (low0))
4917 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4918 break;
4919 /* FALLTHROUGH */
4920 case INTEGER_TYPE:
4921 if (tree_int_cst_equal (low0,
4922 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4923 low0 = 0;
4924 break;
4925 case POINTER_TYPE:
4926 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4927 && integer_zerop (low0))
4928 low0 = 0;
4929 break;
4930 default:
4931 break;
4932 }
4933
4934 /* Canonicalize - [x, max] into - [x, -]. */
4935 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4936 switch (TREE_CODE (TREE_TYPE (high1)))
4937 {
4938 case ENUMERAL_TYPE:
4939 if (TYPE_PRECISION (TREE_TYPE (high1))
4940 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4941 break;
4942 /* FALLTHROUGH */
4943 case INTEGER_TYPE:
4944 if (tree_int_cst_equal (high1,
4945 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4946 high1 = 0;
4947 break;
4948 case POINTER_TYPE:
4949 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4950 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4951 high1, 1,
4952 build_int_cst (TREE_TYPE (high1), 1),
4953 1)))
4954 high1 = 0;
4955 break;
4956 default:
4957 break;
4958 }
4959
4960 /* The ranges might also be adjacent between the maximum and
4961 minimum values of the given type. For
4962 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4963 return + [x + 1, y - 1]. */
4964 if (low0 == 0 && high1 == 0)
4965 {
4966 low = range_successor (high0);
4967 high = range_predecessor (low1);
4968 if (low == 0 || high == 0)
4969 return 0;
4970
4971 in_p = 1;
4972 }
4973 else
4974 return 0;
4975 }
4976 }
4977 else if (subset)
4978 in_p = 0, low = low0, high = high0;
4979 else
4980 in_p = 0, low = low0, high = high1;
4981 }
4982
4983 *pin_p = in_p, *plow = low, *phigh = high;
4984 return 1;
4985 }
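/* Illustrative sketch, under #if 0 with a made-up name: merging
   "+ [2, 5]" with "+ [4, 10]" (both included, overlapping, neither a
   subset) yields the range from the start of the second to the end
   of the first, "+ [4, 5]".  */
#if 0
int merge_sketch (int x)
{
  /* (x >= 2 && x <= 5) && (x >= 4 && x <= 10)  simplifies to:  */
  return x >= 4 && x <= 5;
}
#endif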
4986 \f
4987
4988 /* Subroutine of fold, looking inside expressions of the form
4989 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4990 of the COND_EXPR. This function is being used also to optimize
4991 A op B ? C : A, by reversing the comparison first.
4992
4993 Return a folded expression whose code is not a COND_EXPR
4994 anymore, or NULL_TREE if no folding opportunity is found. */
4995
4996 static tree
4997 fold_cond_expr_with_comparison (location_t loc, tree type,
4998 tree arg0, tree arg1, tree arg2)
4999 {
5000 enum tree_code comp_code = TREE_CODE (arg0);
5001 tree arg00 = TREE_OPERAND (arg0, 0);
5002 tree arg01 = TREE_OPERAND (arg0, 1);
5003 tree arg1_type = TREE_TYPE (arg1);
5004 tree tem;
5005
5006 STRIP_NOPS (arg1);
5007 STRIP_NOPS (arg2);
5008
5009 /* If we have A op 0 ? A : -A, consider applying the following
5010 transformations:
5011
5012 A == 0? A : -A same as -A
5013 A != 0? A : -A same as A
5014 A >= 0? A : -A same as abs (A)
5015 A > 0? A : -A same as abs (A)
5016 A <= 0? A : -A same as -abs (A)
5017 A < 0? A : -A same as -abs (A)
5018
5019 None of these transformations work for modes with signed
5020 zeros. If A is +/-0, the first two transformations will
5021 change the sign of the result (from +0 to -0, or vice
5022 versa). The last four will fix the sign of the result,
5023 even though the original expressions could be positive or
5024 negative, depending on the sign of A.
5025
5026 Note that all these transformations are correct if A is
5027 NaN, since the two alternatives (A and -A) are also NaNs. */
5028 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5029 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5030 ? real_zerop (arg01)
5031 : integer_zerop (arg01))
5032 && ((TREE_CODE (arg2) == NEGATE_EXPR
5033 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5034 /* In the case that A is of the form X-Y, '-A' (arg2) may
5035 have already been folded to Y-X, check for that. */
5036 || (TREE_CODE (arg1) == MINUS_EXPR
5037 && TREE_CODE (arg2) == MINUS_EXPR
5038 && operand_equal_p (TREE_OPERAND (arg1, 0),
5039 TREE_OPERAND (arg2, 1), 0)
5040 && operand_equal_p (TREE_OPERAND (arg1, 1),
5041 TREE_OPERAND (arg2, 0), 0))))
5042 switch (comp_code)
5043 {
5044 case EQ_EXPR:
5045 case UNEQ_EXPR:
5046 tem = fold_convert_loc (loc, arg1_type, arg1);
5047 return pedantic_non_lvalue_loc (loc,
5048 fold_convert_loc (loc, type,
5049 negate_expr (tem)));
5050 case NE_EXPR:
5051 case LTGT_EXPR:
5052 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5053 case UNGE_EXPR:
5054 case UNGT_EXPR:
5055 if (flag_trapping_math)
5056 break;
5057 /* Fall through. */
5058 case GE_EXPR:
5059 case GT_EXPR:
5060 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5061 break;
5062 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5063 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5064 case UNLE_EXPR:
5065 case UNLT_EXPR:
5066 if (flag_trapping_math)
5067 break;
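          /* Fall through. */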
5068 case LE_EXPR:
5069 case LT_EXPR:
5070 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5071 break;
5072 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5073 return negate_expr (fold_convert_loc (loc, type, tem));
5074 default:
5075 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5076 break;
5077 }
5078
5079 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5080 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5081 both transformations are correct when A is NaN: A != 0
5082 is then true, and A == 0 is false. */
5083
5084 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5085 && integer_zerop (arg01) && integer_zerop (arg2))
5086 {
5087 if (comp_code == NE_EXPR)
5088 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5089 else if (comp_code == EQ_EXPR)
5090 return build_zero_cst (type);
5091 }
5092
5093 /* Try some transformations of A op B ? A : B.
5094
5095 A == B? A : B same as B
5096 A != B? A : B same as A
5097 A >= B? A : B same as max (A, B)
5098 A > B? A : B same as max (B, A)
5099 A <= B? A : B same as min (A, B)
5100 A < B? A : B same as min (B, A)
5101
5102 As above, these transformations don't work in the presence
5103 of signed zeros. For example, if A and B are zeros of
5104 opposite sign, the first two transformations will change
5105 the sign of the result. In the last four, the original
5106 expressions give different results for (A=+0, B=-0) and
5107 (A=-0, B=+0), but the transformed expressions do not.
5108
5109 The first two transformations are correct if either A or B
5110 is a NaN. In the first transformation, the condition will
5111 be false, and B will indeed be chosen. In the case of the
5112 second transformation, the condition A != B will be true,
5113 and A will be chosen.
5114
5115 The conversions to max() and min() are not correct if B is
5116 a number and A is not. The conditions in the original
5117 expressions will be false, so all four give B. The min()
5118 and max() versions would give a NaN instead. */
5119 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5120 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5121 /* Avoid these transformations if the COND_EXPR may be used
5122 as an lvalue in the C++ front-end. PR c++/19199. */
5123 && (in_gimple_form
5124 || VECTOR_TYPE_P (type)
5125 || (! lang_GNU_CXX ()
5126 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5127 || ! maybe_lvalue_p (arg1)
5128 || ! maybe_lvalue_p (arg2)))
5129 {
5130 tree comp_op0 = arg00;
5131 tree comp_op1 = arg01;
5132 tree comp_type = TREE_TYPE (comp_op0);
5133
5134 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5135 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5136 {
5137 comp_type = type;
5138 comp_op0 = arg1;
5139 comp_op1 = arg2;
5140 }
5141
5142 switch (comp_code)
5143 {
5144 case EQ_EXPR:
5145 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5146 case NE_EXPR:
5147 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5148 case LE_EXPR:
5149 case LT_EXPR:
5150 case UNLE_EXPR:
5151 case UNLT_EXPR:
5152 /* In C++ a ?: expression can be an lvalue, so put the
5153 operand which will be used if they are equal first
5154 so that we can convert this back to the
5155 corresponding COND_EXPR. */
5156 if (!HONOR_NANS (arg1))
5157 {
5158 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5159 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5160 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5161 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5162 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5163 comp_op1, comp_op0);
5164 return pedantic_non_lvalue_loc (loc,
5165 fold_convert_loc (loc, type, tem));
5166 }
5167 break;
5168 case GE_EXPR:
5169 case GT_EXPR:
5170 case UNGE_EXPR:
5171 case UNGT_EXPR:
5172 if (!HONOR_NANS (arg1))
5173 {
5174 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5175 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5176 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5177 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5178 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5179 comp_op1, comp_op0);
5180 return pedantic_non_lvalue_loc (loc,
5181 fold_convert_loc (loc, type, tem));
5182 }
5183 break;
5184 case UNEQ_EXPR:
5185 if (!HONOR_NANS (arg1))
5186 return pedantic_non_lvalue_loc (loc,
5187 fold_convert_loc (loc, type, arg2));
5188 break;
5189 case LTGT_EXPR:
5190 if (!HONOR_NANS (arg1))
5191 return pedantic_non_lvalue_loc (loc,
5192 fold_convert_loc (loc, type, arg1));
5193 break;
5194 default:
5195 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5196 break;
5197 }
5198 }
5199
5200 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5201 we might still be able to simplify this. For example,
5202 if C1 is one less or one more than C2, this might have started
5203 out as a MIN or MAX and been transformed by this function.
5204 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5205
5206 if (INTEGRAL_TYPE_P (type)
5207 && TREE_CODE (arg01) == INTEGER_CST
5208 && TREE_CODE (arg2) == INTEGER_CST)
5209 switch (comp_code)
5210 {
5211 case EQ_EXPR:
5212 if (TREE_CODE (arg1) == INTEGER_CST)
5213 break;
5214 /* We can replace A with C1 in this case. */
5215 arg1 = fold_convert_loc (loc, type, arg01);
5216 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5217
5218 case LT_EXPR:
5219 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5220 MIN_EXPR, to preserve the signedness of the comparison. */
5221 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5222 OEP_ONLY_CONST)
5223 && operand_equal_p (arg01,
5224 const_binop (PLUS_EXPR, arg2,
5225 build_int_cst (type, 1)),
5226 OEP_ONLY_CONST))
5227 {
5228 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5229 fold_convert_loc (loc, TREE_TYPE (arg00),
5230 arg2));
5231 return pedantic_non_lvalue_loc (loc,
5232 fold_convert_loc (loc, type, tem));
5233 }
5234 break;
5235
5236 case LE_EXPR:
5237 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5238 as above. */
5239 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5240 OEP_ONLY_CONST)
5241 && operand_equal_p (arg01,
5242 const_binop (MINUS_EXPR, arg2,
5243 build_int_cst (type, 1)),
5244 OEP_ONLY_CONST))
5245 {
5246 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5247 fold_convert_loc (loc, TREE_TYPE (arg00),
5248 arg2));
5249 return pedantic_non_lvalue_loc (loc,
5250 fold_convert_loc (loc, type, tem));
5251 }
5252 break;
5253
5254 case GT_EXPR:
5255 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5256 MAX_EXPR, to preserve the signedness of the comparison. */
5257 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5258 OEP_ONLY_CONST)
5259 && operand_equal_p (arg01,
5260 const_binop (MINUS_EXPR, arg2,
5261 build_int_cst (type, 1)),
5262 OEP_ONLY_CONST))
5263 {
5264 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5265 fold_convert_loc (loc, TREE_TYPE (arg00),
5266 arg2));
5267 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5268 }
5269 break;
5270
5271 case GE_EXPR:
5272 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5273 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5274 OEP_ONLY_CONST)
5275 && operand_equal_p (arg01,
5276 const_binop (PLUS_EXPR, arg2,
5277 build_int_cst (type, 1)),
5278 OEP_ONLY_CONST))
5279 {
5280 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5281 fold_convert_loc (loc, TREE_TYPE (arg00),
5282 arg2));
5283 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5284 }
5285 break;
5286 case NE_EXPR:
5287 break;
5288 default:
5289 gcc_unreachable ();
5290 }
5291
5292 return NULL_TREE;
5293 }
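/* Illustrative sketches, under #if 0 with made-up names, of
   conditional folds performed above.  */
#if 0
int abs_sketch (int a)
{
  return a >= 0 ? a : -a;          /* folds to ABS_EXPR <a> */
}
int min_sketch (int a, int b)
{
  return a < b ? a : b;            /* folds to MIN_EXPR <b, a> */
}
int min_const_sketch (int a)
{
  return a < 11 ? a : 10;          /* C1 == C2 + 1: MIN_EXPR <a, 10> */
}
#endif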
5294
5295
5296 \f
5297 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5298 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5299 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5300 false) >= 2)
5301 #endif
5302
5303 /* EXP is some logical combination of boolean tests. See if we can
5304 merge it into some range test. Return the new tree if so. */
5305
5306 static tree
5307 fold_range_test (location_t loc, enum tree_code code, tree type,
5308 tree op0, tree op1)
5309 {
5310 int or_op = (code == TRUTH_ORIF_EXPR
5311 || code == TRUTH_OR_EXPR);
5312 int in0_p, in1_p, in_p;
5313 tree low0, low1, low, high0, high1, high;
5314 bool strict_overflow_p = false;
5315 tree tem, lhs, rhs;
5316 const char * const warnmsg = G_("assuming signed overflow does not occur "
5317 "when simplifying range test");
5318
5319 if (!INTEGRAL_TYPE_P (type))
5320 return 0;
5321
5322 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5323 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5324
5325 /* If this is an OR operation, invert both sides; we will invert
5326 again at the end. */
5327 if (or_op)
5328 in0_p = ! in0_p, in1_p = ! in1_p;
5329
5330 /* If both expressions are the same, if we can merge the ranges, and we
5331 can build the range test, return it or it inverted. If one of the
5332 ranges is always true or always false, consider it to be the same
5333 expression as the other. */
5334 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5335 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5336 in1_p, low1, high1)
5337 && 0 != (tem = (build_range_check (loc, type,
5338 lhs != 0 ? lhs
5339 : rhs != 0 ? rhs : integer_zero_node,
5340 in_p, low, high))))
5341 {
5342 if (strict_overflow_p)
5343 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5344 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5345 }
5346
5347 /* On machines where the branch cost is expensive, if this is a
5348 short-circuited branch and the underlying object on both sides
5349 is the same, make a non-short-circuit operation. */
5350 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5351 && lhs != 0 && rhs != 0
5352 && (code == TRUTH_ANDIF_EXPR
5353 || code == TRUTH_ORIF_EXPR)
5354 && operand_equal_p (lhs, rhs, 0))
5355 {
5356 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5357 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5358 which cases we can't do this. */
5359 if (simple_operand_p (lhs))
5360 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5361 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5362 type, op0, op1);
5363
5364 else if (!lang_hooks.decls.global_bindings_p ()
5365 && !CONTAINS_PLACEHOLDER_P (lhs))
5366 {
5367 tree common = save_expr (lhs);
5368
5369 if (0 != (lhs = build_range_check (loc, type, common,
5370 or_op ? ! in0_p : in0_p,
5371 low0, high0))
5372 && (0 != (rhs = build_range_check (loc, type, common,
5373 or_op ? ! in1_p : in1_p,
5374 low1, high1))))
5375 {
5376 if (strict_overflow_p)
5377 fold_overflow_warning (warnmsg,
5378 WARN_STRICT_OVERFLOW_COMPARISON);
5379 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5380 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5381 type, lhs, rhs);
5382 }
5383 }
5384 }
5385
5386 return 0;
5387 }
5388 \f
5389 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5390 P-bit value. Arrange things so the extra bits will be set to zero if and
5391 only if C is sign-extended to its full width. If MASK is nonzero,
5392 it is an INTEGER_CST that should be AND'ed with the extra bits. */
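/* A worked example, for illustration only: with an 8-bit mode and
   P == 4, C == 0b1010 has the sign bit of the field set, so the
   result is 0xfa, i.e. C sign-extended from 4 bits; for C == 0b0101
   the sign bit is clear, the XOR value is zero, and C is returned
   unchanged.  */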
5393
5394 static tree
5395 unextend (tree c, int p, int unsignedp, tree mask)
5396 {
5397 tree type = TREE_TYPE (c);
5398 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5399 tree temp;
5400
5401 if (p == modesize || unsignedp)
5402 return c;
5403
5404 /* We work by getting just the sign bit into the low-order bit, then
5405 into the high-order bit, then sign-extend. We then XOR that value
5406 with C. */
5407 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5408
5409 /* We must use a signed type in order to get an arithmetic right shift.
5410 However, we must also avoid introducing accidental overflows, so that
5411 a subsequent call to integer_zerop will work. Hence we must
5412 do the type conversion here. At this point, the constant is either
5413 zero or one, and the conversion to a signed type can never overflow.
5414 We could get an overflow if this conversion is done anywhere else. */
5415 if (TYPE_UNSIGNED (type))
5416 temp = fold_convert (signed_type_for (type), temp);
5417
5418 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5419 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5420 if (mask != 0)
5421 temp = const_binop (BIT_AND_EXPR, temp,
5422 fold_convert (TREE_TYPE (c), mask));
5423 /* If necessary, convert the type back to match the type of C. */
5424 if (TYPE_UNSIGNED (type))
5425 temp = fold_convert (type, temp);
5426
5427 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5428 }
5429 \f
5430 /* For an expression that has the form
5431 (A && B) || ~B
5432 or
5433 (A || B) && ~B,
5434 we can drop one of the inner expressions and simplify to
5435 A || ~B
5436 or
5437 A && ~B
5438 LOC is the location of the resulting expression. OP is the inner
5439 logical operation (the left-hand side in the examples above), while
5440 CMPOP is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5441 removing a condition that guards another, as in
5442 (A != NULL && A->...) || A == NULL
5443 which we must not transform. If RHS_ONLY is true, only eliminate the
5444 right-most operand of the inner logical operation. */
5445
5446 static tree
5447 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5448 bool rhs_only)
5449 {
5450 tree type = TREE_TYPE (cmpop);
5451 enum tree_code code = TREE_CODE (cmpop);
5452 enum tree_code truthop_code = TREE_CODE (op);
5453 tree lhs = TREE_OPERAND (op, 0);
5454 tree rhs = TREE_OPERAND (op, 1);
5455 tree orig_lhs = lhs, orig_rhs = rhs;
5456 enum tree_code rhs_code = TREE_CODE (rhs);
5457 enum tree_code lhs_code = TREE_CODE (lhs);
5458 enum tree_code inv_code;
5459
5460 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5461 return NULL_TREE;
5462
5463 if (TREE_CODE_CLASS (code) != tcc_comparison)
5464 return NULL_TREE;
5465
5466 if (rhs_code == truthop_code)
5467 {
5468 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5469 if (newrhs != NULL_TREE)
5470 {
5471 rhs = newrhs;
5472 rhs_code = TREE_CODE (rhs);
5473 }
5474 }
5475 if (lhs_code == truthop_code && !rhs_only)
5476 {
5477 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5478 if (newlhs != NULL_TREE)
5479 {
5480 lhs = newlhs;
5481 lhs_code = TREE_CODE (lhs);
5482 }
5483 }
5484
5485 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5486 if (inv_code == rhs_code
5487 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5488 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5489 return lhs;
5490 if (!rhs_only && inv_code == lhs_code
5491 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5492 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5493 return rhs;
5494 if (rhs != orig_rhs || lhs != orig_lhs)
5495 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5496 lhs, rhs);
5497 return NULL_TREE;
5498 }
5499
5500 /* Find ways of folding logical expressions of LHS and RHS:
5501 Try to merge two comparisons to the same innermost item.
5502 Look for range tests like "ch >= '0' && ch <= '9'".
5503 Look for combinations of simple terms on machines with expensive branches
5504 and evaluate the RHS unconditionally.
5505
5506 For example, if we have p->a == 2 && p->b == 4 and we can make an
5507 object large enough to span both A and B, we can do this with a comparison
5508 against the object ANDed with a mask.
5509
5510 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5511 operations to do this with one comparison.
5512
5513 We check for both normal comparisons and the BIT_AND_EXPRs made by
5514 this function and the one above.
5515
5516 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5517 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5518
5519 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5520 two operands.
5521
5522 We return the simplified tree or 0 if no optimization is possible. */
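/* A sketch of the kind of merge performed here, assuming a
   little-endian target where both bit-fields land in one byte:

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test p->a == 2 && p->b == 4 may become a single load and
   compare along the lines of

     *(unsigned char *) p == 0x42

   with a BIT_AND_EXPR mask inserted when the fields do not cover
   the whole of the loaded word.  */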
5523
5524 static tree
5525 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5526 tree lhs, tree rhs)
5527 {
5528 /* If this is the "or" of two comparisons, we can do something if
5529 the comparisons are NE_EXPR. If this is the "and", we can do something
5530 if the comparisons are EQ_EXPR. I.e.,
5531 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5532
5533 WANTED_CODE is the comparison code we want to end up with. For single bit fields, we can
5534 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5535 comparison for one-bit fields. */
5536
5537 enum tree_code wanted_code;
5538 enum tree_code lcode, rcode;
5539 tree ll_arg, lr_arg, rl_arg, rr_arg;
5540 tree ll_inner, lr_inner, rl_inner, rr_inner;
5541 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5542 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5543 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5544 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5545 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5546 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5547 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5548 machine_mode lnmode, rnmode;
5549 tree ll_mask, lr_mask, rl_mask, rr_mask;
5550 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5551 tree l_const, r_const;
5552 tree lntype, rntype, result;
5553 HOST_WIDE_INT first_bit, end_bit;
5554 int volatilep;
5555
5556 /* Start by getting the comparison codes. Fail if anything is volatile.
5557 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5558 it were surrounded with a NE_EXPR. */
5559
5560 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5561 return 0;
5562
5563 lcode = TREE_CODE (lhs);
5564 rcode = TREE_CODE (rhs);
5565
5566 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5567 {
5568 lhs = build2 (NE_EXPR, truth_type, lhs,
5569 build_int_cst (TREE_TYPE (lhs), 0));
5570 lcode = NE_EXPR;
5571 }
5572
5573 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5574 {
5575 rhs = build2 (NE_EXPR, truth_type, rhs,
5576 build_int_cst (TREE_TYPE (rhs), 0));
5577 rcode = NE_EXPR;
5578 }
5579
5580 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5581 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5582 return 0;
5583
5584 ll_arg = TREE_OPERAND (lhs, 0);
5585 lr_arg = TREE_OPERAND (lhs, 1);
5586 rl_arg = TREE_OPERAND (rhs, 0);
5587 rr_arg = TREE_OPERAND (rhs, 1);
5588
5589 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5590 if (simple_operand_p (ll_arg)
5591 && simple_operand_p (lr_arg))
5592 {
5593 if (operand_equal_p (ll_arg, rl_arg, 0)
5594 && operand_equal_p (lr_arg, rr_arg, 0))
5595 {
5596 result = combine_comparisons (loc, code, lcode, rcode,
5597 truth_type, ll_arg, lr_arg);
5598 if (result)
5599 return result;
5600 }
5601 else if (operand_equal_p (ll_arg, rr_arg, 0)
5602 && operand_equal_p (lr_arg, rl_arg, 0))
5603 {
5604 result = combine_comparisons (loc, code, lcode,
5605 swap_tree_comparison (rcode),
5606 truth_type, ll_arg, lr_arg);
5607 if (result)
5608 return result;
5609 }
5610 }
5611
5612 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5613 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5614
5615 /* If the RHS can be evaluated unconditionally and its operands are
5616 simple, it wins to evaluate the RHS unconditionally on machines
5617 with expensive branches. In this case, this isn't a comparison
5618 that can be merged. */
5619
5620 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5621 false) >= 2
5622 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5623 && simple_operand_p (rl_arg)
5624 && simple_operand_p (rr_arg))
5625 {
5626 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5627 if (code == TRUTH_OR_EXPR
5628 && lcode == NE_EXPR && integer_zerop (lr_arg)
5629 && rcode == NE_EXPR && integer_zerop (rr_arg)
5630 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5631 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5632 return build2_loc (loc, NE_EXPR, truth_type,
5633 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5634 ll_arg, rl_arg),
5635 build_int_cst (TREE_TYPE (ll_arg), 0));
5636
5637 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5638 if (code == TRUTH_AND_EXPR
5639 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5640 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5641 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5642 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5643 return build2_loc (loc, EQ_EXPR, truth_type,
5644 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5645 ll_arg, rl_arg),
5646 build_int_cst (TREE_TYPE (ll_arg), 0));
5647 }
5648
5649 /* See if the comparisons can be merged. Then get all the parameters for
5650 each side. */
5651
5652 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5653 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5654 return 0;
5655
5656 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5657 volatilep = 0;
5658 ll_inner = decode_field_reference (loc, ll_arg,
5659 &ll_bitsize, &ll_bitpos, &ll_mode,
5660 &ll_unsignedp, &ll_reversep, &volatilep,
5661 &ll_mask, &ll_and_mask);
5662 lr_inner = decode_field_reference (loc, lr_arg,
5663 &lr_bitsize, &lr_bitpos, &lr_mode,
5664 &lr_unsignedp, &lr_reversep, &volatilep,
5665 &lr_mask, &lr_and_mask);
5666 rl_inner = decode_field_reference (loc, rl_arg,
5667 &rl_bitsize, &rl_bitpos, &rl_mode,
5668 &rl_unsignedp, &rl_reversep, &volatilep,
5669 &rl_mask, &rl_and_mask);
5670 rr_inner = decode_field_reference (loc, rr_arg,
5671 &rr_bitsize, &rr_bitpos, &rr_mode,
5672 &rr_unsignedp, &rr_reversep, &volatilep,
5673 &rr_mask, &rr_and_mask);
5674
5675 /* The inner operation on the lhs of each comparison must be the same
5676 if we are to be able to do anything.
5677 Then see if we have constants. If not, the same must be true for
5678 the rhs's. */
5679 if (volatilep
5680 || ll_reversep != rl_reversep
5681 || ll_inner == 0 || rl_inner == 0
5682 || ! operand_equal_p (ll_inner, rl_inner, 0))
5683 return 0;
5684
5685 if (TREE_CODE (lr_arg) == INTEGER_CST
5686 && TREE_CODE (rr_arg) == INTEGER_CST)
5687 {
5688 l_const = lr_arg, r_const = rr_arg;
5689 lr_reversep = ll_reversep;
5690 }
5691 else if (lr_reversep != rr_reversep
5692 || lr_inner == 0 || rr_inner == 0
5693 || ! operand_equal_p (lr_inner, rr_inner, 0))
5694 return 0;
5695 else
5696 l_const = r_const = 0;
5697
5698 /* If either comparison code is not correct for our logical operation,
5699 fail. However, we can convert a one-bit comparison against zero into
5700 the opposite comparison against that bit being set in the field. */
5701
5702 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5703 if (lcode != wanted_code)
5704 {
5705 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5706 {
5707 /* Make the left operand unsigned, since we are only interested
5708 in the value of one bit. Otherwise we are doing the wrong
5709 thing below. */
5710 ll_unsignedp = 1;
5711 l_const = ll_mask;
5712 }
5713 else
5714 return 0;
5715 }
5716
5717 /* This is analogous to the code for l_const above. */
5718 if (rcode != wanted_code)
5719 {
5720 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5721 {
5722 rl_unsignedp = 1;
5723 r_const = rl_mask;
5724 }
5725 else
5726 return 0;
5727 }
5728
5729 /* See if we can find a mode that contains both fields being compared on
5730 the left. If we can't, fail. Otherwise, update all constants and masks
5731 to be relative to a field of that size. */
5732 first_bit = MIN (ll_bitpos, rl_bitpos);
5733 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5734 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5735 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5736 volatilep);
5737 if (lnmode == VOIDmode)
5738 return 0;
5739
5740 lnbitsize = GET_MODE_BITSIZE (lnmode);
5741 lnbitpos = first_bit & ~ (lnbitsize - 1);
5742 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5743 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5744
5745 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5746 {
5747 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5748 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5749 }
5750
5751 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5752 size_int (xll_bitpos));
5753 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5754 size_int (xrl_bitpos));
5755
5756 if (l_const)
5757 {
5758 l_const = fold_convert_loc (loc, lntype, l_const);
5759 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5760 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5761 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5762 fold_build1_loc (loc, BIT_NOT_EXPR,
5763 lntype, ll_mask))))
5764 {
5765 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5766
5767 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5768 }
5769 }
5770 if (r_const)
5771 {
5772 r_const = fold_convert_loc (loc, lntype, r_const);
5773 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5774 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5775 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5776 fold_build1_loc (loc, BIT_NOT_EXPR,
5777 lntype, rl_mask))))
5778 {
5779 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5780
5781 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5782 }
5783 }
5784
5785 /* If the right sides are not constant, do the same for them. Also,
5786 disallow this optimization if a size or signedness mismatch occurs
5787 between the left and right sides. */
5788 if (l_const == 0)
5789 {
5790 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5791 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5792 /* Make sure the two fields on the right
5793 correspond to the left without being swapped. */
5794 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5795 return 0;
5796
5797 first_bit = MIN (lr_bitpos, rr_bitpos);
5798 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5799 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5800 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5801 volatilep);
5802 if (rnmode == VOIDmode)
5803 return 0;
5804
5805 rnbitsize = GET_MODE_BITSIZE (rnmode);
5806 rnbitpos = first_bit & ~ (rnbitsize - 1);
5807 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5808 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5809
5810 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5811 {
5812 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5813 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5814 }
5815
5816 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5817 rntype, lr_mask),
5818 size_int (xlr_bitpos));
5819 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5820 rntype, rr_mask),
5821 size_int (xrr_bitpos));
5822
5823 /* Make a mask that corresponds to both fields being compared.
5824 Do this for both items being compared. If the operands are the
5825 same size and the bits being compared are in the same position
5826 then we can do this by masking both and comparing the masked
5827 results. */
5828 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5829 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5830 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5831 {
5832 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5833 ll_unsignedp || rl_unsignedp, ll_reversep);
5834 if (! all_ones_mask_p (ll_mask, lnbitsize))
5835 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5836
5837 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5838 lr_unsignedp || rr_unsignedp, lr_reversep);
5839 if (! all_ones_mask_p (lr_mask, rnbitsize))
5840 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5841
5842 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5843 }
5844
5845 /* There is still another way we can do something: If both pairs of
5846 fields being compared are adjacent, we may be able to make a wider
5847 field containing them both.
5848
5849 Note that we still must mask the lhs/rhs expressions. Furthermore,
5850 the mask must be shifted to account for the shift done by
5851 make_bit_field_ref. */
5852 if ((ll_bitsize + ll_bitpos == rl_bitpos
5853 && lr_bitsize + lr_bitpos == rr_bitpos)
5854 || (ll_bitpos == rl_bitpos + rl_bitsize
5855 && lr_bitpos == rr_bitpos + rr_bitsize))
5856 {
5857 tree type;
5858
5859 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5860 ll_bitsize + rl_bitsize,
5861 MIN (ll_bitpos, rl_bitpos),
5862 ll_unsignedp, ll_reversep);
5863 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5864 lr_bitsize + rr_bitsize,
5865 MIN (lr_bitpos, rr_bitpos),
5866 lr_unsignedp, lr_reversep);
5867
5868 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5869 size_int (MIN (xll_bitpos, xrl_bitpos)));
5870 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5871 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5872
5873 /* Convert to the smaller type before masking out unwanted bits. */
5874 type = lntype;
5875 if (lntype != rntype)
5876 {
5877 if (lnbitsize > rnbitsize)
5878 {
5879 lhs = fold_convert_loc (loc, rntype, lhs);
5880 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5881 type = rntype;
5882 }
5883 else if (lnbitsize < rnbitsize)
5884 {
5885 rhs = fold_convert_loc (loc, lntype, rhs);
5886 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5887 type = lntype;
5888 }
5889 }
5890
5891 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5892 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5893
5894 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5895 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5896
5897 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5898 }
5899
5900 return 0;
5901 }
5902
5903 /* Handle the case of comparisons with constants. If there is something in
5904 common between the masks, those bits of the constants must be the same.
5905 If not, the condition is always false. Test for this to avoid generating
5906 incorrect code below. */
5907 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5908 if (! integer_zerop (result)
5909 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5910 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5911 {
5912 if (wanted_code == NE_EXPR)
5913 {
5914 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5915 return constant_boolean_node (true, truth_type);
5916 }
5917 else
5918 {
5919 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5920 return constant_boolean_node (false, truth_type);
5921 }
5922 }
5923
5924 /* Construct the expression we will return. First get the component
5925 reference we will make. Unless the mask is all ones for the width of
5926 that field, perform the mask operation. Then compare with the
5927 merged constant. */
5928 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5929 ll_unsignedp || rl_unsignedp, ll_reversep);
5930
5931 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5932 if (! all_ones_mask_p (ll_mask, lnbitsize))
5933 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5934
5935 return build2_loc (loc, wanted_code, truth_type, result,
5936 const_binop (BIT_IOR_EXPR, l_const, r_const));
5937 }
5938 \f
5939 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5940 constant. */
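/* For instance, MIN (X, 4) > 7 can never hold and folds to false,
   while MAX (X, 4) > 7 holds exactly when X > 7; the cases below
   reduce everything to EQ_EXPR and GT_EXPR forms like these.  */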
5941
5942 static tree
5943 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5944 tree op0, tree op1)
5945 {
5946 tree arg0 = op0;
5947 enum tree_code op_code;
5948 tree comp_const;
5949 tree minmax_const;
5950 int consts_equal, consts_lt;
5951 tree inner;
5952
5953 STRIP_SIGN_NOPS (arg0);
5954
5955 op_code = TREE_CODE (arg0);
5956 minmax_const = TREE_OPERAND (arg0, 1);
5957 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5958 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5959 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5960 inner = TREE_OPERAND (arg0, 0);
5961
5962 /* If something does not permit us to optimize, give up by returning NULL_TREE. */
5963 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5964 || TREE_CODE (comp_const) != INTEGER_CST
5965 || TREE_OVERFLOW (comp_const)
5966 || TREE_CODE (minmax_const) != INTEGER_CST
5967 || TREE_OVERFLOW (minmax_const))
5968 return NULL_TREE;
5969
5970 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5971 and GT_EXPR, doing the rest with recursive calls using logical
5972 simplifications. */
5973 switch (code)
5974 {
5975 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5976 {
5977 tree tem
5978 = optimize_minmax_comparison (loc,
5979 invert_tree_comparison (code, false),
5980 type, op0, op1);
5981 if (tem)
5982 return invert_truthvalue_loc (loc, tem);
5983 return NULL_TREE;
5984 }
5985
5986 case GE_EXPR:
5987 return
5988 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5989 optimize_minmax_comparison
5990 (loc, EQ_EXPR, type, arg0, comp_const),
5991 optimize_minmax_comparison
5992 (loc, GT_EXPR, type, arg0, comp_const));
5993
5994 case EQ_EXPR:
5995 if (op_code == MAX_EXPR && consts_equal)
5996 /* MAX (X, 0) == 0 -> X <= 0 */
5997 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5998
5999 else if (op_code == MAX_EXPR && consts_lt)
6000 /* MAX (X, 0) == 5 -> X == 5 */
6001 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6002
6003 else if (op_code == MAX_EXPR)
6004 /* MAX (X, 0) == -1 -> false */
6005 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6006
6007 else if (consts_equal)
6008 /* MIN (X, 0) == 0 -> X >= 0 */
6009 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6010
6011 else if (consts_lt)
6012 /* MIN (X, 0) == 5 -> false */
6013 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6014
6015 else
6016 /* MIN (X, 0) == -1 -> X == -1 */
6017 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6018
6019 case GT_EXPR:
6020 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6021 /* MAX (X, 0) > 0 -> X > 0
6022 MAX (X, 0) > 5 -> X > 5 */
6023 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6024
6025 else if (op_code == MAX_EXPR)
6026 /* MAX (X, 0) > -1 -> true */
6027 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6028
6029 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6030 /* MIN (X, 0) > 0 -> false
6031 MIN (X, 0) > 5 -> false */
6032 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6033
6034 else
6035 /* MIN (X, 0) > -1 -> X > -1 */
6036 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6037
6038 default:
6039 return NULL_TREE;
6040 }
6041 }
6042 \f
6043 /* T is an integer expression that is being multiplied by, divided by, or
6044 taken modulo a constant C (CODE says which operation and what kind of
6045 divide or modulus). See if we can eliminate that operation by folding it with
6046 other operations already in T. WIDE_TYPE, if non-null, is a type that
6047 should be used for the computation if wider than our type.
6048
6049 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6050 (X * 2) + (Y * 4). We must, however, be assured that either the original
6051 expression would not overflow or that overflow is undefined for the type
6052 in the language in question.
6053
6054 If we return a non-null expression, it is an equivalent form of the
6055 original computation, but need not be in the original type.
6056
6057 We set *STRICT_OVERFLOW_P to true if the return value depends on
6058 signed overflow being undefined. Otherwise we do not change
6059 *STRICT_OVERFLOW_P. */
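/* A further illustration: for signed X with undefined overflow,
   (X * 4) / 2 can be rewritten as X * 2. That rewrite is only valid
   when X * 4 does not overflow, which is why *STRICT_OVERFLOW_P is
   set in such cases.  */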
6060
6061 static tree
6062 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6063 bool *strict_overflow_p)
6064 {
6065 /* To avoid exponential search depth, refuse to allow recursion past
6066 three levels. Beyond that (1) it's highly unlikely that we'll find
6067 something interesting and (2) we've probably processed it before
6068 when we built the inner expression. */
6069
6070 static int depth;
6071 tree ret;
6072
6073 if (depth > 3)
6074 return NULL;
6075
6076 depth++;
6077 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6078 depth--;
6079
6080 return ret;
6081 }
6082
6083 static tree
6084 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6085 bool *strict_overflow_p)
6086 {
6087 tree type = TREE_TYPE (t);
6088 enum tree_code tcode = TREE_CODE (t);
6089 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6090 > GET_MODE_SIZE (TYPE_MODE (type)))
6091 ? wide_type : type);
6092 tree t1, t2;
6093 int same_p = tcode == code;
6094 tree op0 = NULL_TREE, op1 = NULL_TREE;
6095 bool sub_strict_overflow_p;
6096
6097 /* Don't deal with constants of zero here; they confuse the code below. */
6098 if (integer_zerop (c))
6099 return NULL_TREE;
6100
6101 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6102 op0 = TREE_OPERAND (t, 0);
6103
6104 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6105 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6106
6107 /* Note that we need not handle conditional operations here since fold
6108 already handles those cases. So just do arithmetic here. */
6109 switch (tcode)
6110 {
6111 case INTEGER_CST:
6112 /* For a constant, we can always simplify if we are a multiply
6113 or (for divide and modulus) if it is a multiple of our constant. */
6114 if (code == MULT_EXPR
6115 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6116 {
6117 tree tem = const_binop (code, fold_convert (ctype, t),
6118 fold_convert (ctype, c));
6119 /* If the multiplication overflowed, we lost information on it.
6120 See PR68142 and PR69845. */
6121 if (TREE_OVERFLOW (tem))
6122 return NULL_TREE;
6123 return tem;
6124 }
6125 break;
6126
6127 CASE_CONVERT: case NON_LVALUE_EXPR:
6128 /* If op0 is an expression ... */
6129 if ((COMPARISON_CLASS_P (op0)
6130 || UNARY_CLASS_P (op0)
6131 || BINARY_CLASS_P (op0)
6132 || VL_EXP_CLASS_P (op0)
6133 || EXPRESSION_CLASS_P (op0))
6134 /* ... and has wrapping overflow, and its type is smaller
6135 than ctype, then we cannot pass through as widening. */
6136 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6137 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6138 && (TYPE_PRECISION (ctype)
6139 > TYPE_PRECISION (TREE_TYPE (op0))))
6140 /* ... or this is a truncation (t is narrower than op0),
6141 then we cannot pass through this narrowing. */
6142 || (TYPE_PRECISION (type)
6143 < TYPE_PRECISION (TREE_TYPE (op0)))
6144 /* ... or signedness changes for division or modulus,
6145 then we cannot pass through this conversion. */
6146 || (code != MULT_EXPR
6147 && (TYPE_UNSIGNED (ctype)
6148 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6149 /* ... or has undefined overflow while the converted to
6150 type has not, we cannot do the operation in the inner type
6151 as that would introduce undefined overflow. */
6152 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6153 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6154 && !TYPE_OVERFLOW_UNDEFINED (type))))
6155 break;
6156
6157 /* Pass the constant down and see if we can make a simplification. If
6158 we can, replace this expression with the inner simplification for
6159 possible later conversion to our or some other type. */
6160 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6161 && TREE_CODE (t2) == INTEGER_CST
6162 && !TREE_OVERFLOW (t2)
6163 && (0 != (t1 = extract_muldiv (op0, t2, code,
6164 code == MULT_EXPR
6165 ? ctype : NULL_TREE,
6166 strict_overflow_p))))
6167 return t1;
6168 break;
6169
6170 case ABS_EXPR:
6171 /* If widening the type changes it from signed to unsigned, then we
6172 must avoid building ABS_EXPR itself as unsigned. */
6173 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6174 {
6175 tree cstype = (*signed_type_for) (ctype);
6176 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6177 != 0)
6178 {
6179 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6180 return fold_convert (ctype, t1);
6181 }
6182 break;
6183 }
6184 /* If the constant is negative, we cannot simplify this. */
6185 if (tree_int_cst_sgn (c) == -1)
6186 break;
6187 /* FALLTHROUGH */
6188 case NEGATE_EXPR:
6189 /* For division and modulus, type can't be unsigned, as e.g.
6190 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6191 For signed types, even with wrapping overflow, this is fine. */
6192 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6193 break;
6194 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6195 != 0)
6196 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6197 break;
6198
6199 case MIN_EXPR: case MAX_EXPR:
6200 /* If widening the type changes the signedness, then we can't perform
6201 this optimization as that changes the result. */
6202 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6203 break;
6204
6205 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6206 sub_strict_overflow_p = false;
6207 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6208 &sub_strict_overflow_p)) != 0
6209 && (t2 = extract_muldiv (op1, c, code, wide_type,
6210 &sub_strict_overflow_p)) != 0)
6211 {
6212 if (tree_int_cst_sgn (c) < 0)
6213 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6214 if (sub_strict_overflow_p)
6215 *strict_overflow_p = true;
6216 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6217 fold_convert (ctype, t2));
6218 }
6219 break;
6220
6221 case LSHIFT_EXPR: case RSHIFT_EXPR:
6222 /* If the second operand is constant, this is a multiplication
6223 or floor division by a power of two, so we can treat it that
6224 way unless the multiplier or divisor overflows. Signed
6225 left-shift overflow is implementation-defined rather than
6226 undefined in C90, so do not convert signed left shift into
6227 multiplication. */
6228 if (TREE_CODE (op1) == INTEGER_CST
6229 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6230 /* const_binop may not detect overflow correctly,
6231 so check for it explicitly here. */
6232 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6233 && 0 != (t1 = fold_convert (ctype,
6234 const_binop (LSHIFT_EXPR,
6235 size_one_node,
6236 op1)))
6237 && !TREE_OVERFLOW (t1))
6238 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6239 ? MULT_EXPR : FLOOR_DIV_EXPR,
6240 ctype,
6241 fold_convert (ctype, op0),
6242 t1),
6243 c, code, wide_type, strict_overflow_p);
6244 break;
6245
6246 case PLUS_EXPR: case MINUS_EXPR:
6247 /* See if we can eliminate the operation on both sides. If we can, we
6248 can return a new PLUS or MINUS. If we can't, the only remaining
6249 cases where we can do anything are if the second operand is a
6250 constant. */
6251 sub_strict_overflow_p = false;
6252 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6253 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6254 if (t1 != 0 && t2 != 0
6255 && (code == MULT_EXPR
6256 /* If not multiplication, we can only do this if both operands
6257 are divisible by c. */
6258 || (multiple_of_p (ctype, op0, c)
6259 && multiple_of_p (ctype, op1, c))))
6260 {
6261 if (sub_strict_overflow_p)
6262 *strict_overflow_p = true;
6263 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6264 fold_convert (ctype, t2));
6265 }
6266
6267 /* If this was a subtraction, negate OP1 and set it to be an addition.
6268 This simplifies the logic below. */
6269 if (tcode == MINUS_EXPR)
6270 {
6271 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6272 /* If OP1 was not easily negatable, the constant may be OP0. */
6273 if (TREE_CODE (op0) == INTEGER_CST)
6274 {
6275 std::swap (op0, op1);
6276 std::swap (t1, t2);
6277 }
6278 }
6279
6280 if (TREE_CODE (op1) != INTEGER_CST)
6281 break;
6282
6283 /* If either OP1 or C is negative, this optimization is not safe for
6284 some of the division and remainder types while for others we need
6285 to change the code. */
6286 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6287 {
6288 if (code == CEIL_DIV_EXPR)
6289 code = FLOOR_DIV_EXPR;
6290 else if (code == FLOOR_DIV_EXPR)
6291 code = CEIL_DIV_EXPR;
6292 else if (code != MULT_EXPR
6293 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6294 break;
6295 }
6296
6297 /* If it's a multiply or a division/modulus operation of a multiple
6298 of our constant, do the operation and verify it doesn't overflow. */
6299 if (code == MULT_EXPR
6300 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6301 {
6302 op1 = const_binop (code, fold_convert (ctype, op1),
6303 fold_convert (ctype, c));
6304 /* We allow the constant to overflow with wrapping semantics. */
6305 if (op1 == 0
6306 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6307 break;
6308 }
6309 else
6310 break;
6311
6312 /* If we have an unsigned type, we cannot widen the operation since it
6313 will change the result if the original computation overflowed. */
6314 if (TYPE_UNSIGNED (ctype) && ctype != type)
6315 break;
6316
6317 /* If we were able to eliminate our operation from the first side,
6318 apply our operation to the second side and reform the PLUS. */
6319 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6320 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6321
6322 /* The last case is if we are a multiply. In that case, we can
6323 apply the distributive law to commute the multiply and addition
6324 if the multiplication of the constants doesn't overflow
6325 and overflow is defined. With undefined overflow
6326 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6327 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6328 return fold_build2 (tcode, ctype,
6329 fold_build2 (code, ctype,
6330 fold_convert (ctype, op0),
6331 fold_convert (ctype, c)),
6332 op1);
6333
6334 break;
6335
6336 case MULT_EXPR:
6337 /* We have a special case here if we are doing something like
6338 (C * 8) % 4 since we know that's zero. */
6339 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6340 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6341 /* If the multiplication can overflow we cannot optimize this. */
6342 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6343 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6344 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6345 {
6346 *strict_overflow_p = true;
6347 return omit_one_operand (type, integer_zero_node, op0);
6348 }
6349
6350 /* ... fall through ... */
6351
6352 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6353 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6354 /* If we can extract our operation from the LHS, do so and return a
6355 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6356 do something only if the second operand is a constant. */
6357 if (same_p
6358 && (t1 = extract_muldiv (op0, c, code, wide_type,
6359 strict_overflow_p)) != 0)
6360 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6361 fold_convert (ctype, op1));
6362 else if (tcode == MULT_EXPR && code == MULT_EXPR
6363 && (t1 = extract_muldiv (op1, c, code, wide_type,
6364 strict_overflow_p)) != 0)
6365 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6366 fold_convert (ctype, t1));
6367 else if (TREE_CODE (op1) != INTEGER_CST)
6368 return 0;
6369
6370 /* If these are the same operation types, we can associate them
6371 assuming no overflow. */
6372 if (tcode == code)
6373 {
6374 bool overflow_p = false;
6375 bool overflow_mul_p;
6376 signop sign = TYPE_SIGN (ctype);
6377 unsigned prec = TYPE_PRECISION (ctype);
6378 wide_int mul = wi::mul (wide_int::from (op1, prec,
6379 TYPE_SIGN (TREE_TYPE (op1))),
6380 wide_int::from (c, prec,
6381 TYPE_SIGN (TREE_TYPE (c))),
6382 sign, &overflow_mul_p);
6383 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6384 if (overflow_mul_p
6385 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6386 overflow_p = true;
6387 if (!overflow_p)
6388 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6389 wide_int_to_tree (ctype, mul));
6390 }
6391
6392 /* If these operations "cancel" each other, we have the main
6393 optimizations of this pass, which occur when either constant is a
6394 multiple of the other, in which case we replace this with an operation
6395 of either CODE or TCODE.
6396
6397 If we have an unsigned type, we cannot do this since it will change
6398 the result if the original computation overflowed. */
6399 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6400 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6401 || (tcode == MULT_EXPR
6402 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6403 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6404 && code != MULT_EXPR)))
6405 {
6406 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6407 {
6408 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6409 *strict_overflow_p = true;
6410 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6411 fold_convert (ctype,
6412 const_binop (TRUNC_DIV_EXPR,
6413 op1, c)));
6414 }
6415 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6416 {
6417 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6418 *strict_overflow_p = true;
6419 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6420 fold_convert (ctype,
6421 const_binop (TRUNC_DIV_EXPR,
6422 c, op1)));
6423 }
6424 }
6425 break;
6426
6427 default:
6428 break;
6429 }
6430
6431 return 0;
6432 }
6433 \f
6434 /* Return a node which has the indicated constant VALUE (either 0 or
6435 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6436 and is of the indicated TYPE. */
6437
6438 tree
6439 constant_boolean_node (bool value, tree type)
6440 {
6441 if (type == integer_type_node)
6442 return value ? integer_one_node : integer_zero_node;
6443 else if (type == boolean_type_node)
6444 return value ? boolean_true_node : boolean_false_node;
6445 else if (TREE_CODE (type) == VECTOR_TYPE)
6446 return build_vector_from_val (type,
6447 build_int_cst (TREE_TYPE (type),
6448 value ? -1 : 0));
6449 else
6450 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6451 }
6452
6453
6454 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6455 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6456 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6457 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6458 COND is the first argument to CODE; otherwise (as in the example
6459 given here), it is the second argument. TYPE is the type of the
6460 original expression. Return NULL_TREE if no simplification is
6461 possible. */
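/* As a concrete instance of the second form above: when A is the
   constant 2, 2 + (x < y) becomes (x < y) ? 3 : 2, with both arms
   folding to constants.  */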
6462
6463 static tree
6464 fold_binary_op_with_conditional_arg (location_t loc,
6465 enum tree_code code,
6466 tree type, tree op0, tree op1,
6467 tree cond, tree arg, int cond_first_p)
6468 {
6469 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6470 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6471 tree test, true_value, false_value;
6472 tree lhs = NULL_TREE;
6473 tree rhs = NULL_TREE;
6474 enum tree_code cond_code = COND_EXPR;
6475
6476 if (TREE_CODE (cond) == COND_EXPR
6477 || TREE_CODE (cond) == VEC_COND_EXPR)
6478 {
6479 test = TREE_OPERAND (cond, 0);
6480 true_value = TREE_OPERAND (cond, 1);
6481 false_value = TREE_OPERAND (cond, 2);
6482 /* If an arm of the conditional has void type (as a throw expression
6483 does), it does not make sense to try to perform a logical or
6484 arithmetic operation involving it. */
6485 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6486 lhs = true_value;
6487 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6488 rhs = false_value;
6489 }
6490 else if (!(TREE_CODE (type) != VECTOR_TYPE
6491 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6492 {
6493 tree testtype = TREE_TYPE (cond);
6494 test = cond;
6495 true_value = constant_boolean_node (true, testtype);
6496 false_value = constant_boolean_node (false, testtype);
6497 }
6498 else
6499 /* Detect the case of mixing vector and scalar types - bail out. */
6500 return NULL_TREE;
6501
6502 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6503 cond_code = VEC_COND_EXPR;
6504
6505 /* This transformation is only worthwhile if we don't have to wrap ARG
6506 in a SAVE_EXPR and the operation can be simplified without recursing
6507 on at least one of the branches once it's pushed inside the COND_EXPR. */
6508 if (!TREE_CONSTANT (arg)
6509 && (TREE_SIDE_EFFECTS (arg)
6510 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6511 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6512 return NULL_TREE;
6513
6514 arg = fold_convert_loc (loc, arg_type, arg);
6515 if (lhs == 0)
6516 {
6517 true_value = fold_convert_loc (loc, cond_type, true_value);
6518 if (cond_first_p)
6519 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6520 else
6521 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6522 }
6523 if (rhs == 0)
6524 {
6525 false_value = fold_convert_loc (loc, cond_type, false_value);
6526 if (cond_first_p)
6527 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6528 else
6529 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6530 }
6531
6532 /* Check that we have simplified at least one of the branches. */
6533 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6534 return NULL_TREE;
6535
6536 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6537 }
6538
6539 \f
6540 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6541
6542 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6543 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6544 ADDEND is the same as X.
6545
6546 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6547 and finite. The problematic cases are when X is zero, and its mode
6548 has signed zeros. In the case of rounding towards -infinity,
6549 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6550 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6551
6552 bool
6553 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6554 {
6555 if (!real_zerop (addend))
6556 return false;
6557
6558 /* Don't allow the fold with -fsignaling-nans. */
6559 if (HONOR_SNANS (element_mode (type)))
6560 return false;
6561
6562 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6563 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6564 return true;
6565
6566 /* In a vector or complex, we would need to check the sign of all zeros. */
6567 if (TREE_CODE (addend) != REAL_CST)
6568 return false;
6569
6570 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6571 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6572 negate = !negate;
6573
6574 /* The mode has signed zeros, and we have to honor their sign.
6575 In this situation, there is only one case we can return true for.
6576 X - 0 is the same as X unless rounding towards -infinity is
6577 supported. */
6578 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6579 }
6580
6581 /* Subroutine of fold() that optimizes comparisons of a division by
6582 a nonzero integer constant against an integer constant, i.e.
6583 X/C1 op C2.
6584
6585 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6586 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6587 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6588
6589 The function returns the constant folded tree if a simplification
6590 can be made, and NULL_TREE otherwise. */
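/* For example, with unsigned X the test X / 3 == 2 holds exactly for
   6 <= X && X <= 8, so it can be folded into a range check of the
   shape X - 6 <= 2, computed in the unsigned type.  */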
6591
6592 static tree
6593 fold_div_compare (location_t loc,
6594 enum tree_code code, tree type, tree arg0, tree arg1)
6595 {
6596 tree prod, tmp, hi, lo;
6597 tree arg00 = TREE_OPERAND (arg0, 0);
6598 tree arg01 = TREE_OPERAND (arg0, 1);
6599 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6600 bool neg_overflow = false;
6601 bool overflow;
6602
6603 /* We have to do this the hard way to detect unsigned overflow.
6604 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6605 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6606 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6608
6609 if (sign == UNSIGNED)
6610 {
6611 tmp = int_const_binop (MINUS_EXPR, arg01,
6612 build_int_cst (TREE_TYPE (arg01), 1));
6613 lo = prod;
6614
6615 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6616 val = wi::add (prod, tmp, sign, &overflow);
6617 hi = force_fit_type (TREE_TYPE (arg00), val,
6618 -1, overflow | TREE_OVERFLOW (prod));
6619 }
6620 else if (tree_int_cst_sgn (arg01) >= 0)
6621 {
6622 tmp = int_const_binop (MINUS_EXPR, arg01,
6623 build_int_cst (TREE_TYPE (arg01), 1));
6624 switch (tree_int_cst_sgn (arg1))
6625 {
6626 case -1:
6627 neg_overflow = true;
6628 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6629 hi = prod;
6630 break;
6631
6632 case 0:
6633 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6634 hi = tmp;
6635 break;
6636
6637 case 1:
6638 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6639 lo = prod;
6640 break;
6641
6642 default:
6643 gcc_unreachable ();
6644 }
6645 }
6646 else
6647 {
6648 /* A negative divisor reverses the relational operators. */
6649 code = swap_tree_comparison (code);
6650
6651 tmp = int_const_binop (PLUS_EXPR, arg01,
6652 build_int_cst (TREE_TYPE (arg01), 1));
6653 switch (tree_int_cst_sgn (arg1))
6654 {
6655 case -1:
6656 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6657 lo = prod;
6658 break;
6659
6660 case 0:
6661 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6662 lo = tmp;
6663 break;
6664
6665 case 1:
6666 neg_overflow = true;
6667 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6668 hi = prod;
6669 break;
6670
6671 default:
6672 gcc_unreachable ();
6673 }
6674 }
6675
6676 switch (code)
6677 {
6678 case EQ_EXPR:
6679 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6680 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6681 if (TREE_OVERFLOW (hi))
6682 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6683 if (TREE_OVERFLOW (lo))
6684 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6685 return build_range_check (loc, type, arg00, 1, lo, hi);
6686
6687 case NE_EXPR:
6688 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6689 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6690 if (TREE_OVERFLOW (hi))
6691 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6692 if (TREE_OVERFLOW (lo))
6693 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6694 return build_range_check (loc, type, arg00, 0, lo, hi);
6695
6696 case LT_EXPR:
6697 if (TREE_OVERFLOW (lo))
6698 {
6699 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6700 return omit_one_operand_loc (loc, type, tmp, arg00);
6701 }
6702 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6703
6704 case LE_EXPR:
6705 if (TREE_OVERFLOW (hi))
6706 {
6707 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6708 return omit_one_operand_loc (loc, type, tmp, arg00);
6709 }
6710 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6711
6712 case GT_EXPR:
6713 if (TREE_OVERFLOW (hi))
6714 {
6715 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6716 return omit_one_operand_loc (loc, type, tmp, arg00);
6717 }
6718 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6719
6720 case GE_EXPR:
6721 if (TREE_OVERFLOW (lo))
6722 {
6723 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6724 return omit_one_operand_loc (loc, type, tmp, arg00);
6725 }
6726 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6727
6728 default:
6729 break;
6730 }
6731
6732 return NULL_TREE;
6733 }
6734
6735
6736 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6737 equality/inequality test, then return a simplified form of the test
6738 using a sign test. Otherwise return NULL. TYPE is the desired
6739 result type. */
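/* Concretely, for a 32-bit signed int A, (A & 0x80000000) != 0 tests
   exactly the sign bit and can be folded to A < 0, while
   (A & 0x80000000) == 0 becomes A >= 0.  */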
6740
6741 static tree
6742 fold_single_bit_test_into_sign_test (location_t loc,
6743 enum tree_code code, tree arg0, tree arg1,
6744 tree result_type)
6745 {
6746 /* If this is testing a single bit, we can optimize the test. */
6747 if ((code == NE_EXPR || code == EQ_EXPR)
6748 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6749 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6750 {
6751 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6752 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6753 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6754
6755 if (arg00 != NULL_TREE
6756 /* This is only a win if casting to a signed type is cheap,
6757 i.e. when arg00's type is not a partial mode. */
6758 && TYPE_PRECISION (TREE_TYPE (arg00))
6759 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6760 {
6761 tree stype = signed_type_for (TREE_TYPE (arg00));
6762 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6763 result_type,
6764 fold_convert_loc (loc, stype, arg00),
6765 build_int_cst (stype, 0));
6766 }
6767 }
6768
6769 return NULL_TREE;
6770 }
6771
6772 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6773 equality/inequality test, then return a simplified form of
6774 the test using shifts and logical operations. Otherwise return
6775 NULL. TYPE is the desired result type. */
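/* For instance, (A & 8) != 0 becomes ((A >> 3) & 1), and
   (A & 8) == 0 becomes (((A >> 3) ^ 1) & 1), subject to the
   sign-test shortcut and the signedness choices made below.  */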
6776
6777 tree
6778 fold_single_bit_test (location_t loc, enum tree_code code,
6779 tree arg0, tree arg1, tree result_type)
6780 {
6781 /* If this is testing a single bit, we can optimize the test. */
6782 if ((code == NE_EXPR || code == EQ_EXPR)
6783 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6784 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6785 {
6786 tree inner = TREE_OPERAND (arg0, 0);
6787 tree type = TREE_TYPE (arg0);
6788 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6789 machine_mode operand_mode = TYPE_MODE (type);
6790 int ops_unsigned;
6791 tree signed_type, unsigned_type, intermediate_type;
6792 tree tem, one;
6793
6794 /* First, see if we can fold the single bit test into a sign-bit
6795 test. */
6796 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6797 result_type);
6798 if (tem)
6799 return tem;
6800
6801 /* Otherwise we have (A & C) != 0 where C is a single bit; convert
6802 that into ((A >> C2) & 1), where C2 = log2(C).
6803 Similarly for (A & C) == 0. */
6804
6805 /* If INNER is a right shift by a constant and it plus BITNUM does
6806 not overflow, adjust BITNUM and INNER. */
6807 if (TREE_CODE (inner) == RSHIFT_EXPR
6808 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6809 && bitnum < TYPE_PRECISION (type)
6810 && wi::ltu_p (TREE_OPERAND (inner, 1),
6811 TYPE_PRECISION (type) - bitnum))
6812 {
6813 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6814 inner = TREE_OPERAND (inner, 0);
6815 }
6816
6817 /* If we are going to be able to omit the AND below, we must do our
6818 operations as unsigned. If we must use the AND, we have a choice.
6819 Normally unsigned is faster, but for some machines signed is. */
6820 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6821 && !flag_syntax_only) ? 0 : 1;
6822
6823 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6824 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6825 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6826 inner = fold_convert_loc (loc, intermediate_type, inner);
6827
6828 if (bitnum != 0)
6829 inner = build2 (RSHIFT_EXPR, intermediate_type,
6830 inner, size_int (bitnum));
6831
6832 one = build_int_cst (intermediate_type, 1);
6833
6834 if (code == EQ_EXPR)
6835 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6836
6837 /* Put the AND last so it can combine with more things. */
6838 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6839
6840 /* Make sure to return the proper type. */
6841 inner = fold_convert_loc (loc, result_type, inner);
6842
6843 return inner;
6844 }
6845 return NULL_TREE;
6846 }
6847
6848 /* Check whether we are allowed to reorder operands arg0 and arg1,
6849 such that the evaluation of arg1 occurs before arg0. */
6850
6851 static bool
6852 reorder_operands_p (const_tree arg0, const_tree arg1)
6853 {
6854 if (! flag_evaluation_order)
6855 return true;
6856 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6857 return true;
6858 return ! TREE_SIDE_EFFECTS (arg0)
6859 && ! TREE_SIDE_EFFECTS (arg1);
6860 }
6861
6862 /* Test whether it is preferable to swap two operands, ARG0 and
6863 ARG1, for example because ARG0 is an integer constant and ARG1
6864 isn't. If REORDER is true, only recommend swapping if we can
6865 evaluate the operands in reverse order. */
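/* Callers typically use this to canonicalize commutative expressions,
   e.g. turning 2 + x into x + 2, so that later folders need only
   look for a constant in the second operand.  */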
6866
6867 bool
6868 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6869 {
6870 if (CONSTANT_CLASS_P (arg1))
6871 return 0;
6872 if (CONSTANT_CLASS_P (arg0))
6873 return 1;
6874
6875 STRIP_NOPS (arg0);
6876 STRIP_NOPS (arg1);
6877
6878 if (TREE_CONSTANT (arg1))
6879 return 0;
6880 if (TREE_CONSTANT (arg0))
6881 return 1;
6882
6883 if (reorder && flag_evaluation_order
6884 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6885 return 0;
6886
6887 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6888 for commutative and comparison operators. Ensuring a canonical
6889 form allows the optimizers to find additional redundancies without
6890 having to explicitly check for both orderings. */
6891 if (TREE_CODE (arg0) == SSA_NAME
6892 && TREE_CODE (arg1) == SSA_NAME
6893 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6894 return 1;
6895
6896 /* Put SSA_NAMEs last. */
6897 if (TREE_CODE (arg1) == SSA_NAME)
6898 return 0;
6899 if (TREE_CODE (arg0) == SSA_NAME)
6900 return 1;
6901
6902 /* Put variables last. */
6903 if (DECL_P (arg1))
6904 return 0;
6905 if (DECL_P (arg0))
6906 return 1;
6907
6908 return 0;
6909 }
6910
6911
6912 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6913 means A >= Y && A != MAX, but in this case we know that
6914 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6915
6916 static tree
6917 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6918 {
6919 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6920
6921 if (TREE_CODE (bound) == LT_EXPR)
6922 a = TREE_OPERAND (bound, 0);
6923 else if (TREE_CODE (bound) == GT_EXPR)
6924 a = TREE_OPERAND (bound, 1);
6925 else
6926 return NULL_TREE;
6927
6928 typea = TREE_TYPE (a);
6929 if (!INTEGRAL_TYPE_P (typea)
6930 && !POINTER_TYPE_P (typea))
6931 return NULL_TREE;
6932
6933 if (TREE_CODE (ineq) == LT_EXPR)
6934 {
6935 a1 = TREE_OPERAND (ineq, 1);
6936 y = TREE_OPERAND (ineq, 0);
6937 }
6938 else if (TREE_CODE (ineq) == GT_EXPR)
6939 {
6940 a1 = TREE_OPERAND (ineq, 0);
6941 y = TREE_OPERAND (ineq, 1);
6942 }
6943 else
6944 return NULL_TREE;
6945
6946 if (TREE_TYPE (a1) != typea)
6947 return NULL_TREE;
6948
6949 if (POINTER_TYPE_P (typea))
6950 {
6951 /* Convert the pointer types to integers before taking the difference. */
6952 tree ta = fold_convert_loc (loc, ssizetype, a);
6953 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6954 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6955 }
6956 else
6957 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6958
6959 if (!diff || !integer_onep (diff))
6960 return NULL_TREE;
6961
6962 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6963 }
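
#if 0
/* Editorial example (not GCC code) of why the fold above is safe:
   under the guard A < X, A is at most X - 1 <= MAX - 1, so A + 1
   cannot overflow and the non-sharp form is equivalent.  */
static int example_nonsharp (int a, int x, int y)
{
  /* return a < x && a + 1 > y;     -- original */
  return a < x && a >= y;        /* -- folded  */
}
#endif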
6964
6965 /* Fold a sum or difference of at least one multiplication.
6966 Returns the folded tree or NULL if no simplification could be made. */
6967
6968 static tree
6969 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6970 tree arg0, tree arg1)
6971 {
6972 tree arg00, arg01, arg10, arg11;
6973 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6974
6975 /* (A * C) +- (B * C) -> (A+-B) * C.
6976 (A * C) +- A -> A * (C+-1).
6977 We are most concerned about the case where C is a constant,
6978 but other combinations show up during loop reduction. Since
6979 it is not difficult, try all four possibilities. */
6980
6981 if (TREE_CODE (arg0) == MULT_EXPR)
6982 {
6983 arg00 = TREE_OPERAND (arg0, 0);
6984 arg01 = TREE_OPERAND (arg0, 1);
6985 }
6986 else if (TREE_CODE (arg0) == INTEGER_CST)
6987 {
6988 arg00 = build_one_cst (type);
6989 arg01 = arg0;
6990 }
6991 else
6992 {
6993 /* We cannot generate constant 1 for fract. */
6994 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6995 return NULL_TREE;
6996 arg00 = arg0;
6997 arg01 = build_one_cst (type);
6998 }
6999 if (TREE_CODE (arg1) == MULT_EXPR)
7000 {
7001 arg10 = TREE_OPERAND (arg1, 0);
7002 arg11 = TREE_OPERAND (arg1, 1);
7003 }
7004 else if (TREE_CODE (arg1) == INTEGER_CST)
7005 {
7006 arg10 = build_one_cst (type);
7007 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7008 the purpose of this canonicalization. */
7009 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7010 && negate_expr_p (arg1)
7011 && code == PLUS_EXPR)
7012 {
7013 arg11 = negate_expr (arg1);
7014 code = MINUS_EXPR;
7015 }
7016 else
7017 arg11 = arg1;
7018 }
7019 else
7020 {
7021 /* We cannot generate constant 1 for fract. */
7022 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7023 return NULL_TREE;
7024 arg10 = arg1;
7025 arg11 = build_one_cst (type);
7026 }
7027 same = NULL_TREE;
7028
7029 if (operand_equal_p (arg01, arg11, 0))
7030 same = arg01, alt0 = arg00, alt1 = arg10;
7031 else if (operand_equal_p (arg00, arg10, 0))
7032 same = arg00, alt0 = arg01, alt1 = arg11;
7033 else if (operand_equal_p (arg00, arg11, 0))
7034 same = arg00, alt0 = arg01, alt1 = arg10;
7035 else if (operand_equal_p (arg01, arg10, 0))
7036 same = arg01, alt0 = arg00, alt1 = arg11;
7037
7038 /* No identical multiplicands; see if we can find a common
7039 power-of-two factor in non-power-of-two multiplies. This
7040 can help in multi-dimensional array access. */
7041 else if (tree_fits_shwi_p (arg01)
7042 && tree_fits_shwi_p (arg11))
7043 {
7044 HOST_WIDE_INT int01, int11, tmp;
7045 bool swap = false;
7046 tree maybe_same;
7047 int01 = tree_to_shwi (arg01);
7048 int11 = tree_to_shwi (arg11);
7049
7050 /* Move min of absolute values to int11. */
7051 if (absu_hwi (int01) < absu_hwi (int11))
7052 {
7053 tmp = int01, int01 = int11, int11 = tmp;
7054 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7055 maybe_same = arg01;
7056 swap = true;
7057 }
7058 else
7059 maybe_same = arg11;
7060
7061 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7062 /* The remainder should not be a constant, otherwise we
7063 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7064 increase the number of multiplications needed. */
7065 && TREE_CODE (arg10) != INTEGER_CST)
7066 {
7067 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7068 build_int_cst (TREE_TYPE (arg00),
7069 int01 / int11));
7070 alt1 = arg10;
7071 same = maybe_same;
7072 if (swap)
7073 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7074 }
7075 }
7076
7077 if (same)
7078 return fold_build2_loc (loc, MULT_EXPR, type,
7079 fold_build2_loc (loc, code, type,
7080 fold_convert_loc (loc, type, alt0),
7081 fold_convert_loc (loc, type, alt1)),
7082 fold_convert_loc (loc, type, same));
7083
7084 return NULL_TREE;
7085 }
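
#if 0
/* Editorial examples (not GCC code) of the shapes folded above,
   written at the source level.  */
static long example_common_factor (long a, long b, long c)
{
  return a * c + b * c;         /* becomes (a + b) * c */
}
static long example_pow2_factor (long i, long j)
{
  return i * 4 + j * 2;         /* becomes (i * 2 + j) * 2 */
}
#endif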
7086
7087 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7088 specified by EXPR into the buffer PTR of length LEN bytes.
7089 Return the number of bytes placed in the buffer, or zero
7090 upon failure. */
7091
7092 static int
7093 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7094 {
7095 tree type = TREE_TYPE (expr);
7096 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7097 int byte, offset, word, words;
7098 unsigned char value;
7099
7100 if ((off == -1 && total_bytes > len)
7101 || off >= total_bytes)
7102 return 0;
7103 if (off == -1)
7104 off = 0;
7105 words = total_bytes / UNITS_PER_WORD;
7106
7107 for (byte = 0; byte < total_bytes; byte++)
7108 {
7109 int bitpos = byte * BITS_PER_UNIT;
7110 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7111 number of bytes. */
7112 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7113
7114 if (total_bytes > UNITS_PER_WORD)
7115 {
7116 word = byte / UNITS_PER_WORD;
7117 if (WORDS_BIG_ENDIAN)
7118 word = (words - 1) - word;
7119 offset = word * UNITS_PER_WORD;
7120 if (BYTES_BIG_ENDIAN)
7121 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7122 else
7123 offset += byte % UNITS_PER_WORD;
7124 }
7125 else
7126 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7127 if (offset >= off
7128 && offset - off < len)
7129 ptr[offset - off] = value;
7130 }
7131 return MIN (len, total_bytes - off);
7132 }
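
#if 0
/* Editorial sketch (not GCC code) of the serialization above for the
   common case of a little-endian target; the real routine also
   handles big-endian byte and word orders via the OFFSET dance.  */
static void example_encode_le (unsigned long long v, unsigned char *ptr,
                               int total_bytes)
{
  for (int byte = 0; byte < total_bytes; byte++)
    ptr[byte] = (unsigned char) (v >> (byte * 8));  /* low byte first */
}
#endif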
7133
7134
7135 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7136 specified by EXPR into the buffer PTR of length LEN bytes.
7137 Return the number of bytes placed in the buffer, or zero
7138 upon failure. */
7139
7140 static int
7141 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7142 {
7143 tree type = TREE_TYPE (expr);
7144 machine_mode mode = TYPE_MODE (type);
7145 int total_bytes = GET_MODE_SIZE (mode);
7146 FIXED_VALUE_TYPE value;
7147 tree i_value, i_type;
7148
7149 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7150 return 0;
7151
7152 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7153
7154 if (NULL_TREE == i_type
7155 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7156 return 0;
7157
7158 value = TREE_FIXED_CST (expr);
7159 i_value = double_int_to_tree (i_type, value.data);
7160
7161 return native_encode_int (i_value, ptr, len, off);
7162 }
7163
7164
7165 /* Subroutine of native_encode_expr. Encode the REAL_CST
7166 specified by EXPR into the buffer PTR of length LEN bytes.
7167 Return the number of bytes placed in the buffer, or zero
7168 upon failure. */
7169
7170 static int
7171 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7172 {
7173 tree type = TREE_TYPE (expr);
7174 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7175 int byte, offset, word, words, bitpos;
7176 unsigned char value;
7177
7178 /* There are always 32 bits in each long, no matter the size of
7179 the host's long. We handle floating point representations with
7180 up to 192 bits. */
7181 long tmp[6];
7182
7183 if ((off == -1 && total_bytes > len)
7184 || off >= total_bytes)
7185 return 0;
7186 if (off == -1)
7187 off = 0;
7188 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7189
7190 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7191
7192 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7193 bitpos += BITS_PER_UNIT)
7194 {
7195 byte = (bitpos / BITS_PER_UNIT) & 3;
7196 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7197
7198 if (UNITS_PER_WORD < 4)
7199 {
7200 word = byte / UNITS_PER_WORD;
7201 if (WORDS_BIG_ENDIAN)
7202 word = (words - 1) - word;
7203 offset = word * UNITS_PER_WORD;
7204 if (BYTES_BIG_ENDIAN)
7205 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7206 else
7207 offset += byte % UNITS_PER_WORD;
7208 }
7209 else
7210 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7211 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7212 if (offset >= off
7213 && offset - off < len)
7214 ptr[offset - off] = value;
7215 }
7216 return MIN (len, total_bytes - off);
7217 }
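
/* Editorial note: in the loop above, BYTE is the byte's position
   within its 32-bit chunk (the "& 3"), the endianness code permutes
   it within that chunk, and the final "& ~3" adds back the chunk's
   base address, so each long of TMP lands in its slot of PTR.  */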
7218
7219 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7220 specified by EXPR into the buffer PTR of length LEN bytes.
7221 Return the number of bytes placed in the buffer, or zero
7222 upon failure. */
7223
7224 static int
7225 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7226 {
7227 int rsize, isize;
7228 tree part;
7229
7230 part = TREE_REALPART (expr);
7231 rsize = native_encode_expr (part, ptr, len, off);
7232 if (off == -1
7233 && rsize == 0)
7234 return 0;
7235 part = TREE_IMAGPART (expr);
7236 if (off != -1)
7237 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7238 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7239 if (off == -1
7240 && isize != rsize)
7241 return 0;
7242 return rsize + isize;
7243 }
7244
7245
7246 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7247 specified by EXPR into the buffer PTR of length LEN bytes.
7248 Return the number of bytes placed in the buffer, or zero
7249 upon failure. */
7250
7251 static int
7252 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7253 {
7254 unsigned i, count;
7255 int size, offset;
7256 tree itype, elem;
7257
7258 offset = 0;
7259 count = VECTOR_CST_NELTS (expr);
7260 itype = TREE_TYPE (TREE_TYPE (expr));
7261 size = GET_MODE_SIZE (TYPE_MODE (itype));
7262 for (i = 0; i < count; i++)
7263 {
7264 if (off >= size)
7265 {
7266 off -= size;
7267 continue;
7268 }
7269 elem = VECTOR_CST_ELT (expr, i);
7270 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7271 if ((off == -1 && res != size)
7272 || res == 0)
7273 return 0;
7274 offset += res;
7275 if (offset >= len)
7276 return offset;
7277 if (off != -1)
7278 off = 0;
7279 }
7280 return offset;
7281 }
7282
7283
7284 /* Subroutine of native_encode_expr. Encode the STRING_CST
7285 specified by EXPR into the buffer PTR of length LEN bytes.
7286 Return the number of bytes placed in the buffer, or zero
7287 upon failure. */
7288
7289 static int
7290 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7291 {
7292 tree type = TREE_TYPE (expr);
7293 HOST_WIDE_INT total_bytes;
7294
7295 if (TREE_CODE (type) != ARRAY_TYPE
7296 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7297 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7298 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7299 return 0;
7300 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7301 if ((off == -1 && total_bytes > len)
7302 || off >= total_bytes)
7303 return 0;
7304 if (off == -1)
7305 off = 0;
7306 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7307 {
7308 int written = 0;
7309 if (off < TREE_STRING_LENGTH (expr))
7310 {
7311 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7312 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7313 }
7314 memset (ptr + written, 0,
7315 MIN (total_bytes - written, len - written));
7316 }
7317 else
7318 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7319 return MIN (total_bytes - off, len);
7320 }
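
/* Editorial example: encoding the STRING_CST "ab" whose array type is
   char[4] stores 'a', 'b', '\0', '\0' -- bytes past TREE_STRING_LENGTH
   are zero-filled rather than read past the literal.  */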
7321
7322
7323 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7324 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7325 buffer PTR of length LEN bytes. If OFF is not -1 then start
7326 the encoding at byte offset OFF and encode at most LEN bytes.
7327 Return the number of bytes placed in the buffer, or zero upon failure. */
7328
7329 int
7330 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7331 {
7332 /* We don't support starting at a negative offset, and -1 is special. */
7333 if (off < -1)
7334 return 0;
7335
7336 switch (TREE_CODE (expr))
7337 {
7338 case INTEGER_CST:
7339 return native_encode_int (expr, ptr, len, off);
7340
7341 case REAL_CST:
7342 return native_encode_real (expr, ptr, len, off);
7343
7344 case FIXED_CST:
7345 return native_encode_fixed (expr, ptr, len, off);
7346
7347 case COMPLEX_CST:
7348 return native_encode_complex (expr, ptr, len, off);
7349
7350 case VECTOR_CST:
7351 return native_encode_vector (expr, ptr, len, off);
7352
7353 case STRING_CST:
7354 return native_encode_string (expr, ptr, len, off);
7355
7356 default:
7357 return 0;
7358 }
7359 }
7360
7361
7362 /* Subroutine of native_interpret_expr. Interpret the contents of
7363 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7364 If the buffer cannot be interpreted, return NULL_TREE. */
7365
7366 static tree
7367 native_interpret_int (tree type, const unsigned char *ptr, int len)
7368 {
7369 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7370
7371 if (total_bytes > len
7372 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7373 return NULL_TREE;
7374
7375 wide_int result = wi::from_buffer (ptr, total_bytes);
7376
7377 return wide_int_to_tree (type, result);
7378 }
7379
7380
7381 /* Subroutine of native_interpret_expr. Interpret the contents of
7382 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7383 If the buffer cannot be interpreted, return NULL_TREE. */
7384
7385 static tree
7386 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7387 {
7388 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7389 double_int result;
7390 FIXED_VALUE_TYPE fixed_value;
7391
7392 if (total_bytes > len
7393 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7394 return NULL_TREE;
7395
7396 result = double_int::from_buffer (ptr, total_bytes);
7397 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7398
7399 return build_fixed (type, fixed_value);
7400 }
7401
7402
7403 /* Subroutine of native_interpret_expr. Interpret the contents of
7404 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7405 If the buffer cannot be interpreted, return NULL_TREE. */
7406
7407 static tree
7408 native_interpret_real (tree type, const unsigned char *ptr, int len)
7409 {
7410 machine_mode mode = TYPE_MODE (type);
7411 int total_bytes = GET_MODE_SIZE (mode);
7412 unsigned char value;
7413 /* There are always 32 bits in each long, no matter the size of
7414 the host's long. We handle floating point representations with
7415 up to 192 bits. */
7416 REAL_VALUE_TYPE r;
7417 long tmp[6];
7418
7419 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7420 if (total_bytes > len || total_bytes > 24)
7421 return NULL_TREE;
7422 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7423
7424 memset (tmp, 0, sizeof (tmp));
7425 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7426 bitpos += BITS_PER_UNIT)
7427 {
7428 /* Both OFFSET and BYTE index within a long;
7429 bitpos indexes the whole float. */
7430 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7431 if (UNITS_PER_WORD < 4)
7432 {
7433 int word = byte / UNITS_PER_WORD;
7434 if (WORDS_BIG_ENDIAN)
7435 word = (words - 1) - word;
7436 offset = word * UNITS_PER_WORD;
7437 if (BYTES_BIG_ENDIAN)
7438 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7439 else
7440 offset += byte % UNITS_PER_WORD;
7441 }
7442 else
7443 {
7444 offset = byte;
7445 if (BYTES_BIG_ENDIAN)
7446 {
7447 /* Reverse bytes within each long, or within the entire float
7448 if it's smaller than a long (for HFmode). */
7449 offset = MIN (3, total_bytes - 1) - offset;
7450 gcc_assert (offset >= 0);
7451 }
7452 }
7453 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7454
7455 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7456 }
7457
7458 real_from_target (&r, tmp, mode);
7459 return build_real (type, r);
7460 }
7461
7462
7463 /* Subroutine of native_interpret_expr. Interpret the contents of
7464 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7465 If the buffer cannot be interpreted, return NULL_TREE. */
7466
7467 static tree
7468 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7469 {
7470 tree etype, rpart, ipart;
7471 int size;
7472
7473 etype = TREE_TYPE (type);
7474 size = GET_MODE_SIZE (TYPE_MODE (etype));
7475 if (size * 2 > len)
7476 return NULL_TREE;
7477 rpart = native_interpret_expr (etype, ptr, size);
7478 if (!rpart)
7479 return NULL_TREE;
7480 ipart = native_interpret_expr (etype, ptr+size, size);
7481 if (!ipart)
7482 return NULL_TREE;
7483 return build_complex (type, rpart, ipart);
7484 }
7485
7486
7487 /* Subroutine of native_interpret_expr. Interpret the contents of
7488 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7489 If the buffer cannot be interpreted, return NULL_TREE. */
7490
7491 static tree
7492 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7493 {
7494 tree etype, elem;
7495 int i, size, count;
7496 tree *elements;
7497
7498 etype = TREE_TYPE (type);
7499 size = GET_MODE_SIZE (TYPE_MODE (etype));
7500 count = TYPE_VECTOR_SUBPARTS (type);
7501 if (size * count > len)
7502 return NULL_TREE;
7503
7504 elements = XALLOCAVEC (tree, count);
7505 for (i = count - 1; i >= 0; i--)
7506 {
7507 elem = native_interpret_expr (etype, ptr+(i*size), size);
7508 if (!elem)
7509 return NULL_TREE;
7510 elements[i] = elem;
7511 }
7512 return build_vector (type, elements);
7513 }
7514
7515
7516 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7517 the buffer PTR of length LEN as a constant of type TYPE. For
7518 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7519 we return a REAL_CST, etc. If the buffer cannot be interpreted,
7520 return NULL_TREE. */
7521
7522 tree
7523 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7524 {
7525 switch (TREE_CODE (type))
7526 {
7527 case INTEGER_TYPE:
7528 case ENUMERAL_TYPE:
7529 case BOOLEAN_TYPE:
7530 case POINTER_TYPE:
7531 case REFERENCE_TYPE:
7532 return native_interpret_int (type, ptr, len);
7533
7534 case REAL_TYPE:
7535 return native_interpret_real (type, ptr, len);
7536
7537 case FIXED_POINT_TYPE:
7538 return native_interpret_fixed (type, ptr, len);
7539
7540 case COMPLEX_TYPE:
7541 return native_interpret_complex (type, ptr, len);
7542
7543 case VECTOR_TYPE:
7544 return native_interpret_vector (type, ptr, len);
7545
7546 default:
7547 return NULL_TREE;
7548 }
7549 }
7550
7551 /* Returns true if we can interpret the contents of a native encoding
7552 as TYPE. */
7553
7554 static bool
7555 can_native_interpret_type_p (tree type)
7556 {
7557 switch (TREE_CODE (type))
7558 {
7559 case INTEGER_TYPE:
7560 case ENUMERAL_TYPE:
7561 case BOOLEAN_TYPE:
7562 case POINTER_TYPE:
7563 case REFERENCE_TYPE:
7564 case FIXED_POINT_TYPE:
7565 case REAL_TYPE:
7566 case COMPLEX_TYPE:
7567 case VECTOR_TYPE:
7568 return true;
7569 default:
7570 return false;
7571 }
7572 }
7573
7574 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7575 TYPE at compile-time. If we're unable to perform the conversion
7576 return NULL_TREE. */
7577
7578 static tree
7579 fold_view_convert_expr (tree type, tree expr)
7580 {
7581 /* We support up to 512-bit values (for V8DFmode). */
7582 unsigned char buffer[64];
7583 int len;
7584
7585 /* Check that the host and target are sane. */
7586 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7587 return NULL_TREE;
7588
7589 len = native_encode_expr (expr, buffer, sizeof (buffer));
7590 if (len == 0)
7591 return NULL_TREE;
7592
7593 return native_interpret_expr (type, buffer, len);
7594 }
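
#if 0
/* Editorial example (not GCC code): at the source level the function
   above evaluates type puns at compile time.  Assuming a 32-bit IEEE
   float and 32-bit unsigned int:  */
static unsigned int example_view_convert (void)
{
  union { float f; unsigned int i; } u = { 1.0f };
  return u.i;                   /* folds to 0x3f800000 */
}
#endif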
7595
7596 /* Build an expression for the address of T. Folds away INDIRECT_REF
7597 to avoid confusing the gimplify process. */
7598
7599 tree
7600 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7601 {
7602 /* The size of the object is not relevant when talking about its address. */
7603 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7604 t = TREE_OPERAND (t, 0);
7605
7606 if (TREE_CODE (t) == INDIRECT_REF)
7607 {
7608 t = TREE_OPERAND (t, 0);
7609
7610 if (TREE_TYPE (t) != ptrtype)
7611 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7612 }
7613 else if (TREE_CODE (t) == MEM_REF
7614 && integer_zerop (TREE_OPERAND (t, 1)))
7615 return TREE_OPERAND (t, 0);
7616 else if (TREE_CODE (t) == MEM_REF
7617 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7618 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7619 TREE_OPERAND (t, 0),
7620 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7621 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7622 {
7623 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7624
7625 if (TREE_TYPE (t) != ptrtype)
7626 t = fold_convert_loc (loc, ptrtype, t);
7627 }
7628 else
7629 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7630
7631 return t;
7632 }
7633
7634 /* Build an expression for the address of T. */
7635
7636 tree
7637 build_fold_addr_expr_loc (location_t loc, tree t)
7638 {
7639 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7640
7641 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7642 }
7643
7644 /* Fold a unary expression of code CODE and type TYPE with operand
7645 OP0. Return the folded expression if folding is successful.
7646 Otherwise, return NULL_TREE. */
7647
7648 tree
7649 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7650 {
7651 tree tem;
7652 tree arg0;
7653 enum tree_code_class kind = TREE_CODE_CLASS (code);
7654
7655 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7656 && TREE_CODE_LENGTH (code) == 1);
7657
7658 arg0 = op0;
7659 if (arg0)
7660 {
7661 if (CONVERT_EXPR_CODE_P (code)
7662 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7663 {
7664 /* Don't use STRIP_NOPS, because signedness of argument type
7665 matters. */
7666 STRIP_SIGN_NOPS (arg0);
7667 }
7668 else
7669 {
7670 /* Strip any conversions that don't change the mode. This
7671 is safe for every expression, except for a comparison
7672 expression because its signedness is derived from its
7673 operands.
7674
7675 Note that this is done as an internal manipulation within
7676 the constant folder, in order to find the simplest
7677 representation of the arguments so that their form can be
7678 studied. In any case, the appropriate type conversions
7679 should be put back in the tree that will get out of the
7680 constant folder. */
7681 STRIP_NOPS (arg0);
7682 }
7683
7684 if (CONSTANT_CLASS_P (arg0))
7685 {
7686 tree tem = const_unop (code, type, arg0);
7687 if (tem)
7688 {
7689 if (TREE_TYPE (tem) != type)
7690 tem = fold_convert_loc (loc, type, tem);
7691 return tem;
7692 }
7693 }
7694 }
7695
7696 tem = generic_simplify (loc, code, type, op0);
7697 if (tem)
7698 return tem;
7699
7700 if (TREE_CODE_CLASS (code) == tcc_unary)
7701 {
7702 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7703 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7704 fold_build1_loc (loc, code, type,
7705 fold_convert_loc (loc, TREE_TYPE (op0),
7706 TREE_OPERAND (arg0, 1))));
7707 else if (TREE_CODE (arg0) == COND_EXPR)
7708 {
7709 tree arg01 = TREE_OPERAND (arg0, 1);
7710 tree arg02 = TREE_OPERAND (arg0, 2);
7711 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7712 arg01 = fold_build1_loc (loc, code, type,
7713 fold_convert_loc (loc,
7714 TREE_TYPE (op0), arg01));
7715 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7716 arg02 = fold_build1_loc (loc, code, type,
7717 fold_convert_loc (loc,
7718 TREE_TYPE (op0), arg02));
7719 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7720 arg01, arg02);
7721
7722 /* If this was a conversion, and all we did was move it inside
7723 the COND_EXPR, bring it back out. But leave it if
7724 it is a conversion from integer to integer and the
7725 result precision is no wider than a word since such a
7726 conversion is cheap and may be optimized away by combine,
7727 while it couldn't if it were outside the COND_EXPR. Then return
7728 so we don't get into an infinite recursion loop taking the
7729 conversion out and then back in. */
7730
7731 if ((CONVERT_EXPR_CODE_P (code)
7732 || code == NON_LVALUE_EXPR)
7733 && TREE_CODE (tem) == COND_EXPR
7734 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7735 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7736 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7737 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7738 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7739 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7740 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7741 && (INTEGRAL_TYPE_P
7742 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7743 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7744 || flag_syntax_only))
7745 tem = build1_loc (loc, code, type,
7746 build3 (COND_EXPR,
7747 TREE_TYPE (TREE_OPERAND
7748 (TREE_OPERAND (tem, 1), 0)),
7749 TREE_OPERAND (tem, 0),
7750 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7751 TREE_OPERAND (TREE_OPERAND (tem, 2),
7752 0)));
7753 return tem;
7754 }
7755 }
7756
7757 switch (code)
7758 {
7759 case NON_LVALUE_EXPR:
7760 if (!maybe_lvalue_p (op0))
7761 return fold_convert_loc (loc, type, op0);
7762 return NULL_TREE;
7763
7764 CASE_CONVERT:
7765 case FLOAT_EXPR:
7766 case FIX_TRUNC_EXPR:
7767 if (COMPARISON_CLASS_P (op0))
7768 {
7769 /* If we have (type) (a CMP b) and type is an integral type, return
7770 new expression involving the new type. Canonicalize
7771 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7772 non-integral type.
7773 Do not fold the result, as that would not simplify further;
7774 folding again would result in infinite recursion. */
7775 if (TREE_CODE (type) == BOOLEAN_TYPE)
7776 return build2_loc (loc, TREE_CODE (op0), type,
7777 TREE_OPERAND (op0, 0),
7778 TREE_OPERAND (op0, 1));
7779 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7780 && TREE_CODE (type) != VECTOR_TYPE)
7781 return build3_loc (loc, COND_EXPR, type, op0,
7782 constant_boolean_node (true, type),
7783 constant_boolean_node (false, type));
7784 }
7785
7786 /* Handle (T *)&A.B.C for A being of type T and B and C
7787 living at offset zero. This occurs frequently in
7788 C++ upcasting and then accessing the base. */
7789 if (TREE_CODE (op0) == ADDR_EXPR
7790 && POINTER_TYPE_P (type)
7791 && handled_component_p (TREE_OPERAND (op0, 0)))
7792 {
7793 HOST_WIDE_INT bitsize, bitpos;
7794 tree offset;
7795 machine_mode mode;
7796 int unsignedp, reversep, volatilep;
7797 tree base
7798 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7799 &offset, &mode, &unsignedp, &reversep,
7800 &volatilep, false);
7801 /* If the reference was to a (constant) zero offset, we can use
7802 the address of the base if it has the same base type
7803 as the result type and the pointer type is unqualified. */
7804 if (! offset && bitpos == 0
7805 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7806 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7807 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7808 return fold_convert_loc (loc, type,
7809 build_fold_addr_expr_loc (loc, base));
7810 }
7811
7812 if (TREE_CODE (op0) == MODIFY_EXPR
7813 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7814 /* Detect assigning a bitfield. */
7815 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7816 && DECL_BIT_FIELD
7817 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7818 {
7819 /* Don't leave an assignment inside a conversion
7820 unless assigning a bitfield. */
7821 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7822 /* First do the assignment, then return converted constant. */
7823 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7824 TREE_NO_WARNING (tem) = 1;
7825 TREE_USED (tem) = 1;
7826 return tem;
7827 }
7828
7829 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7830 constants (if x has signed type, the sign bit cannot be set
7831 in c). This folds extension into the BIT_AND_EXPR.
7832 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7833 very likely don't have maximal range for their precision and this
7834 transformation effectively doesn't preserve non-maximal ranges. */
7835 if (TREE_CODE (type) == INTEGER_TYPE
7836 && TREE_CODE (op0) == BIT_AND_EXPR
7837 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7838 {
7839 tree and_expr = op0;
7840 tree and0 = TREE_OPERAND (and_expr, 0);
7841 tree and1 = TREE_OPERAND (and_expr, 1);
7842 int change = 0;
7843
7844 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7845 || (TYPE_PRECISION (type)
7846 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7847 change = 1;
7848 else if (TYPE_PRECISION (TREE_TYPE (and1))
7849 <= HOST_BITS_PER_WIDE_INT
7850 && tree_fits_uhwi_p (and1))
7851 {
7852 unsigned HOST_WIDE_INT cst;
7853
7854 cst = tree_to_uhwi (and1);
7855 cst &= HOST_WIDE_INT_M1U
7856 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7857 change = (cst == 0);
7858 if (change
7859 && !flag_syntax_only
7860 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7861 == ZERO_EXTEND))
7862 {
7863 tree uns = unsigned_type_for (TREE_TYPE (and0));
7864 and0 = fold_convert_loc (loc, uns, and0);
7865 and1 = fold_convert_loc (loc, uns, and1);
7866 }
7867 }
7868 if (change)
7869 {
7870 tem = force_fit_type (type, wi::to_widest (and1), 0,
7871 TREE_OVERFLOW (and1));
7872 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7873 fold_convert_loc (loc, type, and0), tem);
7874 }
7875 }
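
/* Editorial example: with 8-bit char and 32-bit int, the fold above
   turns (unsigned char) (x & 0x7f) into (unsigned char) x & 0x7f,
   letting the AND combine with later operations.  */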
7876
7877 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7878 cast (T1)X will fold away. We assume that this happens when X itself
7879 is a cast. */
7880 if (POINTER_TYPE_P (type)
7881 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7882 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7883 {
7884 tree arg00 = TREE_OPERAND (arg0, 0);
7885 tree arg01 = TREE_OPERAND (arg0, 1);
7886
7887 return fold_build_pointer_plus_loc
7888 (loc, fold_convert_loc (loc, type, arg00), arg01);
7889 }
7890
7891 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7892 of the same precision, and X is an integer type not narrower than
7893 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7894 if (INTEGRAL_TYPE_P (type)
7895 && TREE_CODE (op0) == BIT_NOT_EXPR
7896 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7897 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7898 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7899 {
7900 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7901 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7902 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7903 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7904 fold_convert_loc (loc, type, tem));
7905 }
7906
7907 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7908 type of X and Y (integer types only). */
7909 if (INTEGRAL_TYPE_P (type)
7910 && TREE_CODE (op0) == MULT_EXPR
7911 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7912 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7913 {
7914 /* Be careful not to introduce new overflows. */
7915 tree mult_type;
7916 if (TYPE_OVERFLOW_WRAPS (type))
7917 mult_type = type;
7918 else
7919 mult_type = unsigned_type_for (type);
7920
7921 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7922 {
7923 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7924 fold_convert_loc (loc, mult_type,
7925 TREE_OPERAND (op0, 0)),
7926 fold_convert_loc (loc, mult_type,
7927 TREE_OPERAND (op0, 1)));
7928 return fold_convert_loc (loc, type, tem);
7929 }
7930 }
7931
7932 return NULL_TREE;
7933
7934 case VIEW_CONVERT_EXPR:
7935 if (TREE_CODE (op0) == MEM_REF)
7936 {
7937 tem = fold_build2_loc (loc, MEM_REF, type,
7938 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7939 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7940 return tem;
7941 }
7942
7943 return NULL_TREE;
7944
7945 case NEGATE_EXPR:
7946 tem = fold_negate_expr (loc, arg0);
7947 if (tem)
7948 return fold_convert_loc (loc, type, tem);
7949 return NULL_TREE;
7950
7951 case ABS_EXPR:
7952 /* Convert fabs((double)float) into (double)fabsf(float). */
7953 if (TREE_CODE (arg0) == NOP_EXPR
7954 && TREE_CODE (type) == REAL_TYPE)
7955 {
7956 tree targ0 = strip_float_extensions (arg0);
7957 if (targ0 != arg0)
7958 return fold_convert_loc (loc, type,
7959 fold_build1_loc (loc, ABS_EXPR,
7960 TREE_TYPE (targ0),
7961 targ0));
7962 }
7963 return NULL_TREE;
7964
7965 case BIT_NOT_EXPR:
7966 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7967 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7968 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7969 fold_convert_loc (loc, type,
7970 TREE_OPERAND (arg0, 0)))))
7971 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7972 fold_convert_loc (loc, type,
7973 TREE_OPERAND (arg0, 1)));
7974 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7975 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7976 fold_convert_loc (loc, type,
7977 TREE_OPERAND (arg0, 1)))))
7978 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7979 fold_convert_loc (loc, type,
7980 TREE_OPERAND (arg0, 0)), tem);
7981
7982 return NULL_TREE;
7983
7984 case TRUTH_NOT_EXPR:
7985 /* Note that the operand of this must be an int
7986 and its values must be 0 or 1.
7987 ("true" is a fixed value perhaps depending on the language,
7988 but we don't handle values other than 1 correctly yet.) */
7989 tem = fold_truth_not_expr (loc, arg0);
7990 if (!tem)
7991 return NULL_TREE;
7992 return fold_convert_loc (loc, type, tem);
7993
7994 case INDIRECT_REF:
7995 /* Fold *&X to X if X is an lvalue. */
7996 if (TREE_CODE (op0) == ADDR_EXPR)
7997 {
7998 tree op00 = TREE_OPERAND (op0, 0);
7999 if ((TREE_CODE (op00) == VAR_DECL
8000 || TREE_CODE (op00) == PARM_DECL
8001 || TREE_CODE (op00) == RESULT_DECL)
8002 && !TREE_READONLY (op00))
8003 return op00;
8004 }
8005 return NULL_TREE;
8006
8007 default:
8008 return NULL_TREE;
8009 } /* switch (code) */
8010 }
8011
8012
8013 /* If the operation was a conversion do _not_ mark a resulting constant
8014 with TREE_OVERFLOW if the original constant was not. These conversions
8015 have implementation defined behavior and retaining the TREE_OVERFLOW
8016 flag here would confuse later passes such as VRP. */
8017 tree
8018 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8019 tree type, tree op0)
8020 {
8021 tree res = fold_unary_loc (loc, code, type, op0);
8022 if (res
8023 && TREE_CODE (res) == INTEGER_CST
8024 && TREE_CODE (op0) == INTEGER_CST
8025 && CONVERT_EXPR_CODE_P (code))
8026 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8027
8028 return res;
8029 }
8030
8031 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8032 operands OP0 and OP1. LOC is the location of the resulting expression.
8033 ARG0 and ARG1 are OP0 and OP1 with NOPs stripped (see STRIP_NOPS).
8034 Return the folded expression if folding is successful. Otherwise,
8035 return NULL_TREE. */
8036 static tree
8037 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8038 tree arg0, tree arg1, tree op0, tree op1)
8039 {
8040 tree tem;
8041
8042 /* We only do these simplifications if we are optimizing. */
8043 if (!optimize)
8044 return NULL_TREE;
8045
8046 /* Check for things like (A || B) && (A || C). We can convert this
8047 to A || (B && C). Note that either operator can be any of the four
8048 truth and/or operations and the transformation will still be
8049 valid. Also note that we only care about order for the
8050 ANDIF and ORIF operators. If B contains side effects, this
8051 might change the truth-value of A. */
8052 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8053 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8054 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8055 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8056 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8057 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8058 {
8059 tree a00 = TREE_OPERAND (arg0, 0);
8060 tree a01 = TREE_OPERAND (arg0, 1);
8061 tree a10 = TREE_OPERAND (arg1, 0);
8062 tree a11 = TREE_OPERAND (arg1, 1);
8063 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8064 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8065 && (code == TRUTH_AND_EXPR
8066 || code == TRUTH_OR_EXPR));
8067
8068 if (operand_equal_p (a00, a10, 0))
8069 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8070 fold_build2_loc (loc, code, type, a01, a11));
8071 else if (commutative && operand_equal_p (a00, a11, 0))
8072 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8073 fold_build2_loc (loc, code, type, a01, a10));
8074 else if (commutative && operand_equal_p (a01, a10, 0))
8075 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8076 fold_build2_loc (loc, code, type, a00, a11));
8077
8078 /* This case is tricky because we must either have commutative
8079 operators or else A10 must not have side-effects. */
8080
8081 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8082 && operand_equal_p (a01, a11, 0))
8083 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8084 fold_build2_loc (loc, code, type, a00, a10),
8085 a01);
8086 }
8087
8088 /* See if we can build a range comparison. */
8089 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8090 return tem;
8091
8092 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8093 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8094 {
8095 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8096 if (tem)
8097 return fold_build2_loc (loc, code, type, tem, arg1);
8098 }
8099
8100 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8101 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8102 {
8103 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8104 if (tem)
8105 return fold_build2_loc (loc, code, type, arg0, tem);
8106 }
8107
8108 /* Check for the possibility of merging component references. If our
8109 lhs is another similar operation, try to merge its rhs with our
8110 rhs. Then try to merge our lhs and rhs. */
8111 if (TREE_CODE (arg0) == code
8112 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8113 TREE_OPERAND (arg0, 1), arg1)))
8114 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8115
8116 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8117 return tem;
8118
8119 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8120 && (code == TRUTH_AND_EXPR
8121 || code == TRUTH_ANDIF_EXPR
8122 || code == TRUTH_OR_EXPR
8123 || code == TRUTH_ORIF_EXPR))
8124 {
8125 enum tree_code ncode, icode;
8126
8127 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8128 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8129 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8130
8131 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8132 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8133 We don't want to pack more than two leaves into a non-IF AND/OR
8134 expression.
8135 If the tree code of the left-hand operand isn't an AND/OR-IF code
8136 and isn't equal to IF-CODE, then we don't want to add the
8137 right-hand operand. If the inner right-hand side of the
8138 left-hand operand has side-effects, or isn't simple, then we
8139 can't add to it, as otherwise we might destroy the if-sequence. */
8140 if (TREE_CODE (arg0) == icode
8141 && simple_operand_p_2 (arg1)
8142 /* Needed for sequence points to handle trapping and
8143 side-effects. */
8144 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8145 {
8146 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8147 arg1);
8148 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8149 tem);
8150 }
8151 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8152 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8153 else if (TREE_CODE (arg1) == icode
8154 && simple_operand_p_2 (arg0)
8155 /* Needed for sequence points to handle trapping and
8156 side-effects. */
8157 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8158 {
8159 tem = fold_build2_loc (loc, ncode, type,
8160 arg0, TREE_OPERAND (arg1, 0));
8161 return fold_build2_loc (loc, icode, type, tem,
8162 TREE_OPERAND (arg1, 1));
8163 }
8164 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8165 into (A OR B).
8166 For sequence point consistency, we need to check for trapping,
8167 and side-effects. */
8168 else if (code == icode && simple_operand_p_2 (arg0)
8169 && simple_operand_p_2 (arg1))
8170 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8171 }
8172
8173 return NULL_TREE;
8174 }
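
#if 0
/* Editorial example (not GCC code) of the first transform above.
   With no side effects in b or c, both forms are equivalent and the
   folded form tests a only once:  */
static int example_andor (int a, int b, int c)
{
  return (a || b) && (a || c);  /* becomes a || (b && c) */
}
#endif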
8175
8176 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8177 by changing CODE to reduce the magnitude of constants involved in
8178 ARG0 of the comparison.
8179 Returns a canonicalized comparison tree if a simplification was
8180 possible, otherwise returns NULL_TREE.
8181 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8182 valid if signed overflow is undefined. */
8183
8184 static tree
8185 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8186 tree arg0, tree arg1,
8187 bool *strict_overflow_p)
8188 {
8189 enum tree_code code0 = TREE_CODE (arg0);
8190 tree t, cst0 = NULL_TREE;
8191 int sgn0;
8192
8193 /* Match A +- CST code arg1. We can change this only if overflow
8194 is undefined. */
8195 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8196 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8197 /* In principle pointers also have undefined overflow behavior,
8198 but that causes problems elsewhere. */
8199 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8200 && (code0 == MINUS_EXPR
8201 || code0 == PLUS_EXPR)
8202 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8203 return NULL_TREE;
8204
8205 /* Identify the constant in arg0 and its sign. */
8206 cst0 = TREE_OPERAND (arg0, 1);
8207 sgn0 = tree_int_cst_sgn (cst0);
8208
8209 /* Overflowed constants and zero will cause problems. */
8210 if (integer_zerop (cst0)
8211 || TREE_OVERFLOW (cst0))
8212 return NULL_TREE;
8213
8214 /* See if we can reduce the magnitude of the constant in
8215 arg0 by changing the comparison code. */
8216 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8217 if (code == LT_EXPR
8218 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8219 code = LE_EXPR;
8220 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8221 else if (code == GT_EXPR
8222 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8223 code = GE_EXPR;
8224 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8225 else if (code == LE_EXPR
8226 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8227 code = LT_EXPR;
8228 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8229 else if (code == GE_EXPR
8230 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8231 code = GT_EXPR;
8232 else
8233 return NULL_TREE;
8234 *strict_overflow_p = true;
8235
8236 /* Now build the constant reduced in magnitude. But not if that
8237 would produce one outside of its type's range. */
8238 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8239 && ((sgn0 == 1
8240 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8241 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8242 || (sgn0 == -1
8243 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8244 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8245 return NULL_TREE;
8246
8247 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8248 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8249 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8250 t = fold_convert (TREE_TYPE (arg1), t);
8251
8252 return fold_build2_loc (loc, code, type, t, arg1);
8253 }
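
#if 0
/* Editorial example (not GCC code): the magnitude reduction above
   relies on signed overflow being undefined; with a == INT_MIN the
   two forms would differ under wrapping semantics.  */
static int example_canon (int a, int b)
{
  return a - 1 < b;             /* canonicalized to a <= b */
}
#endif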
8254
8255 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8256 overflow further. Try to decrease the magnitude of constants involved
8257 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8258 and put sole constants at the second argument position.
8259 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8260
8261 static tree
8262 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8263 tree arg0, tree arg1)
8264 {
8265 tree t;
8266 bool strict_overflow_p;
8267 const char * const warnmsg = G_("assuming signed overflow does not occur "
8268 "when reducing constant in comparison");
8269
8270 /* Try canonicalization by simplifying arg0. */
8271 strict_overflow_p = false;
8272 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8273 &strict_overflow_p);
8274 if (t)
8275 {
8276 if (strict_overflow_p)
8277 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8278 return t;
8279 }
8280
8281 /* Try canonicalization by simplifying arg1 using the swapped
8282 comparison. */
8283 code = swap_tree_comparison (code);
8284 strict_overflow_p = false;
8285 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8286 &strict_overflow_p);
8287 if (t && strict_overflow_p)
8288 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8289 return t;
8290 }
8291
8292 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8293 space. This is used to avoid issuing overflow warnings for
8294 expressions like &p->x which cannot wrap. */
8295
8296 static bool
8297 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8298 {
8299 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8300 return true;
8301
8302 if (bitpos < 0)
8303 return true;
8304
8305 wide_int wi_offset;
8306 int precision = TYPE_PRECISION (TREE_TYPE (base));
8307 if (offset == NULL_TREE)
8308 wi_offset = wi::zero (precision);
8309 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8310 return true;
8311 else
8312 wi_offset = offset;
8313
8314 bool overflow;
8315 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8316 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8317 if (overflow)
8318 return true;
8319
8320 if (!wi::fits_uhwi_p (total))
8321 return true;
8322
8323 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8324 if (size <= 0)
8325 return true;
8326
8327 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8328 array. */
8329 if (TREE_CODE (base) == ADDR_EXPR)
8330 {
8331 HOST_WIDE_INT base_size;
8332
8333 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8334 if (base_size > 0 && size < base_size)
8335 size = base_size;
8336 }
8337
8338 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8339 }
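
/* Editorial example: for &p->x with a 4-byte field at byte offset 8
   of a 16-byte struct, TOTAL is 8 and SIZE is at least 16, so the
   address arithmetic cannot wrap and no overflow warning is needed.  */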
8340
8341 /* Return a positive integer when the symbol DECL is known to have
8342 a nonzero address, zero when it's known not to (e.g., it's a weak
8343 symbol), and a negative integer when the symbol is not yet in the
8344 symbol table and so whether or not its address is zero is unknown. */
8345 static int
8346 maybe_nonzero_address (tree decl)
8347 {
8348 if (DECL_P (decl) && decl_in_symtab_p (decl))
8349 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8350 return symbol->nonzero_address ();
8351
8352 return -1;
8353 }
8354
8355 /* Subroutine of fold_binary. This routine performs all of the
8356 transformations that are common to the equality/inequality
8357 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8358 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8359 fold_binary should call fold_binary. Fold a comparison with
8360 tree code CODE and type TYPE with operands OP0 and OP1. Return
8361 the folded comparison or NULL_TREE. */
8362
8363 static tree
8364 fold_comparison (location_t loc, enum tree_code code, tree type,
8365 tree op0, tree op1)
8366 {
8367 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8368 tree arg0, arg1, tem;
8369
8370 arg0 = op0;
8371 arg1 = op1;
8372
8373 STRIP_SIGN_NOPS (arg0);
8374 STRIP_SIGN_NOPS (arg1);
8375
8376 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8377 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8378 && (equality_code
8379 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8380 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8381 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8382 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8383 && TREE_CODE (arg1) == INTEGER_CST
8384 && !TREE_OVERFLOW (arg1))
8385 {
8386 const enum tree_code
8387 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8388 tree const1 = TREE_OPERAND (arg0, 1);
8389 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8390 tree variable = TREE_OPERAND (arg0, 0);
8391 tree new_const = int_const_binop (reverse_op, const2, const1);
8392
8393 /* If the constant operation overflowed this can be
8394 simplified as a comparison against INT_MAX/INT_MIN. */
8395 if (TREE_OVERFLOW (new_const)
8396 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8397 {
8398 int const1_sgn = tree_int_cst_sgn (const1);
8399 enum tree_code code2 = code;
8400
8401 /* Get the sign of the constant on the lhs if the
8402 operation were VARIABLE + CONST1. */
8403 if (TREE_CODE (arg0) == MINUS_EXPR)
8404 const1_sgn = -const1_sgn;
8405
8406 /* The sign of the constant determines if we overflowed
8407 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8408 Canonicalize to the INT_MIN overflow by swapping the comparison
8409 if necessary. */
8410 if (const1_sgn == -1)
8411 code2 = swap_tree_comparison (code);
8412
8413 /* We now can look at the canonicalized case
8414 VARIABLE + 1 CODE2 INT_MIN
8415 and decide on the result. */
8416 switch (code2)
8417 {
8418 case EQ_EXPR:
8419 case LT_EXPR:
8420 case LE_EXPR:
8421 return
8422 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8423
8424 case NE_EXPR:
8425 case GE_EXPR:
8426 case GT_EXPR:
8427 return
8428 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8429
8430 default:
8431 gcc_unreachable ();
8432 }
8433 }
8434 else
8435 {
8436 if (!equality_code)
8437 fold_overflow_warning ("assuming signed overflow does not occur "
8438 "when changing X +- C1 cmp C2 to "
8439 "X cmp C2 -+ C1",
8440 WARN_STRICT_OVERFLOW_COMPARISON);
8441 return fold_build2_loc (loc, code, type, variable, new_const);
8442 }
8443 }
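
  /* Editorial examples: `x + 10 < 20' becomes `x < 10'; when the new
     constant overflows, as in `x - 1 > INT_MAX', the whole comparison
     is known (here false for every x, given undefined signed
     overflow).  */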
8444
8445 /* For comparisons of pointers we can decompose it to a compile time
8446 comparison of the base objects and the offsets into the object.
8447 This requires at least one operand being an ADDR_EXPR or a
8448 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8449 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8450 && (TREE_CODE (arg0) == ADDR_EXPR
8451 || TREE_CODE (arg1) == ADDR_EXPR
8452 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8453 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8454 {
8455 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8456 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8457 machine_mode mode;
8458 int volatilep, reversep, unsignedp;
8459 bool indirect_base0 = false, indirect_base1 = false;
8460
8461 /* Get base and offset for the access. Strip ADDR_EXPR for
8462 get_inner_reference, but put it back by stripping INDIRECT_REF
8463 off the base object if possible. indirect_baseN will be true
8464 if baseN is not an address but refers to the object itself. */
8465 base0 = arg0;
8466 if (TREE_CODE (arg0) == ADDR_EXPR)
8467 {
8468 base0
8469 = get_inner_reference (TREE_OPERAND (arg0, 0),
8470 &bitsize, &bitpos0, &offset0, &mode,
8471 &unsignedp, &reversep, &volatilep, false);
8472 if (TREE_CODE (base0) == INDIRECT_REF)
8473 base0 = TREE_OPERAND (base0, 0);
8474 else
8475 indirect_base0 = true;
8476 }
8477 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8478 {
8479 base0 = TREE_OPERAND (arg0, 0);
8480 STRIP_SIGN_NOPS (base0);
8481 if (TREE_CODE (base0) == ADDR_EXPR)
8482 {
8483 base0
8484 = get_inner_reference (TREE_OPERAND (base0, 0),
8485 &bitsize, &bitpos0, &offset0, &mode,
8486 &unsignedp, &reversep, &volatilep,
8487 false);
8488 if (TREE_CODE (base0) == INDIRECT_REF)
8489 base0 = TREE_OPERAND (base0, 0);
8490 else
8491 indirect_base0 = true;
8492 }
8493 if (offset0 == NULL_TREE || integer_zerop (offset0))
8494 offset0 = TREE_OPERAND (arg0, 1);
8495 else
8496 offset0 = size_binop (PLUS_EXPR, offset0,
8497 TREE_OPERAND (arg0, 1));
8498 if (TREE_CODE (offset0) == INTEGER_CST)
8499 {
8500 offset_int tem = wi::sext (wi::to_offset (offset0),
8501 TYPE_PRECISION (sizetype));
8502 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8503 tem += bitpos0;
8504 if (wi::fits_shwi_p (tem))
8505 {
8506 bitpos0 = tem.to_shwi ();
8507 offset0 = NULL_TREE;
8508 }
8509 }
8510 }
8511
8512 base1 = arg1;
8513 if (TREE_CODE (arg1) == ADDR_EXPR)
8514 {
8515 base1
8516 = get_inner_reference (TREE_OPERAND (arg1, 0),
8517 &bitsize, &bitpos1, &offset1, &mode,
8518 &unsignedp, &reversep, &volatilep, false);
8519 if (TREE_CODE (base1) == INDIRECT_REF)
8520 base1 = TREE_OPERAND (base1, 0);
8521 else
8522 indirect_base1 = true;
8523 }
8524 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8525 {
8526 base1 = TREE_OPERAND (arg1, 0);
8527 STRIP_SIGN_NOPS (base1);
8528 if (TREE_CODE (base1) == ADDR_EXPR)
8529 {
8530 base1
8531 = get_inner_reference (TREE_OPERAND (base1, 0),
8532 &bitsize, &bitpos1, &offset1, &mode,
8533 &unsignedp, &reversep, &volatilep,
8534 false);
8535 if (TREE_CODE (base1) == INDIRECT_REF)
8536 base1 = TREE_OPERAND (base1, 0);
8537 else
8538 indirect_base1 = true;
8539 }
8540 if (offset1 == NULL_TREE || integer_zerop (offset1))
8541 offset1 = TREE_OPERAND (arg1, 1);
8542 else
8543 offset1 = size_binop (PLUS_EXPR, offset1,
8544 TREE_OPERAND (arg1, 1));
8545 if (TREE_CODE (offset1) == INTEGER_CST)
8546 {
8547 offset_int tem = wi::sext (wi::to_offset (offset1),
8548 TYPE_PRECISION (sizetype));
8549 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8550 tem += bitpos1;
8551 if (wi::fits_shwi_p (tem))
8552 {
8553 bitpos1 = tem.to_shwi ();
8554 offset1 = NULL_TREE;
8555 }
8556 }
8557 }
8558
8559 /* If we have equivalent bases we might be able to simplify. */
8560 if (indirect_base0 == indirect_base1
8561 && operand_equal_p (base0, base1,
8562 indirect_base0 ? OEP_ADDRESS_OF : 0))
8563 {
8564 /* We can fold this expression to a constant if the non-constant
8565 offset parts are equal. */
8566 if ((offset0 == offset1
8567 || (offset0 && offset1
8568 && operand_equal_p (offset0, offset1, 0)))
8569 && (code == EQ_EXPR
8570 || code == NE_EXPR
8571 || (indirect_base0 && DECL_P (base0))
8572 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8573
8574 {
8575 if (!equality_code
8576 && bitpos0 != bitpos1
8577 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8578 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8579 fold_overflow_warning (("assuming pointer wraparound does not "
8580 "occur when comparing P +- C1 with "
8581 "P +- C2"),
8582 WARN_STRICT_OVERFLOW_CONDITIONAL);
8583
8584 switch (code)
8585 {
8586 case EQ_EXPR:
8587 return constant_boolean_node (bitpos0 == bitpos1, type);
8588 case NE_EXPR:
8589 return constant_boolean_node (bitpos0 != bitpos1, type);
8590 case LT_EXPR:
8591 return constant_boolean_node (bitpos0 < bitpos1, type);
8592 case LE_EXPR:
8593 return constant_boolean_node (bitpos0 <= bitpos1, type);
8594 case GE_EXPR:
8595 return constant_boolean_node (bitpos0 >= bitpos1, type);
8596 case GT_EXPR:
8597 return constant_boolean_node (bitpos0 > bitpos1, type);
8598 default:;
8599 }
8600 }
8601 /* We can simplify the comparison to a comparison of the variable
8602 offset parts if the constant offset parts are equal.
8603 Be careful to use signed sizetype here because otherwise we
8604 mess with array offsets in the wrong way. This is possible
8605 because pointer arithmetic is restricted to remain within an
8606 object and overflow on pointer differences is undefined as of
8607 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8608 else if (bitpos0 == bitpos1
8609 && (equality_code
8610 || (indirect_base0 && DECL_P (base0))
8611 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8612 {
8613 /* By converting to signed sizetype we cover middle-end pointer
8614 arithmetic which operates on unsigned pointer types of size
8615 type size and ARRAY_REF offsets which are properly sign or
8616 zero extended from their type in case it is narrower than
8617 sizetype. */
8618 if (offset0 == NULL_TREE)
8619 offset0 = build_int_cst (ssizetype, 0);
8620 else
8621 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8622 if (offset1 == NULL_TREE)
8623 offset1 = build_int_cst (ssizetype, 0);
8624 else
8625 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8626
8627 if (!equality_code
8628 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8629 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8630 fold_overflow_warning (("assuming pointer wraparound does not "
8631 "occur when comparing P +- C1 with "
8632 "P +- C2"),
8633 WARN_STRICT_OVERFLOW_COMPARISON);
8634
8635 return fold_build2_loc (loc, code, type, offset0, offset1);
8636 }
8637 }
8638 /* For equal offsets we can simplify to a comparison of the
8639 base addresses. */
8640 else if (bitpos0 == bitpos1
8641 && (indirect_base0
8642 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8643 && (indirect_base1
8644 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8645 && ((offset0 == offset1)
8646 || (offset0 && offset1
8647 && operand_equal_p (offset0, offset1, 0))))
8648 {
8649 if (indirect_base0)
8650 base0 = build_fold_addr_expr_loc (loc, base0);
8651 if (indirect_base1)
8652 base1 = build_fold_addr_expr_loc (loc, base1);
8653 return fold_build2_loc (loc, code, type, base0, base1);
8654 }
8655 /* Comparison between an ordinary (non-weak) symbol and a null
8656 	 pointer can be eliminated since such symbols must have a
8657 	 non-null address.  In C, relational expressions between pointers
8658 to objects and null pointers are undefined. The results
8659 below follow the C++ rules with the additional property that
8660 every object pointer compares greater than a null pointer.
8661 */
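	 /* For example, given a non-weak declaration
	    struct { int a; int b; } x; the test &x.b == 0 folds to false
	    and &x.b != 0 to true, while &x.a == 0 is deliberately left
	    alone because its offset is zero.  */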
8662 else if (DECL_P (base0)
8663 && maybe_nonzero_address (base0) > 0
8664 /* Avoid folding references to struct members at offset 0 to
8665 prevent tests like '&ptr->firstmember == 0' from getting
8666 eliminated. When ptr is null, although the -> expression
8667 is strictly speaking invalid, GCC retains it as a matter
8668 of QoI. See PR c/44555. */
8669 && (offset0 == NULL_TREE && bitpos0 != 0)
8670 /* The caller guarantees that when one of the arguments is
8671 constant (i.e., null in this case) it is second. */
8672 && integer_zerop (arg1))
8673 {
8674 switch (code)
8675 {
8676 case EQ_EXPR:
8677 case LE_EXPR:
8678 case LT_EXPR:
8679 return constant_boolean_node (false, type);
8680 case GE_EXPR:
8681 case GT_EXPR:
8682 case NE_EXPR:
8683 return constant_boolean_node (true, type);
8684 default:
8685 gcc_unreachable ();
8686 }
8687 }
8688 }
8689
8690 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8691 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8692 the resulting offset is smaller in absolute value than the
8693 original one and has the same sign. */
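     /* For example, with signed X and Y, (X + 2) < (Y + 5) becomes
	X < Y + 3: the combined constant 3 is smaller in absolute value
	than 5 and has the same sign, so no new overflow is introduced.  */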
8694 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8695 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8696 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8697 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8698 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8699 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8700 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8701 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8702 {
8703 tree const1 = TREE_OPERAND (arg0, 1);
8704 tree const2 = TREE_OPERAND (arg1, 1);
8705 tree variable1 = TREE_OPERAND (arg0, 0);
8706 tree variable2 = TREE_OPERAND (arg1, 0);
8707 tree cst;
8708 const char * const warnmsg = G_("assuming signed overflow does not "
8709 "occur when combining constants around "
8710 "a comparison");
8711
8712       /* Put the constant on the side where it doesn't overflow, has a
8713 	 smaller absolute value than before, and keeps the same sign.  */
8714 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8715 ? MINUS_EXPR : PLUS_EXPR,
8716 const2, const1);
8717 if (!TREE_OVERFLOW (cst)
8718 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8719 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8720 {
8721 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8722 return fold_build2_loc (loc, code, type,
8723 variable1,
8724 fold_build2_loc (loc, TREE_CODE (arg1),
8725 TREE_TYPE (arg1),
8726 variable2, cst));
8727 }
8728
8729 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8730 ? MINUS_EXPR : PLUS_EXPR,
8731 const1, const2);
8732 if (!TREE_OVERFLOW (cst)
8733 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8734 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8735 {
8736 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8737 return fold_build2_loc (loc, code, type,
8738 fold_build2_loc (loc, TREE_CODE (arg0),
8739 TREE_TYPE (arg0),
8740 variable1, cst),
8741 variable2);
8742 }
8743 }
8744
8745 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8746 if (tem)
8747 return tem;
8748
8749 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8750 constant, we can simplify it. */
8751 if (TREE_CODE (arg1) == INTEGER_CST
8752 && (TREE_CODE (arg0) == MIN_EXPR
8753 || TREE_CODE (arg0) == MAX_EXPR)
8754 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8755 {
8756 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8757 if (tem)
8758 return tem;
8759 }
8760
8761 /* If we are comparing an expression that just has comparisons
8762 of two integer values, arithmetic expressions of those comparisons,
8763 and constants, we can simplify it. There are only three cases
8764 to check: the two values can either be equal, the first can be
8765 greater, or the second can be greater. Fold the expression for
8766 those three values. Since each value must be 0 or 1, we have
8767 eight possibilities, each of which corresponds to the constant 0
8768 or 1 or one of the six possible comparisons.
8769
8770 This handles common cases like (a > b) == 0 but also handles
8771 expressions like ((x > y) - (y > x)) > 0, which supposedly
8772 occur in macroized code. */
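     For example, for (a > b) == 0 the three probe values give 0 when
     a > b, 1 when a == b and 1 when a < b, i.e. the 3-bit mask 011,
     which selects LE_EXPR and folds the whole test to a <= b.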
8773
8774 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8775 {
8776 tree cval1 = 0, cval2 = 0;
8777 int save_p = 0;
8778
8779 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8780 /* Don't handle degenerate cases here; they should already
8781 have been handled anyway. */
8782 && cval1 != 0 && cval2 != 0
8783 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8784 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8785 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8786 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8787 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8788 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8789 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8790 {
8791 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8792 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8793
8794 /* We can't just pass T to eval_subst in case cval1 or cval2
8795 was the same as ARG1. */
8796
8797 tree high_result
8798 = fold_build2_loc (loc, code, type,
8799 eval_subst (loc, arg0, cval1, maxval,
8800 cval2, minval),
8801 arg1);
8802 tree equal_result
8803 = fold_build2_loc (loc, code, type,
8804 eval_subst (loc, arg0, cval1, maxval,
8805 cval2, maxval),
8806 arg1);
8807 tree low_result
8808 = fold_build2_loc (loc, code, type,
8809 eval_subst (loc, arg0, cval1, minval,
8810 cval2, maxval),
8811 arg1);
8812
8813 /* All three of these results should be 0 or 1. Confirm they are.
8814 Then use those values to select the proper code to use. */
8815
8816 if (TREE_CODE (high_result) == INTEGER_CST
8817 && TREE_CODE (equal_result) == INTEGER_CST
8818 && TREE_CODE (low_result) == INTEGER_CST)
8819 {
8820 /* Make a 3-bit mask with the high-order bit being the
8821 value for `>', the next for '=', and the low for '<'. */
8822 switch ((integer_onep (high_result) * 4)
8823 + (integer_onep (equal_result) * 2)
8824 + integer_onep (low_result))
8825 {
8826 case 0:
8827 /* Always false. */
8828 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8829 case 1:
8830 code = LT_EXPR;
8831 break;
8832 case 2:
8833 code = EQ_EXPR;
8834 break;
8835 case 3:
8836 code = LE_EXPR;
8837 break;
8838 case 4:
8839 code = GT_EXPR;
8840 break;
8841 case 5:
8842 code = NE_EXPR;
8843 break;
8844 case 6:
8845 code = GE_EXPR;
8846 break;
8847 case 7:
8848 /* Always true. */
8849 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8850 }
8851
8852 if (save_p)
8853 {
8854 tem = save_expr (build2 (code, type, cval1, cval2));
8855 SET_EXPR_LOCATION (tem, loc);
8856 return tem;
8857 }
8858 return fold_build2_loc (loc, code, type, cval1, cval2);
8859 }
8860 }
8861 }
8862
8863 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8864 into a single range test. */
8865 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8866 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8867 && TREE_CODE (arg1) == INTEGER_CST
8868 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8869 && !integer_zerop (TREE_OPERAND (arg0, 1))
8870 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8871 && !TREE_OVERFLOW (arg1))
8872 {
8873 tem = fold_div_compare (loc, code, type, arg0, arg1);
8874 if (tem != NULL_TREE)
8875 return tem;
8876 }
8877
8878 return NULL_TREE;
8879 }
8880
8881
8882 /* Subroutine of fold_binary. Optimize complex multiplications of the
8883 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8884 argument EXPR represents the expression "z" of type TYPE. */
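/* For z = a + b*i we have z * conj(z) = (a + b*i) * (a - b*i)
   = a*a + b*b, so the result is the complex pair (a*a + b*b, 0).  */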
8885
8886 static tree
8887 fold_mult_zconjz (location_t loc, tree type, tree expr)
8888 {
8889 tree itype = TREE_TYPE (type);
8890 tree rpart, ipart, tem;
8891
8892 if (TREE_CODE (expr) == COMPLEX_EXPR)
8893 {
8894 rpart = TREE_OPERAND (expr, 0);
8895 ipart = TREE_OPERAND (expr, 1);
8896 }
8897 else if (TREE_CODE (expr) == COMPLEX_CST)
8898 {
8899 rpart = TREE_REALPART (expr);
8900 ipart = TREE_IMAGPART (expr);
8901 }
8902 else
8903 {
8904 expr = save_expr (expr);
8905 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8906 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8907 }
8908
8909 rpart = save_expr (rpart);
8910 ipart = save_expr (ipart);
8911 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8912 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8913 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8914 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8915 build_zero_cst (itype));
8916 }
8917
8918
8919 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8920 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8921
8922 static bool
8923 vec_cst_ctor_to_array (tree arg, tree *elts)
8924 {
8925 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8926
8927 if (TREE_CODE (arg) == VECTOR_CST)
8928 {
8929 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8930 elts[i] = VECTOR_CST_ELT (arg, i);
8931 }
8932 else if (TREE_CODE (arg) == CONSTRUCTOR)
8933 {
8934 constructor_elt *elt;
8935
8936 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8937 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8938 return false;
8939 else
8940 elts[i] = elt->value;
8941 }
8942 else
8943 return false;
8944 for (; i < nelts; i++)
8945 elts[i]
8946 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8947 return true;
8948 }
8949
8950 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8951 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8952 NULL_TREE otherwise. */
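/* For example, with four-element vectors, SEL = { 0, 5, 2, 7 } yields
   { ARG0[0], ARG1[1], ARG0[2], ARG1[3] }: selector values below the
   vector length index into ARG0, the remaining ones into ARG1.  */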
8953
8954 static tree
8955 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8956 {
8957 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8958 tree *elts;
8959 bool need_ctor = false;
8960
8961 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8962 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8963 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8964 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8965 return NULL_TREE;
8966
8967 elts = XALLOCAVEC (tree, nelts * 3);
8968 if (!vec_cst_ctor_to_array (arg0, elts)
8969 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8970 return NULL_TREE;
8971
8972 for (i = 0; i < nelts; i++)
8973 {
8974 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8975 need_ctor = true;
8976 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8977 }
8978
8979 if (need_ctor)
8980 {
8981 vec<constructor_elt, va_gc> *v;
8982 vec_alloc (v, nelts);
8983 for (i = 0; i < nelts; i++)
8984 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8985 return build_constructor (type, v);
8986 }
8987 else
8988 return build_vector (type, &elts[2 * nelts]);
8989 }
8990
8991 /* Try to fold a pointer difference of type TYPE between two address
8992    expressions of array references AREF0 and AREF1 using location LOC.  Return a
8993 simplified expression for the difference or NULL_TREE. */
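/* For example, &a[i] - &a[j] over a common base array folds to
   (i - j) * sizeof (a[0]), computed in TYPE.  */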
8994
8995 static tree
8996 fold_addr_of_array_ref_difference (location_t loc, tree type,
8997 tree aref0, tree aref1)
8998 {
8999 tree base0 = TREE_OPERAND (aref0, 0);
9000 tree base1 = TREE_OPERAND (aref1, 0);
9001 tree base_offset = build_int_cst (type, 0);
9002
9003 /* If the bases are array references as well, recurse. If the bases
9004 are pointer indirections compute the difference of the pointers.
9005 If the bases are equal, we are set. */
9006 if ((TREE_CODE (base0) == ARRAY_REF
9007 && TREE_CODE (base1) == ARRAY_REF
9008 && (base_offset
9009 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9010 || (INDIRECT_REF_P (base0)
9011 && INDIRECT_REF_P (base1)
9012 && (base_offset
9013 = fold_binary_loc (loc, MINUS_EXPR, type,
9014 fold_convert (type, TREE_OPERAND (base0, 0)),
9015 fold_convert (type,
9016 TREE_OPERAND (base1, 0)))))
9017 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9018 {
9019 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9020 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9021 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9022 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9023 return fold_build2_loc (loc, PLUS_EXPR, type,
9024 base_offset,
9025 fold_build2_loc (loc, MULT_EXPR, type,
9026 diff, esz));
9027 }
9028 return NULL_TREE;
9029 }
9030
9031 /* If the real or vector real constant CST of type TYPE has an exact
9032 inverse, return it, else return NULL. */
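/* For example, 4.0 yields 0.25, while 3.0 yields NULL because 1/3 has
   no exact binary floating-point representation.  */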
9033
9034 tree
9035 exact_inverse (tree type, tree cst)
9036 {
9037 REAL_VALUE_TYPE r;
9038 tree unit_type, *elts;
9039 machine_mode mode;
9040 unsigned vec_nelts, i;
9041
9042 switch (TREE_CODE (cst))
9043 {
9044 case REAL_CST:
9045 r = TREE_REAL_CST (cst);
9046
9047 if (exact_real_inverse (TYPE_MODE (type), &r))
9048 return build_real (type, r);
9049
9050 return NULL_TREE;
9051
9052 case VECTOR_CST:
9053 vec_nelts = VECTOR_CST_NELTS (cst);
9054 elts = XALLOCAVEC (tree, vec_nelts);
9055 unit_type = TREE_TYPE (type);
9056 mode = TYPE_MODE (unit_type);
9057
9058 for (i = 0; i < vec_nelts; i++)
9059 {
9060 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9061 if (!exact_real_inverse (mode, &r))
9062 return NULL_TREE;
9063 elts[i] = build_real (unit_type, r);
9064 }
9065
9066 return build_vector (type, elts);
9067
9068 default:
9069 return NULL_TREE;
9070 }
9071 }
9072
9073 /* Mask out the tz least significant bits of X of type TYPE where
9074 tz is the number of trailing zeroes in Y. */
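/* For example, if Y is 24 (binary 11000, three trailing zeroes), the
   result is X & ~7, i.e. X with its low three bits cleared.  */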
9075 static wide_int
9076 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9077 {
9078 int tz = wi::ctz (y);
9079 if (tz > 0)
9080 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9081 return x;
9082 }
9083
9084 /* Return true when T is an address and is known to be nonzero.
9085 For floating point we further ensure that T is not denormal.
9086 Similar logic is present in nonzero_address in rtlanal.h.
9087
9088 If the return value is based on the assumption that signed overflow
9089 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9090 change *STRICT_OVERFLOW_P. */
9091
9092 static bool
9093 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9094 {
9095 tree type = TREE_TYPE (t);
9096 enum tree_code code;
9097
9098 /* Doing something useful for floating point would need more work. */
9099 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9100 return false;
9101
9102 code = TREE_CODE (t);
9103 switch (TREE_CODE_CLASS (code))
9104 {
9105 case tcc_unary:
9106 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9107 strict_overflow_p);
9108 case tcc_binary:
9109 case tcc_comparison:
9110 return tree_binary_nonzero_warnv_p (code, type,
9111 TREE_OPERAND (t, 0),
9112 TREE_OPERAND (t, 1),
9113 strict_overflow_p);
9114 case tcc_constant:
9115 case tcc_declaration:
9116 case tcc_reference:
9117 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9118
9119 default:
9120 break;
9121 }
9122
9123 switch (code)
9124 {
9125 case TRUTH_NOT_EXPR:
9126 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9127 strict_overflow_p);
9128
9129 case TRUTH_AND_EXPR:
9130 case TRUTH_OR_EXPR:
9131 case TRUTH_XOR_EXPR:
9132 return tree_binary_nonzero_warnv_p (code, type,
9133 TREE_OPERAND (t, 0),
9134 TREE_OPERAND (t, 1),
9135 strict_overflow_p);
9136
9137 case COND_EXPR:
9138 case CONSTRUCTOR:
9139 case OBJ_TYPE_REF:
9140 case ASSERT_EXPR:
9141 case ADDR_EXPR:
9142 case WITH_SIZE_EXPR:
9143 case SSA_NAME:
9144 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9145
9146 case COMPOUND_EXPR:
9147 case MODIFY_EXPR:
9148 case BIND_EXPR:
9149 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9150 strict_overflow_p);
9151
9152 case SAVE_EXPR:
9153 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9154 strict_overflow_p);
9155
9156 case CALL_EXPR:
9157 {
9158 tree fndecl = get_callee_fndecl (t);
9159 if (!fndecl) return false;
9160 if (flag_delete_null_pointer_checks && !flag_check_new
9161 && DECL_IS_OPERATOR_NEW (fndecl)
9162 && !TREE_NOTHROW (fndecl))
9163 return true;
9164 if (flag_delete_null_pointer_checks
9165 && lookup_attribute ("returns_nonnull",
9166 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9167 return true;
9168 return alloca_call_p (t);
9169 }
9170
9171 default:
9172 break;
9173 }
9174 return false;
9175 }
9176
9177 /* Return true when T is an address and is known to be nonzero.
9178 Handle warnings about undefined signed overflow. */
9179
9180 static bool
9181 tree_expr_nonzero_p (tree t)
9182 {
9183 bool ret, strict_overflow_p;
9184
9185 strict_overflow_p = false;
9186 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9187 if (strict_overflow_p)
9188 fold_overflow_warning (("assuming signed overflow does not occur when "
9189 "determining that expression is always "
9190 "non-zero"),
9191 WARN_STRICT_OVERFLOW_MISC);
9192 return ret;
9193 }
9194
9195 /* Return true if T is known not to be equal to an integer W. */
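/* For example, if range information proves an SSA name lies in [0, 5],
   it cannot equal 9; and if its known-zero bits show it is even, it
   cannot equal any odd W.  */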
9196
9197 bool
9198 expr_not_equal_to (tree t, const wide_int &w)
9199 {
9200 wide_int min, max, nz;
9201 value_range_type rtype;
9202 switch (TREE_CODE (t))
9203 {
9204 case INTEGER_CST:
9205 return wi::ne_p (t, w);
9206
9207 case SSA_NAME:
9208 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9209 return false;
9210 rtype = get_range_info (t, &min, &max);
9211 if (rtype == VR_RANGE)
9212 {
9213 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9214 return true;
9215 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9216 return true;
9217 }
9218 else if (rtype == VR_ANTI_RANGE
9219 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9220 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9221 return true;
9222 /* If T has some known zero bits and W has any of those bits set,
9223 then T is known not to be equal to W. */
9224 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9225 TYPE_PRECISION (TREE_TYPE (t))), 0))
9226 return true;
9227 return false;
9228
9229 default:
9230 return false;
9231 }
9232 }
9233
9234 /* Fold a binary expression of code CODE and type TYPE with operands
9235 OP0 and OP1. LOC is the location of the resulting expression.
9236 Return the folded expression if folding is successful. Otherwise,
9237 return NULL_TREE. */
9238
9239 tree
9240 fold_binary_loc (location_t loc,
9241 enum tree_code code, tree type, tree op0, tree op1)
9242 {
9243 enum tree_code_class kind = TREE_CODE_CLASS (code);
9244 tree arg0, arg1, tem;
9245 tree t1 = NULL_TREE;
9246 bool strict_overflow_p;
9247 unsigned int prec;
9248
9249 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9250 && TREE_CODE_LENGTH (code) == 2
9251 && op0 != NULL_TREE
9252 && op1 != NULL_TREE);
9253
9254 arg0 = op0;
9255 arg1 = op1;
9256
9257 /* Strip any conversions that don't change the mode. This is
9258 safe for every expression, except for a comparison expression
9259 because its signedness is derived from its operands. So, in
9260 the latter case, only strip conversions that don't change the
9261      signedness.  MIN_EXPR/MAX_EXPR also need the signedness of their
9262      arguments preserved.
9263
9264 Note that this is done as an internal manipulation within the
9265 constant folder, in order to find the simplest representation
9266      of the arguments so that their form can be studied.  In any
9267      case, the appropriate type conversions should be put back in
9268 the tree that will get out of the constant folder. */
9269
9270 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9271 {
9272 STRIP_SIGN_NOPS (arg0);
9273 STRIP_SIGN_NOPS (arg1);
9274 }
9275 else
9276 {
9277 STRIP_NOPS (arg0);
9278 STRIP_NOPS (arg1);
9279 }
9280
9281 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9282 constant but we can't do arithmetic on them. */
9283 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9284 {
9285 tem = const_binop (code, type, arg0, arg1);
9286 if (tem != NULL_TREE)
9287 {
9288 if (TREE_TYPE (tem) != type)
9289 tem = fold_convert_loc (loc, type, tem);
9290 return tem;
9291 }
9292 }
9293
9294 /* If this is a commutative operation, and ARG0 is a constant, move it
9295 to ARG1 to reduce the number of tests below. */
9296 if (commutative_tree_code (code)
9297 && tree_swap_operands_p (arg0, arg1, true))
9298 return fold_build2_loc (loc, code, type, op1, op0);
9299
9300 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9301 to ARG1 to reduce the number of tests below. */
9302 if (kind == tcc_comparison
9303 && tree_swap_operands_p (arg0, arg1, true))
9304 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9305
9306 tem = generic_simplify (loc, code, type, op0, op1);
9307 if (tem)
9308 return tem;
9309
9310 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9311
9312 First check for cases where an arithmetic operation is applied to a
9313 compound, conditional, or comparison operation. Push the arithmetic
9314 operation inside the compound or conditional to see if any folding
9315 can then be done. Convert comparison to conditional for this purpose.
9316      This also optimizes non-constant cases that used to be done in
9317 expand_expr.
9318
9319 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9320 one of the operands is a comparison and the other is a comparison, a
9321 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9322 code below would make the expression more complex. Change it to a
9323 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9324 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9325
9326 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9327 || code == EQ_EXPR || code == NE_EXPR)
9328 && TREE_CODE (type) != VECTOR_TYPE
9329 && ((truth_value_p (TREE_CODE (arg0))
9330 && (truth_value_p (TREE_CODE (arg1))
9331 || (TREE_CODE (arg1) == BIT_AND_EXPR
9332 && integer_onep (TREE_OPERAND (arg1, 1)))))
9333 || (truth_value_p (TREE_CODE (arg1))
9334 && (truth_value_p (TREE_CODE (arg0))
9335 || (TREE_CODE (arg0) == BIT_AND_EXPR
9336 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9337 {
9338 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9339 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9340 : TRUTH_XOR_EXPR,
9341 boolean_type_node,
9342 fold_convert_loc (loc, boolean_type_node, arg0),
9343 fold_convert_loc (loc, boolean_type_node, arg1));
9344
9345 if (code == EQ_EXPR)
9346 tem = invert_truthvalue_loc (loc, tem);
9347
9348 return fold_convert_loc (loc, type, tem);
9349 }
9350
9351 if (TREE_CODE_CLASS (code) == tcc_binary
9352 || TREE_CODE_CLASS (code) == tcc_comparison)
9353 {
9354 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9355 {
9356 tem = fold_build2_loc (loc, code, type,
9357 fold_convert_loc (loc, TREE_TYPE (op0),
9358 TREE_OPERAND (arg0, 1)), op1);
9359 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9360 tem);
9361 }
9362 if (TREE_CODE (arg1) == COMPOUND_EXPR
9363 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9364 {
9365 tem = fold_build2_loc (loc, code, type, op0,
9366 fold_convert_loc (loc, TREE_TYPE (op1),
9367 TREE_OPERAND (arg1, 1)));
9368 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9369 tem);
9370 }
9371
9372 if (TREE_CODE (arg0) == COND_EXPR
9373 || TREE_CODE (arg0) == VEC_COND_EXPR
9374 || COMPARISON_CLASS_P (arg0))
9375 {
9376 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9377 arg0, arg1,
9378 /*cond_first_p=*/1);
9379 if (tem != NULL_TREE)
9380 return tem;
9381 }
9382
9383 if (TREE_CODE (arg1) == COND_EXPR
9384 || TREE_CODE (arg1) == VEC_COND_EXPR
9385 || COMPARISON_CLASS_P (arg1))
9386 {
9387 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9388 arg1, arg0,
9389 /*cond_first_p=*/0);
9390 if (tem != NULL_TREE)
9391 return tem;
9392 }
9393 }
9394
9395 switch (code)
9396 {
9397 case MEM_REF:
9398 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9399 if (TREE_CODE (arg0) == ADDR_EXPR
9400 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9401 {
9402 tree iref = TREE_OPERAND (arg0, 0);
9403 return fold_build2 (MEM_REF, type,
9404 TREE_OPERAND (iref, 0),
9405 int_const_binop (PLUS_EXPR, arg1,
9406 TREE_OPERAND (iref, 1)));
9407 }
9408
9409 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9410 if (TREE_CODE (arg0) == ADDR_EXPR
9411 && handled_component_p (TREE_OPERAND (arg0, 0)))
9412 {
9413 tree base;
9414 HOST_WIDE_INT coffset;
9415 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9416 &coffset);
9417 if (!base)
9418 return NULL_TREE;
9419 return fold_build2 (MEM_REF, type,
9420 build_fold_addr_expr (base),
9421 int_const_binop (PLUS_EXPR, arg1,
9422 size_int (coffset)));
9423 }
9424
9425 return NULL_TREE;
9426
9427 case POINTER_PLUS_EXPR:
9428 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9429 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9430 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9431 return fold_convert_loc (loc, type,
9432 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9433 fold_convert_loc (loc, sizetype,
9434 arg1),
9435 fold_convert_loc (loc, sizetype,
9436 arg0)));
9437
9438 return NULL_TREE;
9439
9440 case PLUS_EXPR:
9441 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9442 {
9443 /* X + (X / CST) * -CST is X % CST. */
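	  /* For example, X + (X / 16) * -16 folds to X % 16, since
	     X - (X / 16) * 16 is exactly the truncating-division
	     remainder.  */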
9444 if (TREE_CODE (arg1) == MULT_EXPR
9445 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9446 && operand_equal_p (arg0,
9447 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9448 {
9449 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9450 tree cst1 = TREE_OPERAND (arg1, 1);
9451 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9452 cst1, cst0);
9453 if (sum && integer_zerop (sum))
9454 return fold_convert_loc (loc, type,
9455 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9456 TREE_TYPE (arg0), arg0,
9457 cst0));
9458 }
9459 }
9460
9461     /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being equal or
9462        one of them being 1.  Make sure the type is not saturating and has the signedness of
9463 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9464 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9465 if ((TREE_CODE (arg0) == MULT_EXPR
9466 || TREE_CODE (arg1) == MULT_EXPR)
9467 && !TYPE_SATURATING (type)
9468 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9469 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9470 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9471 {
9472 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9473 if (tem)
9474 return tem;
9475 }
9476
9477 if (! FLOAT_TYPE_P (type))
9478 {
9479 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9480 (plus (plus (mult) (mult)) (foo)) so that we can
9481 take advantage of the factoring cases below. */
9482 if (ANY_INTEGRAL_TYPE_P (type)
9483 && TYPE_OVERFLOW_WRAPS (type)
9484 && (((TREE_CODE (arg0) == PLUS_EXPR
9485 || TREE_CODE (arg0) == MINUS_EXPR)
9486 && TREE_CODE (arg1) == MULT_EXPR)
9487 || ((TREE_CODE (arg1) == PLUS_EXPR
9488 || TREE_CODE (arg1) == MINUS_EXPR)
9489 && TREE_CODE (arg0) == MULT_EXPR)))
9490 {
9491 tree parg0, parg1, parg, marg;
9492 enum tree_code pcode;
9493
9494 if (TREE_CODE (arg1) == MULT_EXPR)
9495 parg = arg0, marg = arg1;
9496 else
9497 parg = arg1, marg = arg0;
9498 pcode = TREE_CODE (parg);
9499 parg0 = TREE_OPERAND (parg, 0);
9500 parg1 = TREE_OPERAND (parg, 1);
9501 STRIP_NOPS (parg0);
9502 STRIP_NOPS (parg1);
9503
9504 if (TREE_CODE (parg0) == MULT_EXPR
9505 && TREE_CODE (parg1) != MULT_EXPR)
9506 return fold_build2_loc (loc, pcode, type,
9507 fold_build2_loc (loc, PLUS_EXPR, type,
9508 fold_convert_loc (loc, type,
9509 parg0),
9510 fold_convert_loc (loc, type,
9511 marg)),
9512 fold_convert_loc (loc, type, parg1));
9513 if (TREE_CODE (parg0) != MULT_EXPR
9514 && TREE_CODE (parg1) == MULT_EXPR)
9515 return
9516 fold_build2_loc (loc, PLUS_EXPR, type,
9517 fold_convert_loc (loc, type, parg0),
9518 fold_build2_loc (loc, pcode, type,
9519 fold_convert_loc (loc, type, marg),
9520 fold_convert_loc (loc, type,
9521 parg1)));
9522 }
9523 }
9524 else
9525 {
9526 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9527 to __complex__ ( x, y ). This is not the same for SNaNs or
9528 if signed zeros are involved. */
9529 if (!HONOR_SNANS (element_mode (arg0))
9530 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9531 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9532 {
9533 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9534 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9535 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9536 bool arg0rz = false, arg0iz = false;
9537 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9538 || (arg0i && (arg0iz = real_zerop (arg0i))))
9539 {
9540 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9541 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9542 if (arg0rz && arg1i && real_zerop (arg1i))
9543 {
9544 tree rp = arg1r ? arg1r
9545 : build1 (REALPART_EXPR, rtype, arg1);
9546 tree ip = arg0i ? arg0i
9547 : build1 (IMAGPART_EXPR, rtype, arg0);
9548 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9549 }
9550 else if (arg0iz && arg1r && real_zerop (arg1r))
9551 {
9552 tree rp = arg0r ? arg0r
9553 : build1 (REALPART_EXPR, rtype, arg0);
9554 tree ip = arg1i ? arg1i
9555 : build1 (IMAGPART_EXPR, rtype, arg1);
9556 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9557 }
9558 }
9559 }
9560
9561 if (flag_unsafe_math_optimizations
9562 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9563 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9564 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9565 return tem;
9566
9567 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9568 We associate floats only if the user has specified
9569 -fassociative-math. */
9570 if (flag_associative_math
9571 && TREE_CODE (arg1) == PLUS_EXPR
9572 && TREE_CODE (arg0) != MULT_EXPR)
9573 {
9574 tree tree10 = TREE_OPERAND (arg1, 0);
9575 tree tree11 = TREE_OPERAND (arg1, 1);
9576 if (TREE_CODE (tree11) == MULT_EXPR
9577 && TREE_CODE (tree10) == MULT_EXPR)
9578 {
9579 tree tree0;
9580 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9581 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9582 }
9583 }
9584 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9585 We associate floats only if the user has specified
9586 -fassociative-math. */
9587 if (flag_associative_math
9588 && TREE_CODE (arg0) == PLUS_EXPR
9589 && TREE_CODE (arg1) != MULT_EXPR)
9590 {
9591 tree tree00 = TREE_OPERAND (arg0, 0);
9592 tree tree01 = TREE_OPERAND (arg0, 1);
9593 if (TREE_CODE (tree01) == MULT_EXPR
9594 && TREE_CODE (tree00) == MULT_EXPR)
9595 {
9596 tree tree0;
9597 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9598 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9599 }
9600 }
9601 }
9602
9603 bit_rotate:
9604 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9605 is a rotate of A by C1 bits. */
9606 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9607 is a rotate of A by B bits. */
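    /* For example, with a 32-bit unsigned A, (A << 3) + (A >> 29) is
       recognized as rotating A left by 3: the two shifted bit ranges do
       not overlap, so the addition behaves like a bitwise OR.  */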
9608 {
9609 enum tree_code code0, code1;
9610 tree rtype;
9611 code0 = TREE_CODE (arg0);
9612 code1 = TREE_CODE (arg1);
9613 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9614 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9615 && operand_equal_p (TREE_OPERAND (arg0, 0),
9616 TREE_OPERAND (arg1, 0), 0)
9617 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9618 TYPE_UNSIGNED (rtype))
9619 /* Only create rotates in complete modes. Other cases are not
9620 expanded properly. */
9621 && (element_precision (rtype)
9622 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9623 {
9624 tree tree01, tree11;
9625 enum tree_code code01, code11;
9626
9627 tree01 = TREE_OPERAND (arg0, 1);
9628 tree11 = TREE_OPERAND (arg1, 1);
9629 STRIP_NOPS (tree01);
9630 STRIP_NOPS (tree11);
9631 code01 = TREE_CODE (tree01);
9632 code11 = TREE_CODE (tree11);
9633 if (code01 == INTEGER_CST
9634 && code11 == INTEGER_CST
9635 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9636 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9637 {
9638 tem = build2_loc (loc, LROTATE_EXPR,
9639 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9640 TREE_OPERAND (arg0, 0),
9641 code0 == LSHIFT_EXPR
9642 ? TREE_OPERAND (arg0, 1)
9643 : TREE_OPERAND (arg1, 1));
9644 return fold_convert_loc (loc, type, tem);
9645 }
9646 else if (code11 == MINUS_EXPR)
9647 {
9648 tree tree110, tree111;
9649 tree110 = TREE_OPERAND (tree11, 0);
9650 tree111 = TREE_OPERAND (tree11, 1);
9651 STRIP_NOPS (tree110);
9652 STRIP_NOPS (tree111);
9653 if (TREE_CODE (tree110) == INTEGER_CST
9654 && 0 == compare_tree_int (tree110,
9655 element_precision
9656 (TREE_TYPE (TREE_OPERAND
9657 (arg0, 0))))
9658 && operand_equal_p (tree01, tree111, 0))
9659 return
9660 fold_convert_loc (loc, type,
9661 build2 ((code0 == LSHIFT_EXPR
9662 ? LROTATE_EXPR
9663 : RROTATE_EXPR),
9664 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9665 TREE_OPERAND (arg0, 0),
9666 TREE_OPERAND (arg0, 1)));
9667 }
9668 else if (code01 == MINUS_EXPR)
9669 {
9670 tree tree010, tree011;
9671 tree010 = TREE_OPERAND (tree01, 0);
9672 tree011 = TREE_OPERAND (tree01, 1);
9673 STRIP_NOPS (tree010);
9674 STRIP_NOPS (tree011);
9675 if (TREE_CODE (tree010) == INTEGER_CST
9676 && 0 == compare_tree_int (tree010,
9677 element_precision
9678 (TREE_TYPE (TREE_OPERAND
9679 (arg0, 0))))
9680 && operand_equal_p (tree11, tree011, 0))
9681 return fold_convert_loc
9682 (loc, type,
9683 build2 ((code0 != LSHIFT_EXPR
9684 ? LROTATE_EXPR
9685 : RROTATE_EXPR),
9686 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9687 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9688 }
9689 }
9690 }
9691
9692 associate:
9693   /* In most languages, we can't associate operations on floats through
9694 parentheses. Rather than remember where the parentheses were, we
9695 don't associate floats at all, unless the user has specified
9696 -fassociative-math.
9697 And, we need to make sure type is not saturating. */
9698
9699 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9700 && !TYPE_SATURATING (type))
9701 {
9702 tree var0, con0, lit0, minus_lit0;
9703 tree var1, con1, lit1, minus_lit1;
9704 tree atype = type;
9705 bool ok = true;
9706
9707 /* Split both trees into variables, constants, and literals. Then
9708 associate each group together, the constants with literals,
9709 then the result with variables. This increases the chances of
9710 literals being recombined later and of generating relocatable
9711 expressions for the sum of a constant and literal. */
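      /* For example, with a wrapping unsigned type, (x + 4) + (y + 10)
	 splits into the variables x and y and the literals 4 and 10,
	 and reassociates to (x + y) + 14.  */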
9712 var0 = split_tree (loc, arg0, type, code,
9713 &con0, &lit0, &minus_lit0, 0);
9714 var1 = split_tree (loc, arg1, type, code,
9715 &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9716
9717 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9718 if (code == MINUS_EXPR)
9719 code = PLUS_EXPR;
9720
9721 /* With undefined overflow prefer doing association in a type
9722 which wraps on overflow, if that is one of the operand types. */
9723 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9724 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9725 {
9726 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9727 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9728 atype = TREE_TYPE (arg0);
9729 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9730 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9731 atype = TREE_TYPE (arg1);
9732 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9733 }
9734
9735 /* With undefined overflow we can only associate constants with one
9736 variable, and constants whose association doesn't overflow. */
9737 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9738 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9739 {
9740 if (var0 && var1)
9741 {
9742 tree tmp0 = var0;
9743 tree tmp1 = var1;
9744 bool one_neg = false;
9745
9746 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9747 {
9748 tmp0 = TREE_OPERAND (tmp0, 0);
9749 one_neg = !one_neg;
9750 }
9751 if (CONVERT_EXPR_P (tmp0)
9752 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9753 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9754 <= TYPE_PRECISION (atype)))
9755 tmp0 = TREE_OPERAND (tmp0, 0);
9756 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9757 {
9758 tmp1 = TREE_OPERAND (tmp1, 0);
9759 one_neg = !one_neg;
9760 }
9761 if (CONVERT_EXPR_P (tmp1)
9762 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9763 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9764 <= TYPE_PRECISION (atype)))
9765 tmp1 = TREE_OPERAND (tmp1, 0);
9766 /* The only case we can still associate with two variables
9767 is if they cancel out. */
9768 if (!one_neg
9769 || !operand_equal_p (tmp0, tmp1, 0))
9770 ok = false;
9771 }
9772 }
9773
9774 /* Only do something if we found more than two objects. Otherwise,
9775 nothing has changed and we risk infinite recursion. */
9776 if (ok
9777 && (2 < ((var0 != 0) + (var1 != 0)
9778 + (con0 != 0) + (con1 != 0)
9779 + (lit0 != 0) + (lit1 != 0)
9780 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9781 {
9782 bool any_overflows = false;
9783 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9784 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9785 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9786 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9787 var0 = associate_trees (loc, var0, var1, code, atype);
9788 con0 = associate_trees (loc, con0, con1, code, atype);
9789 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9790 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9791 code, atype);
9792
9793 /* Preserve the MINUS_EXPR if the negative part of the literal is
9794 greater than the positive part. Otherwise, the multiplicative
9795 	     folding code (i.e. extract_muldiv) may be fooled when
9796 	     unsigned constants are subtracted, as in the following
9797 example: ((X*2 + 4) - 8U)/2. */
9798 if (minus_lit0 && lit0)
9799 {
9800 if (TREE_CODE (lit0) == INTEGER_CST
9801 && TREE_CODE (minus_lit0) == INTEGER_CST
9802 && tree_int_cst_lt (lit0, minus_lit0))
9803 {
9804 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9805 MINUS_EXPR, atype);
9806 lit0 = 0;
9807 }
9808 else
9809 {
9810 lit0 = associate_trees (loc, lit0, minus_lit0,
9811 MINUS_EXPR, atype);
9812 minus_lit0 = 0;
9813 }
9814 }
9815
9816 /* Don't introduce overflows through reassociation. */
9817 if (!any_overflows
9818 && ((lit0 && TREE_OVERFLOW_P (lit0))
9819 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9820 return NULL_TREE;
9821
9822 if (minus_lit0)
9823 {
9824 if (con0 == 0)
9825 return
9826 fold_convert_loc (loc, type,
9827 associate_trees (loc, var0, minus_lit0,
9828 MINUS_EXPR, atype));
9829 else
9830 {
9831 con0 = associate_trees (loc, con0, minus_lit0,
9832 MINUS_EXPR, atype);
9833 return
9834 fold_convert_loc (loc, type,
9835 associate_trees (loc, var0, con0,
9836 PLUS_EXPR, atype));
9837 }
9838 }
9839
9840 con0 = associate_trees (loc, con0, lit0, code, atype);
9841 return
9842 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9843 code, atype));
9844 }
9845 }
9846
9847 return NULL_TREE;
9848
9849 case MINUS_EXPR:
9850 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9851 if (TREE_CODE (arg0) == NEGATE_EXPR
9852 && negate_expr_p (op1)
9853 && reorder_operands_p (arg0, arg1))
9854 return fold_build2_loc (loc, MINUS_EXPR, type,
9855 negate_expr (op1),
9856 fold_convert_loc (loc, type,
9857 TREE_OPERAND (arg0, 0)));
9858
9859 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9860 __complex__ ( x, -y ). This is not the same for SNaNs or if
9861 signed zeros are involved. */
9862 if (!HONOR_SNANS (element_mode (arg0))
9863 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9864 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9865 {
9866 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9867 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9868 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9869 bool arg0rz = false, arg0iz = false;
9870 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9871 || (arg0i && (arg0iz = real_zerop (arg0i))))
9872 {
9873 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9874 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9875 if (arg0rz && arg1i && real_zerop (arg1i))
9876 {
9877 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9878 arg1r ? arg1r
9879 : build1 (REALPART_EXPR, rtype, arg1));
9880 tree ip = arg0i ? arg0i
9881 : build1 (IMAGPART_EXPR, rtype, arg0);
9882 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9883 }
9884 else if (arg0iz && arg1r && real_zerop (arg1r))
9885 {
9886 tree rp = arg0r ? arg0r
9887 : build1 (REALPART_EXPR, rtype, arg0);
9888 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9889 arg1i ? arg1i
9890 : build1 (IMAGPART_EXPR, rtype, arg1));
9891 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9892 }
9893 }
9894 }
9895
9896 /* A - B -> A + (-B) if B is easily negatable. */
9897 if (negate_expr_p (op1)
9898 && ! TYPE_OVERFLOW_SANITIZED (type)
9899 && ((FLOAT_TYPE_P (type)
9900 /* Avoid this transformation if B is a positive REAL_CST. */
9901 && (TREE_CODE (op1) != REAL_CST
9902 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9903 || INTEGRAL_TYPE_P (type)))
9904 return fold_build2_loc (loc, PLUS_EXPR, type,
9905 fold_convert_loc (loc, type, arg0),
9906 negate_expr (op1));
9907
9908 /* Fold &a[i] - &a[j] to i-j. */
9909 if (TREE_CODE (arg0) == ADDR_EXPR
9910 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9911 && TREE_CODE (arg1) == ADDR_EXPR
9912 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9913 {
9914 tree tem = fold_addr_of_array_ref_difference (loc, type,
9915 TREE_OPERAND (arg0, 0),
9916 TREE_OPERAND (arg1, 0));
9917 if (tem)
9918 return tem;
9919 }
9920
9921 if (FLOAT_TYPE_P (type)
9922 && flag_unsafe_math_optimizations
9923 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9924 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9925 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9926 return tem;
9927
9928       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being equal or
9929 	 one of them being 1.  Make sure the type is not saturating and has the signedness of
9930 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9931 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9932 if ((TREE_CODE (arg0) == MULT_EXPR
9933 || TREE_CODE (arg1) == MULT_EXPR)
9934 && !TYPE_SATURATING (type)
9935 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9936 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9937 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9938 {
9939 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9940 if (tem)
9941 return tem;
9942 }
9943
9944 goto associate;
9945
9946 case MULT_EXPR:
9947 if (! FLOAT_TYPE_P (type))
9948 {
9949 /* Transform x * -C into -x * C if x is easily negatable. */
9950 if (TREE_CODE (op1) == INTEGER_CST
9951 && tree_int_cst_sgn (op1) == -1
9952 && negate_expr_p (op0)
9953 && (tem = negate_expr (op1)) != op1
9954 && ! TREE_OVERFLOW (tem))
9955 return fold_build2_loc (loc, MULT_EXPR, type,
9956 fold_convert_loc (loc, type,
9957 negate_expr (op0)), tem);
9958
9959 strict_overflow_p = false;
9960 if (TREE_CODE (arg1) == INTEGER_CST
9961 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9962 &strict_overflow_p)))
9963 {
9964 if (strict_overflow_p)
9965 fold_overflow_warning (("assuming signed overflow does not "
9966 "occur when simplifying "
9967 "multiplication"),
9968 WARN_STRICT_OVERFLOW_MISC);
9969 return fold_convert_loc (loc, type, tem);
9970 }
9971
9972 /* Optimize z * conj(z) for integer complex numbers. */
9973 if (TREE_CODE (arg0) == CONJ_EXPR
9974 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9975 return fold_mult_zconjz (loc, type, arg1);
9976 if (TREE_CODE (arg1) == CONJ_EXPR
9977 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9978 return fold_mult_zconjz (loc, type, arg0);
9979 }
9980 else
9981 {
9982 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9983 This is not the same for NaNs or if signed zeros are
9984 involved. */
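	  /* For z = a + b*i, z * I is -b + a*i and z * -I is b - a*i:
	     the real and imaginary parts are merely swapped, with one
	     of them negated.  */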
9985 if (!HONOR_NANS (arg0)
9986 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9987 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9988 && TREE_CODE (arg1) == COMPLEX_CST
9989 && real_zerop (TREE_REALPART (arg1)))
9990 {
9991 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9992 if (real_onep (TREE_IMAGPART (arg1)))
9993 return
9994 fold_build2_loc (loc, COMPLEX_EXPR, type,
9995 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9996 rtype, arg0)),
9997 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9998 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9999 return
10000 fold_build2_loc (loc, COMPLEX_EXPR, type,
10001 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10002 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10003 rtype, arg0)));
10004 }
10005
10006 /* Optimize z * conj(z) for floating point complex numbers.
10007 Guarded by flag_unsafe_math_optimizations as non-finite
10008 imaginary components don't produce scalar results. */
10009 if (flag_unsafe_math_optimizations
10010 && TREE_CODE (arg0) == CONJ_EXPR
10011 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10012 return fold_mult_zconjz (loc, type, arg1);
10013 if (flag_unsafe_math_optimizations
10014 && TREE_CODE (arg1) == CONJ_EXPR
10015 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10016 return fold_mult_zconjz (loc, type, arg0);
10017
10018 if (flag_unsafe_math_optimizations)
10019 {
10020
10021 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10022 if (!in_gimple_form
10023 && optimize
10024 && operand_equal_p (arg0, arg1, 0))
10025 {
10026 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10027
10028 if (powfn)
10029 {
10030 tree arg = build_real (type, dconst2);
10031 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10032 }
10033 }
10034 }
10035 }
10036 goto associate;
10037
10038 case BIT_IOR_EXPR:
10039 /* Canonicalize (X & C1) | C2. */
10040 if (TREE_CODE (arg0) == BIT_AND_EXPR
10041 && TREE_CODE (arg1) == INTEGER_CST
10042 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10043 {
10044 int width = TYPE_PRECISION (type), w;
10045 wide_int c1 = TREE_OPERAND (arg0, 1);
10046 wide_int c2 = arg1;
10047
10048 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10049 if ((c1 & c2) == c1)
10050 return omit_one_operand_loc (loc, type, arg1,
10051 TREE_OPERAND (arg0, 0));
10052
10053 wide_int msk = wi::mask (width, false,
10054 TYPE_PRECISION (TREE_TYPE (arg1)));
10055
10056 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10057 if (msk.and_not (c1 | c2) == 0)
10058 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10059 TREE_OPERAND (arg0, 0), arg1);
10060
10061 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10062 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10063 mode which allows further optimizations. */
10064 c1 &= msk;
10065 c2 &= msk;
10066 wide_int c3 = c1.and_not (c2);
10067 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10068 {
10069 wide_int mask = wi::mask (w, false,
10070 TYPE_PRECISION (type));
10071 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10072 {
10073 c3 = mask;
10074 break;
10075 }
10076 }
10077
10078 if (c3 != c1)
10079 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10080 fold_build2_loc (loc, BIT_AND_EXPR, type,
10081 TREE_OPERAND (arg0, 0),
10082 wide_int_to_tree (type,
10083 c3)),
10084 arg1);
10085 }
10086
10087 /* See if this can be simplified into a rotate first. If that
10088 is unsuccessful continue in the association code. */
10089 goto bit_rotate;
10090
10091 case BIT_XOR_EXPR:
10092 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10093 if (TREE_CODE (arg0) == BIT_AND_EXPR
10094 && INTEGRAL_TYPE_P (type)
10095 && integer_onep (TREE_OPERAND (arg0, 1))
10096 && integer_onep (arg1))
10097 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10098 build_zero_cst (TREE_TYPE (arg0)));
10099
10100 /* See if this can be simplified into a rotate first. If that
10101 is unsuccessful continue in the association code. */
10102 goto bit_rotate;
10103
10104 case BIT_AND_EXPR:
10105 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10106 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10107 && INTEGRAL_TYPE_P (type)
10108 && integer_onep (TREE_OPERAND (arg0, 1))
10109 && integer_onep (arg1))
10110 {
10111 tree tem2;
10112 tem = TREE_OPERAND (arg0, 0);
10113 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10114 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10115 tem, tem2);
10116 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10117 build_zero_cst (TREE_TYPE (tem)));
10118 }
10119 /* Fold ~X & 1 as (X & 1) == 0. */
10120 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10121 && INTEGRAL_TYPE_P (type)
10122 && integer_onep (arg1))
10123 {
10124 tree tem2;
10125 tem = TREE_OPERAND (arg0, 0);
10126 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10127 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10128 tem, tem2);
10129 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10130 build_zero_cst (TREE_TYPE (tem)));
10131 }
10132 /* Fold !X & 1 as X == 0. */
10133 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10134 && integer_onep (arg1))
10135 {
10136 tem = TREE_OPERAND (arg0, 0);
10137 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10138 build_zero_cst (TREE_TYPE (tem)));
10139 }
10140
10141 /* Fold (X ^ Y) & Y as ~X & Y. */
10142 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10143 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10144 {
10145 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10146 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10147 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10148 fold_convert_loc (loc, type, arg1));
10149 }
10150 /* Fold (X ^ Y) & X as ~Y & X. */
10151 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10152 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10153 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10154 {
10155 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10156 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10157 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10158 fold_convert_loc (loc, type, arg1));
10159 }
10160 /* Fold X & (X ^ Y) as X & ~Y. */
10161 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10162 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10163 {
10164 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10165 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10166 fold_convert_loc (loc, type, arg0),
10167 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10168 }
10169 /* Fold X & (Y ^ X) as ~Y & X. */
10170 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10171 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10172 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10173 {
10174 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10175 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10176 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10177 fold_convert_loc (loc, type, arg0));
10178 }
10179
10180 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10181 multiple of 1 << CST. */
10182 if (TREE_CODE (arg1) == INTEGER_CST)
10183 {
10184 wide_int cst1 = arg1;
10185 wide_int ncst1 = -cst1;
10186 if ((cst1 & ncst1) == ncst1
10187 && multiple_of_p (type, arg0,
10188 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10189 return fold_convert_loc (loc, type, arg0);
10190 }
10191
10192 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10193 bits from CST2. */
10194 if (TREE_CODE (arg1) == INTEGER_CST
10195 && TREE_CODE (arg0) == MULT_EXPR
10196 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10197 {
10198 wide_int warg1 = arg1;
10199 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10200
10201 if (masked == 0)
10202 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10203 arg0, arg1);
10204 else if (masked != warg1)
10205 {
10206 /* Avoid the transform if arg1 is a mask of some
10207 mode which allows further optimizations. */
10208 int pop = wi::popcount (warg1);
10209 if (!(pop >= BITS_PER_UNIT
10210 && exact_log2 (pop) != -1
10211 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10212 return fold_build2_loc (loc, code, type, op0,
10213 wide_int_to_tree (type, masked));
10214 }
10215 }
10216
10217 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10218 ((A & N) + B) & M -> (A + B) & M
10219 Similarly if (N & M) == 0,
10220 ((A | N) + B) & M -> (A + B) & M
10221 and for - instead of + (or unary - instead of +)
10222 and/or ^ instead of |.
10223 If B is constant and (B & M) == 0, fold into A & M. */
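      /* For example, with M == 15, ((A & 255) + B) & 15 folds to
	 (A + B) & 15 because 255 & 15 == 15, and ((A | 256) + B) & 15
	 folds the same way because 256 & 15 == 0.  */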
10224 if (TREE_CODE (arg1) == INTEGER_CST)
10225 {
10226 wide_int cst1 = arg1;
10227 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10228 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10229 && (TREE_CODE (arg0) == PLUS_EXPR
10230 || TREE_CODE (arg0) == MINUS_EXPR
10231 || TREE_CODE (arg0) == NEGATE_EXPR)
10232 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10233 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10234 {
10235 tree pmop[2];
10236 int which = 0;
10237 wide_int cst0;
10238
10239 /* Now we know that arg0 is (C + D) or (C - D) or
10240 	     -C and arg1 (M) equals (1LL << cst) - 1.
10241 Store C into PMOP[0] and D into PMOP[1]. */
10242 pmop[0] = TREE_OPERAND (arg0, 0);
10243 pmop[1] = NULL;
10244 if (TREE_CODE (arg0) != NEGATE_EXPR)
10245 {
10246 pmop[1] = TREE_OPERAND (arg0, 1);
10247 which = 1;
10248 }
10249
10250 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10251 which = -1;
10252
10253 for (; which >= 0; which--)
10254 switch (TREE_CODE (pmop[which]))
10255 {
10256 case BIT_AND_EXPR:
10257 case BIT_IOR_EXPR:
10258 case BIT_XOR_EXPR:
10259 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10260 != INTEGER_CST)
10261 break;
10262 cst0 = TREE_OPERAND (pmop[which], 1);
10263 cst0 &= cst1;
10264 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10265 {
10266 if (cst0 != cst1)
10267 break;
10268 }
10269 else if (cst0 != 0)
10270 break;
10271 /* If C or D is of the form (A & N) where
10272 (N & M) == M, or of the form (A | N) or
10273 (A ^ N) where (N & M) == 0, replace it with A. */
10274 pmop[which] = TREE_OPERAND (pmop[which], 0);
10275 break;
10276 case INTEGER_CST:
10277 /* If C or D is a N where (N & M) == 0, it can be
10278 omitted (assumed 0). */
10279 if ((TREE_CODE (arg0) == PLUS_EXPR
10280 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10281 && (cst1 & pmop[which]) == 0)
10282 pmop[which] = NULL;
10283 break;
10284 default:
10285 break;
10286 }
10287
10288 /* Only build anything new if we optimized one or both arguments
10289 above. */
10290 if (pmop[0] != TREE_OPERAND (arg0, 0)
10291 || (TREE_CODE (arg0) != NEGATE_EXPR
10292 && pmop[1] != TREE_OPERAND (arg0, 1)))
10293 {
10294 tree utype = TREE_TYPE (arg0);
10295 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10296 {
10297 /* Perform the operations in a type that has defined
10298 overflow behavior. */
10299 utype = unsigned_type_for (TREE_TYPE (arg0));
10300 if (pmop[0] != NULL)
10301 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10302 if (pmop[1] != NULL)
10303 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10304 }
10305
10306 if (TREE_CODE (arg0) == NEGATE_EXPR)
10307 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10308 else if (TREE_CODE (arg0) == PLUS_EXPR)
10309 {
10310 if (pmop[0] != NULL && pmop[1] != NULL)
10311 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10312 pmop[0], pmop[1]);
10313 else if (pmop[0] != NULL)
10314 tem = pmop[0];
10315 else if (pmop[1] != NULL)
10316 tem = pmop[1];
10317 else
10318 return build_int_cst (type, 0);
10319 }
10320 else if (pmop[0] == NULL)
10321 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10322 else
10323 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10324 pmop[0], pmop[1]);
10325 /* TEM is now the new binary +, - or unary - replacement. */
10326 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10327 fold_convert_loc (loc, utype, arg1));
10328 return fold_convert_loc (loc, type, tem);
10329 }
10330 }
10331 }
10332
10333 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10334 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10335 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10336 {
10337 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10338
10339 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10340 if (mask == -1)
10341 return
10342 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10343 }
10344
10345 goto associate;
10346
10347 case RDIV_EXPR:
10348 /* Don't touch a floating-point divide by zero unless the mode
10349 of the constant can represent infinity. */
10350 if (TREE_CODE (arg1) == REAL_CST
10351 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10352 && real_zerop (arg1))
10353 return NULL_TREE;
10354
10355 /* (-A) / (-B) -> A / B */
10356 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10357 return fold_build2_loc (loc, RDIV_EXPR, type,
10358 TREE_OPERAND (arg0, 0),
10359 negate_expr (arg1));
10360 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10361 return fold_build2_loc (loc, RDIV_EXPR, type,
10362 negate_expr (arg0),
10363 TREE_OPERAND (arg1, 0));
10364 return NULL_TREE;
10365
10366 case TRUNC_DIV_EXPR:
10367 /* Fall through */
10368
10369 case FLOOR_DIV_EXPR:
10370 /* Simplify A / (B << N) where A and B are positive and B is
10371 a power of 2, to A >> (N + log2(B)). */
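/* E.g. with A unsigned, A / (8 << N) becomes A >> (N + 3),
since 8 == 1 << 3. */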
10372 strict_overflow_p = false;
10373 if (TREE_CODE (arg1) == LSHIFT_EXPR
10374 && (TYPE_UNSIGNED (type)
10375 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10376 {
10377 tree sval = TREE_OPERAND (arg1, 0);
10378 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10379 {
10380 tree sh_cnt = TREE_OPERAND (arg1, 1);
10381 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10382 wi::exact_log2 (sval));
10383
10384 if (strict_overflow_p)
10385 fold_overflow_warning (("assuming signed overflow does not "
10386 "occur when simplifying A / (B << N)"),
10387 WARN_STRICT_OVERFLOW_MISC);
10388
10389 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10390 sh_cnt, pow2);
10391 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10392 fold_convert_loc (loc, type, arg0), sh_cnt);
10393 }
10394 }
10395
10396 /* Fall through */
10397
10398 case ROUND_DIV_EXPR:
10399 case CEIL_DIV_EXPR:
10400 case EXACT_DIV_EXPR:
10401 if (integer_zerop (arg1))
10402 return NULL_TREE;
10403
10404 /* Convert -A / -B to A / B when the type is signed and overflow is
10405 undefined. */
10406 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10407 && TREE_CODE (arg0) == NEGATE_EXPR
10408 && negate_expr_p (op1))
10409 {
10410 if (INTEGRAL_TYPE_P (type))
10411 fold_overflow_warning (("assuming signed overflow does not occur "
10412 "when distributing negation across "
10413 "division"),
10414 WARN_STRICT_OVERFLOW_MISC);
10415 return fold_build2_loc (loc, code, type,
10416 fold_convert_loc (loc, type,
10417 TREE_OPERAND (arg0, 0)),
10418 negate_expr (op1));
10419 }
10420 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10421 && TREE_CODE (arg1) == NEGATE_EXPR
10422 && negate_expr_p (op0))
10423 {
10424 if (INTEGRAL_TYPE_P (type))
10425 fold_overflow_warning (("assuming signed overflow does not occur "
10426 "when distributing negation across "
10427 "division"),
10428 WARN_STRICT_OVERFLOW_MISC);
10429 return fold_build2_loc (loc, code, type,
10430 negate_expr (op0),
10431 fold_convert_loc (loc, type,
10432 TREE_OPERAND (arg1, 0)));
10433 }
10434
10435 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10436 operation, EXACT_DIV_EXPR.
10437
10438 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10439 At one time others generated faster code; it's not clear if they do
10440 after the last round of changes to the DIV code in expmed.c. */
10441 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10442 && multiple_of_p (type, arg0, arg1))
10443 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10444 fold_convert (type, arg0),
10445 fold_convert (type, arg1));
10446
10447 strict_overflow_p = false;
10448 if (TREE_CODE (arg1) == INTEGER_CST
10449 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10450 &strict_overflow_p)))
10451 {
10452 if (strict_overflow_p)
10453 fold_overflow_warning (("assuming signed overflow does not occur "
10454 "when simplifying division"),
10455 WARN_STRICT_OVERFLOW_MISC);
10456 return fold_convert_loc (loc, type, tem);
10457 }
10458
10459 return NULL_TREE;
10460
10461 case CEIL_MOD_EXPR:
10462 case FLOOR_MOD_EXPR:
10463 case ROUND_MOD_EXPR:
10464 case TRUNC_MOD_EXPR:
10465 strict_overflow_p = false;
10466 if (TREE_CODE (arg1) == INTEGER_CST
10467 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10468 &strict_overflow_p)))
10469 {
10470 if (strict_overflow_p)
10471 fold_overflow_warning (("assuming signed overflow does not occur "
10472 "when simplifying modulus"),
10473 WARN_STRICT_OVERFLOW_MISC);
10474 return fold_convert_loc (loc, type, tem);
10475 }
10476
10477 return NULL_TREE;
10478
10479 case LROTATE_EXPR:
10480 case RROTATE_EXPR:
10481 case RSHIFT_EXPR:
10482 case LSHIFT_EXPR:
10483 /* Since a negative shift count is not well-defined,
10484 don't try to compute it in the compiler. */
10485 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10486 return NULL_TREE;
10487
10488 prec = element_precision (type);
10489
10490 /* If we have a rotate of a bit operation with the rotate count and
10491 the second operand of the bit operation both constant,
10492 permute the two operations. */
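/* E.g. (X & C2) r>> C1 becomes (X r>> C1) & (C2 r>> C1); rotation
distributes over the bitwise operation. */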
10493 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10494 && (TREE_CODE (arg0) == BIT_AND_EXPR
10495 || TREE_CODE (arg0) == BIT_IOR_EXPR
10496 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10497 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10498 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10499 fold_build2_loc (loc, code, type,
10500 TREE_OPERAND (arg0, 0), arg1),
10501 fold_build2_loc (loc, code, type,
10502 TREE_OPERAND (arg0, 1), arg1));
10503
10504 /* Two consecutive rotates adding up to some integer
10505 multiple of the precision of the type can be ignored. */
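/* E.g. in a 32-bit type, (X r>> 5) r>> 27 rotates by 32 bits in
total, which is just X. */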
10506 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10507 && TREE_CODE (arg0) == RROTATE_EXPR
10508 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10509 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10510 prec) == 0)
10511 return TREE_OPERAND (arg0, 0);
10512
10513 return NULL_TREE;
10514
10515 case MIN_EXPR:
10516 case MAX_EXPR:
10517 goto associate;
10518
10519 case TRUTH_ANDIF_EXPR:
10520 /* Note that the operands of this must be ints
10521 and their values must be 0 or 1.
10522 ("true" is a fixed value perhaps depending on the language.) */
10523 /* If first arg is constant zero, return it. */
10524 if (integer_zerop (arg0))
10525 return fold_convert_loc (loc, type, arg0);
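/* Fall through */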
10526 case TRUTH_AND_EXPR:
10527 /* If either arg is constant true, drop it. */
10528 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10529 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10530 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10531 /* Preserve sequence points. */
10532 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10533 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10534 /* If second arg is constant zero, result is zero, but first arg
10535 must be evaluated. */
10536 if (integer_zerop (arg1))
10537 return omit_one_operand_loc (loc, type, arg1, arg0);
10538 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10539 case will be handled here. */
10540 if (integer_zerop (arg0))
10541 return omit_one_operand_loc (loc, type, arg0, arg1);
10542
10543 /* !X && X is always false. */
10544 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10545 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10546 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10547 /* X && !X is always false. */
10548 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10549 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10550 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10551
10552 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10553 means A >= Y && A != MAX, but in this case we know that
10554 A < X <= MAX. */
10555
10556 if (!TREE_SIDE_EFFECTS (arg0)
10557 && !TREE_SIDE_EFFECTS (arg1))
10558 {
10559 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10560 if (tem && !operand_equal_p (tem, arg0, 0))
10561 return fold_build2_loc (loc, code, type, tem, arg1);
10562
10563 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10564 if (tem && !operand_equal_p (tem, arg1, 0))
10565 return fold_build2_loc (loc, code, type, arg0, tem);
10566 }
10567
10568 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10569 != NULL_TREE)
10570 return tem;
10571
10572 return NULL_TREE;
10573
10574 case TRUTH_ORIF_EXPR:
10575 /* Note that the operands of this must be ints
10576 and their values must be 0 or true.
10577 ("true" is a fixed value perhaps depending on the language.) */
10578 /* If first arg is constant true, return it. */
10579 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10580 return fold_convert_loc (loc, type, arg0);
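/* Fall through */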
10581 case TRUTH_OR_EXPR:
10582 /* If either arg is constant zero, drop it. */
10583 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10584 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10585 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10586 /* Preserve sequence points. */
10587 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10588 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10589 /* If second arg is constant true, result is true, but we must
10590 evaluate first arg. */
10591 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10592 return omit_one_operand_loc (loc, type, arg1, arg0);
10593 /* Likewise for first arg, but note this only occurs here for
10594 TRUTH_OR_EXPR. */
10595 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10596 return omit_one_operand_loc (loc, type, arg0, arg1);
10597
10598 /* !X || X is always true. */
10599 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10600 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10601 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10602 /* X || !X is always true. */
10603 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10604 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10605 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10606
10607 /* (X && !Y) || (!X && Y) is X ^ Y */
10608 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10609 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10610 {
10611 tree a0, a1, l0, l1, n0, n1;
10612
10613 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10614 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10615
10616 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10617 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10618
10619 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10620 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10621
10622 if ((operand_equal_p (n0, a0, 0)
10623 && operand_equal_p (n1, a1, 0))
10624 || (operand_equal_p (n0, a1, 0)
10625 && operand_equal_p (n1, a0, 0)))
10626 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10627 }
10628
10629 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10630 != NULL_TREE)
10631 return tem;
10632
10633 return NULL_TREE;
10634
10635 case TRUTH_XOR_EXPR:
10636 /* If the second arg is constant zero, drop it. */
10637 if (integer_zerop (arg1))
10638 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10639 /* If the second arg is constant true, this is a logical inversion. */
10640 if (integer_onep (arg1))
10641 {
10642 tem = invert_truthvalue_loc (loc, arg0);
10643 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10644 }
10645 /* Identical arguments cancel to zero. */
10646 if (operand_equal_p (arg0, arg1, 0))
10647 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10648
10649 /* !X ^ X is always true. */
10650 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10651 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10652 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10653
10654 /* X ^ !X is always true. */
10655 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10656 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10657 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10658
10659 return NULL_TREE;
10660
10661 case EQ_EXPR:
10662 case NE_EXPR:
10663 STRIP_NOPS (arg0);
10664 STRIP_NOPS (arg1);
10665
10666 tem = fold_comparison (loc, code, type, op0, op1);
10667 if (tem != NULL_TREE)
10668 return tem;
10669
10670 /* bool_var != 1 becomes !bool_var. */
10671 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10672 && code == NE_EXPR)
10673 return fold_convert_loc (loc, type,
10674 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10675 TREE_TYPE (arg0), arg0));
10676
10677 /* bool_var == 0 becomes !bool_var. */
10678 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10679 && code == EQ_EXPR)
10680 return fold_convert_loc (loc, type,
10681 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10682 TREE_TYPE (arg0), arg0));
10683
10684 /* !exp != 0 becomes !exp */
10685 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10686 && code == NE_EXPR)
10687 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10688
10689 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10690 if ((TREE_CODE (arg0) == PLUS_EXPR
10691 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10692 || TREE_CODE (arg0) == MINUS_EXPR)
10693 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10694 0)),
10695 arg1, 0)
10696 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10697 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10698 {
10699 tree val = TREE_OPERAND (arg0, 1);
10700 val = fold_build2_loc (loc, code, type, val,
10701 build_int_cst (TREE_TYPE (val), 0));
10702 return omit_two_operands_loc (loc, type, val,
10703 TREE_OPERAND (arg0, 0), arg1);
10704 }
10705
10706 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10707 if ((TREE_CODE (arg1) == PLUS_EXPR
10708 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10709 || TREE_CODE (arg1) == MINUS_EXPR)
10710 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10711 0)),
10712 arg0, 0)
10713 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10714 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10715 {
10716 tree val = TREE_OPERAND (arg1, 1);
10717 val = fold_build2_loc (loc, code, type, val,
10718 build_int_cst (TREE_TYPE (val), 0));
10719 return omit_two_operands_loc (loc, type, val,
10720 TREE_OPERAND (arg1, 0), arg0);
10721 }
10722
10723 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
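/* C - X == X would need 2*X == C, which is impossible (even modulo
2^prec) when C is odd, so the comparison folds to a constant. */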
10724 if (TREE_CODE (arg0) == MINUS_EXPR
10725 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10726 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10727 1)),
10728 arg1, 0)
10729 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10730 return omit_two_operands_loc (loc, type,
10731 code == NE_EXPR
10732 ? boolean_true_node : boolean_false_node,
10733 TREE_OPERAND (arg0, 1), arg1);
10734
10735 /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */
10736 if (TREE_CODE (arg1) == MINUS_EXPR
10737 && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
10738 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10739 1)),
10740 arg0, 0)
10741 && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
10742 return omit_two_operands_loc (loc, type,
10743 code == NE_EXPR
10744 ? boolean_true_node : boolean_false_node,
10745 TREE_OPERAND (arg1, 1), arg0);
10746
10747 /* If this is an EQ or NE comparison with zero and ARG0 is
10748 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10749 two operations, but the latter can be done in one less insn
10750 on machines that have only two-operand insns or on which a
10751 constant cannot be the first operand. */
10752 if (TREE_CODE (arg0) == BIT_AND_EXPR
10753 && integer_zerop (arg1))
10754 {
10755 tree arg00 = TREE_OPERAND (arg0, 0);
10756 tree arg01 = TREE_OPERAND (arg0, 1);
10757 if (TREE_CODE (arg00) == LSHIFT_EXPR
10758 && integer_onep (TREE_OPERAND (arg00, 0)))
10759 {
10760 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10761 arg01, TREE_OPERAND (arg00, 1));
10762 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10763 build_int_cst (TREE_TYPE (arg0), 1));
10764 return fold_build2_loc (loc, code, type,
10765 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10766 arg1);
10767 }
10768 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10769 && integer_onep (TREE_OPERAND (arg01, 0)))
10770 {
10771 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10772 arg00, TREE_OPERAND (arg01, 1));
10773 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10774 build_int_cst (TREE_TYPE (arg0), 1));
10775 return fold_build2_loc (loc, code, type,
10776 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10777 arg1);
10778 }
10779 }
10780
10781 /* If this is an NE or EQ comparison of zero against the result of a
10782 signed MOD operation whose second operand is a power of 2, make
10783 the MOD operation unsigned since it is simpler and equivalent. */
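/* X % C2 == 0 holds exactly when the low log2(C2) bits of X are zero,
for the signed and the unsigned modulus alike. */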
10784 if (integer_zerop (arg1)
10785 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10786 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10787 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10788 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10789 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10790 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10791 {
10792 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10793 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10794 fold_convert_loc (loc, newtype,
10795 TREE_OPERAND (arg0, 0)),
10796 fold_convert_loc (loc, newtype,
10797 TREE_OPERAND (arg0, 1)));
10798
10799 return fold_build2_loc (loc, code, type, newmod,
10800 fold_convert_loc (loc, newtype, arg1));
10801 }
10802
10803 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10804 C1 is a valid shift constant, and C2 is a power of two, i.e.
10805 a single bit. */
10806 if (TREE_CODE (arg0) == BIT_AND_EXPR
10807 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10808 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10809 == INTEGER_CST
10810 && integer_pow2p (TREE_OPERAND (arg0, 1))
10811 && integer_zerop (arg1))
10812 {
10813 tree itype = TREE_TYPE (arg0);
10814 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10815 prec = TYPE_PRECISION (itype);
10816
10817 /* Check for a valid shift count. */
10818 if (wi::ltu_p (arg001, prec))
10819 {
10820 tree arg01 = TREE_OPERAND (arg0, 1);
10821 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10822 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10823 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10824 can be rewritten as (X & (C2 << C1)) != 0. */
10825 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10826 {
10827 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10828 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10829 return fold_build2_loc (loc, code, type, tem,
10830 fold_convert_loc (loc, itype, arg1));
10831 }
10832 /* Otherwise, for signed (arithmetic) shifts,
10833 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10834 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10835 else if (!TYPE_UNSIGNED (itype))
10836 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10837 arg000, build_int_cst (itype, 0));
10838 /* Otherwise, for unsigned (logical) shifts,
10839 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10840 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10841 else
10842 return omit_one_operand_loc (loc, type,
10843 code == EQ_EXPR ? integer_one_node
10844 : integer_zero_node,
10845 arg000);
10846 }
10847 }
10848
10849 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10850 Similarly for NE_EXPR. */
10851 if (TREE_CODE (arg0) == BIT_AND_EXPR
10852 && TREE_CODE (arg1) == INTEGER_CST
10853 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10854 {
10855 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10856 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10857 TREE_OPERAND (arg0, 1));
10858 tree dandnotc
10859 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10860 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10861 notc);
10862 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10863 if (integer_nonzerop (dandnotc))
10864 return omit_one_operand_loc (loc, type, rslt, arg0);
10865 }
10866
10867 /* If this is a comparison of a field, we may be able to simplify it. */
10868 if ((TREE_CODE (arg0) == COMPONENT_REF
10869 || TREE_CODE (arg0) == BIT_FIELD_REF)
10870 /* Handle the constant case even without -O
10871 to make sure the warnings are given. */
10872 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10873 {
10874 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10875 if (t1)
10876 return t1;
10877 }
10878
10879 /* Optimize comparisons of strlen vs zero to a compare of the
10880 first character of the string vs zero. To wit,
10881 strlen(ptr) == 0 => *ptr == 0
10882 strlen(ptr) != 0 => *ptr != 0
10883 Other cases should reduce to one of these two (or a constant)
10884 due to the return value of strlen being unsigned. */
10885 if (TREE_CODE (arg0) == CALL_EXPR
10886 && integer_zerop (arg1))
10887 {
10888 tree fndecl = get_callee_fndecl (arg0);
10889
10890 if (fndecl
10891 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10892 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10893 && call_expr_nargs (arg0) == 1
10894 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10895 {
10896 tree iref = build_fold_indirect_ref_loc (loc,
10897 CALL_EXPR_ARG (arg0, 0));
10898 return fold_build2_loc (loc, code, type, iref,
10899 build_int_cst (TREE_TYPE (iref), 0));
10900 }
10901 }
10902
10903 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10904 of X. Similarly fold (X >> C) == 0 into X >= 0. */
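/* E.g. for 32-bit X, (X >> 31) != 0 becomes X < 0; the shift leaves
only the most significant bit, and X is converted to a signed type
first if necessary. */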
10905 if (TREE_CODE (arg0) == RSHIFT_EXPR
10906 && integer_zerop (arg1)
10907 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10908 {
10909 tree arg00 = TREE_OPERAND (arg0, 0);
10910 tree arg01 = TREE_OPERAND (arg0, 1);
10911 tree itype = TREE_TYPE (arg00);
10912 if (wi::eq_p (arg01, element_precision (itype) - 1))
10913 {
10914 if (TYPE_UNSIGNED (itype))
10915 {
10916 itype = signed_type_for (itype);
10917 arg00 = fold_convert_loc (loc, itype, arg00);
10918 }
10919 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10920 type, arg00, build_zero_cst (itype));
10921 }
10922 }
10923
10924 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10925 (X & C) == 0 when C is a single bit. */
10926 if (TREE_CODE (arg0) == BIT_AND_EXPR
10927 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10928 && integer_zerop (arg1)
10929 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10930 {
10931 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10932 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10933 TREE_OPERAND (arg0, 1));
10934 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10935 type, tem,
10936 fold_convert_loc (loc, TREE_TYPE (arg0),
10937 arg1));
10938 }
10939
10940 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10941 constant C is a power of two, i.e. a single bit. */
10942 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10943 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10944 && integer_zerop (arg1)
10945 && integer_pow2p (TREE_OPERAND (arg0, 1))
10946 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10947 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10948 {
10949 tree arg00 = TREE_OPERAND (arg0, 0);
10950 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10951 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10952 }
10953
10954 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10955 when C is a power of two, i.e. a single bit. */
10956 if (TREE_CODE (arg0) == BIT_AND_EXPR
10957 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10958 && integer_zerop (arg1)
10959 && integer_pow2p (TREE_OPERAND (arg0, 1))
10960 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10961 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10962 {
10963 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10964 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10965 arg000, TREE_OPERAND (arg0, 1));
10966 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10967 tem, build_int_cst (TREE_TYPE (tem), 0));
10968 }
10969
10970 if (integer_zerop (arg1)
10971 && tree_expr_nonzero_p (arg0))
10972 {
10973 tree res = constant_boolean_node (code == NE_EXPR, type);
10974 return omit_one_operand_loc (loc, type, res, arg0);
10975 }
10976
10977 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
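/* (X & C) and (Y & C) compare equal iff X and Y agree on every bit
set in C, i.e. iff ((X ^ Y) & C) == 0. */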
10978 if (TREE_CODE (arg0) == BIT_AND_EXPR
10979 && TREE_CODE (arg1) == BIT_AND_EXPR)
10980 {
10981 tree arg00 = TREE_OPERAND (arg0, 0);
10982 tree arg01 = TREE_OPERAND (arg0, 1);
10983 tree arg10 = TREE_OPERAND (arg1, 0);
10984 tree arg11 = TREE_OPERAND (arg1, 1);
10985 tree itype = TREE_TYPE (arg0);
10986
10987 if (operand_equal_p (arg01, arg11, 0))
10988 return fold_build2_loc (loc, code, type,
10989 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10990 fold_build2_loc (loc,
10991 BIT_XOR_EXPR, itype,
10992 arg00, arg10),
10993 arg01),
10994 build_zero_cst (itype));
10995
10996 if (operand_equal_p (arg01, arg10, 0))
10997 return fold_build2_loc (loc, code, type,
10998 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10999 fold_build2_loc (loc,
11000 BIT_XOR_EXPR, itype,
11001 arg00, arg11),
11002 arg01),
11003 build_zero_cst (itype));
11004
11005 if (operand_equal_p (arg00, arg11, 0))
11006 return fold_build2_loc (loc, code, type,
11007 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11008 fold_build2_loc (loc,
11009 BIT_XOR_EXPR, itype,
11010 arg01, arg10),
11011 arg00),
11012 build_zero_cst (itype));
11013
11014 if (operand_equal_p (arg00, arg10, 0))
11015 return fold_build2_loc (loc, code, type,
11016 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11017 fold_build2_loc (loc,
11018 BIT_XOR_EXPR, itype,
11019 arg01, arg11),
11020 arg00),
11021 build_zero_cst (itype));
11022 }
11023
11024 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11025 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11026 {
11027 tree arg00 = TREE_OPERAND (arg0, 0);
11028 tree arg01 = TREE_OPERAND (arg0, 1);
11029 tree arg10 = TREE_OPERAND (arg1, 0);
11030 tree arg11 = TREE_OPERAND (arg1, 1);
11031 tree itype = TREE_TYPE (arg0);
11032
11033 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11034 operand_equal_p guarantees no side-effects so we don't need
11035 to use omit_one_operand on Z. */
11036 if (operand_equal_p (arg01, arg11, 0))
11037 return fold_build2_loc (loc, code, type, arg00,
11038 fold_convert_loc (loc, TREE_TYPE (arg00),
11039 arg10));
11040 if (operand_equal_p (arg01, arg10, 0))
11041 return fold_build2_loc (loc, code, type, arg00,
11042 fold_convert_loc (loc, TREE_TYPE (arg00),
11043 arg11));
11044 if (operand_equal_p (arg00, arg11, 0))
11045 return fold_build2_loc (loc, code, type, arg01,
11046 fold_convert_loc (loc, TREE_TYPE (arg01),
11047 arg10));
11048 if (operand_equal_p (arg00, arg10, 0))
11049 return fold_build2_loc (loc, code, type, arg01,
11050 fold_convert_loc (loc, TREE_TYPE (arg01),
11051 arg11));
11052
11053 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
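/* Eq/ne is preserved under XOR with a constant: XOR both sides with
C2 and the right-hand side collapses to Y. */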
11054 if (TREE_CODE (arg01) == INTEGER_CST
11055 && TREE_CODE (arg11) == INTEGER_CST)
11056 {
11057 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11058 fold_convert_loc (loc, itype, arg11));
11059 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11060 return fold_build2_loc (loc, code, type, tem,
11061 fold_convert_loc (loc, itype, arg10));
11062 }
11063 }
11064
11065 /* Attempt to simplify equality/inequality comparisons of complex
11066 values. Only lower the comparison if the result is known or
11067 can be simplified to a single scalar comparison. */
11068 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11069 || TREE_CODE (arg0) == COMPLEX_CST)
11070 && (TREE_CODE (arg1) == COMPLEX_EXPR
11071 || TREE_CODE (arg1) == COMPLEX_CST))
11072 {
11073 tree real0, imag0, real1, imag1;
11074 tree rcond, icond;
11075
11076 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11077 {
11078 real0 = TREE_OPERAND (arg0, 0);
11079 imag0 = TREE_OPERAND (arg0, 1);
11080 }
11081 else
11082 {
11083 real0 = TREE_REALPART (arg0);
11084 imag0 = TREE_IMAGPART (arg0);
11085 }
11086
11087 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11088 {
11089 real1 = TREE_OPERAND (arg1, 0);
11090 imag1 = TREE_OPERAND (arg1, 1);
11091 }
11092 else
11093 {
11094 real1 = TREE_REALPART (arg1);
11095 imag1 = TREE_IMAGPART (arg1);
11096 }
11097
11098 rcond = fold_binary_loc (loc, code, type, real0, real1);
11099 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11100 {
11101 if (integer_zerop (rcond))
11102 {
11103 if (code == EQ_EXPR)
11104 return omit_two_operands_loc (loc, type, boolean_false_node,
11105 imag0, imag1);
11106 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11107 }
11108 else
11109 {
11110 if (code == NE_EXPR)
11111 return omit_two_operands_loc (loc, type, boolean_true_node,
11112 imag0, imag1);
11113 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11114 }
11115 }
11116
11117 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11118 if (icond && TREE_CODE (icond) == INTEGER_CST)
11119 {
11120 if (integer_zerop (icond))
11121 {
11122 if (code == EQ_EXPR)
11123 return omit_two_operands_loc (loc, type, boolean_false_node,
11124 real0, real1);
11125 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11126 }
11127 else
11128 {
11129 if (code == NE_EXPR)
11130 return omit_two_operands_loc (loc, type, boolean_true_node,
11131 real0, real1);
11132 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11133 }
11134 }
11135 }
11136
11137 return NULL_TREE;
11138
11139 case LT_EXPR:
11140 case GT_EXPR:
11141 case LE_EXPR:
11142 case GE_EXPR:
11143 tem = fold_comparison (loc, code, type, op0, op1);
11144 if (tem != NULL_TREE)
11145 return tem;
11146
11147 /* Transform comparisons of the form X +- C CMP X. */
11148 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11149 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11150 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11151 && !HONOR_SNANS (arg0))
11152 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11153 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11154 {
11155 tree arg01 = TREE_OPERAND (arg0, 1);
11156 enum tree_code code0 = TREE_CODE (arg0);
11157 int is_positive;
11158
11159 if (TREE_CODE (arg01) == REAL_CST)
11160 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11161 else
11162 is_positive = tree_int_cst_sgn (arg01);
11163
11164 /* (X - c) > X becomes false. */
11165 if (code == GT_EXPR
11166 && ((code0 == MINUS_EXPR && is_positive >= 0)
11167 || (code0 == PLUS_EXPR && is_positive <= 0)))
11168 {
11169 if (TREE_CODE (arg01) == INTEGER_CST
11170 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11171 fold_overflow_warning (("assuming signed overflow does not "
11172 "occur when assuming that (X - c) > X "
11173 "is always false"),
11174 WARN_STRICT_OVERFLOW_ALL);
11175 return constant_boolean_node (0, type);
11176 }
11177
11178 /* Likewise (X + c) < X becomes false. */
11179 if (code == LT_EXPR
11180 && ((code0 == PLUS_EXPR && is_positive >= 0)
11181 || (code0 == MINUS_EXPR && is_positive <= 0)))
11182 {
11183 if (TREE_CODE (arg01) == INTEGER_CST
11184 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11185 fold_overflow_warning (("assuming signed overflow does not "
11186 "occur when assuming that "
11187 "(X + c) < X is always false"),
11188 WARN_STRICT_OVERFLOW_ALL);
11189 return constant_boolean_node (0, type);
11190 }
11191
11192 /* Convert (X - c) <= X to true. */
11193 if (!HONOR_NANS (arg1)
11194 && code == LE_EXPR
11195 && ((code0 == MINUS_EXPR && is_positive >= 0)
11196 || (code0 == PLUS_EXPR && is_positive <= 0)))
11197 {
11198 if (TREE_CODE (arg01) == INTEGER_CST
11199 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11200 fold_overflow_warning (("assuming signed overflow does not "
11201 "occur when assuming that "
11202 "(X - c) <= X is always true"),
11203 WARN_STRICT_OVERFLOW_ALL);
11204 return constant_boolean_node (1, type);
11205 }
11206
11207 /* Convert (X + c) >= X to true. */
11208 if (!HONOR_NANS (arg1)
11209 && code == GE_EXPR
11210 && ((code0 == PLUS_EXPR && is_positive >= 0)
11211 || (code0 == MINUS_EXPR && is_positive <= 0)))
11212 {
11213 if (TREE_CODE (arg01) == INTEGER_CST
11214 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11215 fold_overflow_warning (("assuming signed overflow does not "
11216 "occur when assuming that "
11217 "(X + c) >= X is always true"),
11218 WARN_STRICT_OVERFLOW_ALL);
11219 return constant_boolean_node (1, type);
11220 }
11221
11222 if (TREE_CODE (arg01) == INTEGER_CST)
11223 {
11224 /* Convert X + c > X and X - c < X to true for integers. */
11225 if (code == GT_EXPR
11226 && ((code0 == PLUS_EXPR && is_positive > 0)
11227 || (code0 == MINUS_EXPR && is_positive < 0)))
11228 {
11229 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11230 fold_overflow_warning (("assuming signed overflow does "
11231 "not occur when assuming that "
11232 "(X + c) > X is always true"),
11233 WARN_STRICT_OVERFLOW_ALL);
11234 return constant_boolean_node (1, type);
11235 }
11236
11237 if (code == LT_EXPR
11238 && ((code0 == MINUS_EXPR && is_positive > 0)
11239 || (code0 == PLUS_EXPR && is_positive < 0)))
11240 {
11241 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11242 fold_overflow_warning (("assuming signed overflow does "
11243 "not occur when assuming that "
11244 "(X - c) < X is always true"),
11245 WARN_STRICT_OVERFLOW_ALL);
11246 return constant_boolean_node (1, type);
11247 }
11248
11249 /* Convert X + c <= X and X - c >= X to false for integers. */
11250 if (code == LE_EXPR
11251 && ((code0 == PLUS_EXPR && is_positive > 0)
11252 || (code0 == MINUS_EXPR && is_positive < 0)))
11253 {
11254 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11255 fold_overflow_warning (("assuming signed overflow does "
11256 "not occur when assuming that "
11257 "(X + c) <= X is always false"),
11258 WARN_STRICT_OVERFLOW_ALL);
11259 return constant_boolean_node (0, type);
11260 }
11261
11262 if (code == GE_EXPR
11263 && ((code0 == MINUS_EXPR && is_positive > 0)
11264 || (code0 == PLUS_EXPR && is_positive < 0)))
11265 {
11266 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11267 fold_overflow_warning (("assuming signed overflow does "
11268 "not occur when assuming that "
11269 "(X - c) >= X is always false"),
11270 WARN_STRICT_OVERFLOW_ALL);
11271 return constant_boolean_node (0, type);
11272 }
11273 }
11274 }
11275
11276 /* If we are comparing an ABS_EXPR with a constant, we can
11277 convert all the cases into explicit comparisons, but they may
11278 well not be faster than doing the ABS and one comparison.
11279 But ABS (X) <= C is a range comparison, which becomes a subtraction
11280 and a comparison, and is probably faster. */
11281 if (code == LE_EXPR
11282 && TREE_CODE (arg1) == INTEGER_CST
11283 && TREE_CODE (arg0) == ABS_EXPR
11284 && ! TREE_SIDE_EFFECTS (arg0)
11285 && (0 != (tem = negate_expr (arg1)))
11286 && TREE_CODE (tem) == INTEGER_CST
11287 && !TREE_OVERFLOW (tem))
11288 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11289 build2 (GE_EXPR, type,
11290 TREE_OPERAND (arg0, 0), tem),
11291 build2 (LE_EXPR, type,
11292 TREE_OPERAND (arg0, 0), arg1));
11293
11294 /* Convert ABS_EXPR<x> >= 0 to true. */
11295 strict_overflow_p = false;
11296 if (code == GE_EXPR
11297 && (integer_zerop (arg1)
11298 || (! HONOR_NANS (arg0)
11299 && real_zerop (arg1)))
11300 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11301 {
11302 if (strict_overflow_p)
11303 fold_overflow_warning (("assuming signed overflow does not occur "
11304 "when simplifying comparison of "
11305 "absolute value and zero"),
11306 WARN_STRICT_OVERFLOW_CONDITIONAL);
11307 return omit_one_operand_loc (loc, type,
11308 constant_boolean_node (true, type),
11309 arg0);
11310 }
11311
11312 /* Convert ABS_EXPR<x> < 0 to false. */
11313 strict_overflow_p = false;
11314 if (code == LT_EXPR
11315 && (integer_zerop (arg1) || real_zerop (arg1))
11316 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11317 {
11318 if (strict_overflow_p)
11319 fold_overflow_warning (("assuming signed overflow does not occur "
11320 "when simplifying comparison of "
11321 "absolute value and zero"),
11322 WARN_STRICT_OVERFLOW_CONDITIONAL);
11323 return omit_one_operand_loc (loc, type,
11324 constant_boolean_node (false, type),
11325 arg0);
11326 }
11327
11328 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11329 and similarly for >= into !=. */
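/* For unsigned X, X < (1 << Y) says that all bits of X at positions
Y and above are clear, which is exactly X >> Y == 0. */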
11330 if ((code == LT_EXPR || code == GE_EXPR)
11331 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11332 && TREE_CODE (arg1) == LSHIFT_EXPR
11333 && integer_onep (TREE_OPERAND (arg1, 0)))
11334 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11335 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11336 TREE_OPERAND (arg1, 1)),
11337 build_zero_cst (TREE_TYPE (arg0)));
11338
11339 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11340 otherwise Y might be >= # of bits in X's type and thus e.g.
11341 (unsigned char) (1 << Y) for Y 15 might be 0.
11342 If the cast is widening, then 1 << Y should have unsigned type,
11343 otherwise if Y is number of bits in the signed shift type minus 1,
11344 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11345 31 might be 0xffffffff80000000. */
11346 if ((code == LT_EXPR || code == GE_EXPR)
11347 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11348 && CONVERT_EXPR_P (arg1)
11349 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11350 && (element_precision (TREE_TYPE (arg1))
11351 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11352 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11353 || (element_precision (TREE_TYPE (arg1))
11354 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11355 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11356 {
11357 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11358 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11359 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11360 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11361 build_zero_cst (TREE_TYPE (arg0)));
11362 }
11363
11364 return NULL_TREE;
11365
11366 case UNORDERED_EXPR:
11367 case ORDERED_EXPR:
11368 case UNLT_EXPR:
11369 case UNLE_EXPR:
11370 case UNGT_EXPR:
11371 case UNGE_EXPR:
11372 case UNEQ_EXPR:
11373 case LTGT_EXPR:
11374 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
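/* Safe because extending a float to a wider format is exact:
comparing the values before the value-preserving extension gives
the same result. */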
11375 {
11376 tree targ0 = strip_float_extensions (arg0);
11377 tree targ1 = strip_float_extensions (arg1);
11378 tree newtype = TREE_TYPE (targ0);
11379
11380 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11381 newtype = TREE_TYPE (targ1);
11382
11383 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11384 return fold_build2_loc (loc, code, type,
11385 fold_convert_loc (loc, newtype, targ0),
11386 fold_convert_loc (loc, newtype, targ1));
11387 }
11388
11389 return NULL_TREE;
11390
11391 case COMPOUND_EXPR:
11392 /* When pedantic, a compound expression can be neither an lvalue
11393 nor an integer constant expression. */
11394 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11395 return NULL_TREE;
11396 /* Don't let (0, 0) be a null pointer constant. */
11397 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11398 : fold_convert_loc (loc, type, arg1);
11399 return pedantic_non_lvalue_loc (loc, tem);
11400
11401 case ASSERT_EXPR:
11402 /* An ASSERT_EXPR should never be passed to fold_binary. */
11403 gcc_unreachable ();
11404
11405 default:
11406 return NULL_TREE;
11407 } /* switch (code) */
11408 }
11409
11410 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11411 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11412 of GOTO_EXPR. */
11413
11414 static tree
11415 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11416 {
11417 switch (TREE_CODE (*tp))
11418 {
11419 case LABEL_EXPR:
11420 return *tp;
11421
11422 case GOTO_EXPR:
11423 *walk_subtrees = 0;
11424
11425 /* ... fall through ... */
11426
11427 default:
11428 return NULL_TREE;
11429 }
11430 }
11431
11432 /* Return whether the sub-tree ST contains a label which is accessible from
11433 outside the sub-tree. */
11434
11435 static bool
11436 contains_label_p (tree st)
11437 {
11438 return
11439 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11440 }
11441
11442 /* Fold a ternary expression of code CODE and type TYPE with operands
11443 OP0, OP1, and OP2. Return the folded expression if folding is
11444 successful. Otherwise, return NULL_TREE. */
11445
11446 tree
11447 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11448 tree op0, tree op1, tree op2)
11449 {
11450 tree tem;
11451 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11452 enum tree_code_class kind = TREE_CODE_CLASS (code);
11453
11454 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11455 && TREE_CODE_LENGTH (code) == 3);
11456
11457 /* If this is a commutative operation, and OP0 is a constant, move it
11458 to OP1 to reduce the number of tests below. */
11459 if (commutative_ternary_tree_code (code)
11460 && tree_swap_operands_p (op0, op1, true))
11461 return fold_build3_loc (loc, code, type, op1, op0, op2);
11462
11463 tem = generic_simplify (loc, code, type, op0, op1, op2);
11464 if (tem)
11465 return tem;
11466
11467 /* Strip any conversions that don't change the mode. This is safe
11468 for every expression, except for a comparison expression because
11469 its signedness is derived from its operands. So, in the latter
11470 case, only strip conversions that don't change the signedness.
11471
11472 Note that this is done as an internal manipulation within the
11473 constant folder, in order to find the simplest representation of
11474 the arguments so that their form can be studied. In any case,
11475 the appropriate type conversions should be put back in the tree
11476 that will get out of the constant folder. */
11477 if (op0)
11478 {
11479 arg0 = op0;
11480 STRIP_NOPS (arg0);
11481 }
11482
11483 if (op1)
11484 {
11485 arg1 = op1;
11486 STRIP_NOPS (arg1);
11487 }
11488
11489 if (op2)
11490 {
11491 arg2 = op2;
11492 STRIP_NOPS (arg2);
11493 }
11494
11495 switch (code)
11496 {
11497 case COMPONENT_REF:
11498 if (TREE_CODE (arg0) == CONSTRUCTOR
11499 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11500 {
11501 unsigned HOST_WIDE_INT idx;
11502 tree field, value;
11503 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11504 if (field == arg1)
11505 return value;
11506 }
11507 return NULL_TREE;
11508
11509 case COND_EXPR:
11510 case VEC_COND_EXPR:
11511 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11512 so all simple results must be passed through pedantic_non_lvalue. */
11513 if (TREE_CODE (arg0) == INTEGER_CST)
11514 {
11515 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11516 tem = integer_zerop (arg0) ? op2 : op1;
11517 /* Only optimize constant conditions when the selected branch
11518 has the same type as the COND_EXPR. This avoids optimizing
11519 away "c ? x : throw", where the throw has a void type.
11520 Avoid throwing away an operand that contains a label. */
11521 if ((!TREE_SIDE_EFFECTS (unused_op)
11522 || !contains_label_p (unused_op))
11523 && (! VOID_TYPE_P (TREE_TYPE (tem))
11524 || VOID_TYPE_P (type)))
11525 return pedantic_non_lvalue_loc (loc, tem);
11526 return NULL_TREE;
11527 }
11528 else if (TREE_CODE (arg0) == VECTOR_CST)
11529 {
11530 if ((TREE_CODE (arg1) == VECTOR_CST
11531 || TREE_CODE (arg1) == CONSTRUCTOR)
11532 && (TREE_CODE (arg2) == VECTOR_CST
11533 || TREE_CODE (arg2) == CONSTRUCTOR))
11534 {
11535 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11536 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11537 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11538 for (i = 0; i < nelts; i++)
11539 {
11540 tree val = VECTOR_CST_ELT (arg0, i);
11541 if (integer_all_onesp (val))
11542 sel[i] = i;
11543 else if (integer_zerop (val))
11544 sel[i] = nelts + i;
11545 else /* Currently unreachable. */
11546 return NULL_TREE;
11547 }
11548 tree t = fold_vec_perm (type, arg1, arg2, sel);
11549 if (t != NULL_TREE)
11550 return t;
11551 }
11552 }
11553
11554 /* If we have A op B ? A : C, we may be able to convert this to a
11555 simpler expression, depending on the operation and the values
11556 of B and C. Signed zeros prevent all of these transformations,
11557 for reasons given above each one.
11558
11559 Also try swapping the arguments and inverting the conditional. */
11560 if (COMPARISON_CLASS_P (arg0)
11561 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11562 arg1, TREE_OPERAND (arg0, 1))
11563 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11564 {
11565 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11566 if (tem)
11567 return tem;
11568 }
11569
11570 if (COMPARISON_CLASS_P (arg0)
11571 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11572 op2,
11573 TREE_OPERAND (arg0, 1))
11574 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11575 {
11576 location_t loc0 = expr_location_or (arg0, loc);
11577 tem = fold_invert_truthvalue (loc0, arg0);
11578 if (tem && COMPARISON_CLASS_P (tem))
11579 {
11580 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11581 if (tem)
11582 return tem;
11583 }
11584 }
11585
11586 /* If the second operand is simpler than the third, swap them
11587 since that produces better jump optimization results. */
11588 if (truth_value_p (TREE_CODE (arg0))
11589 && tree_swap_operands_p (op1, op2, false))
11590 {
11591 location_t loc0 = expr_location_or (arg0, loc);
11592 /* See if this can be inverted. If it can't, possibly because
11593 it was a floating-point inequality comparison, don't do
11594 anything. */
11595 tem = fold_invert_truthvalue (loc0, arg0);
11596 if (tem)
11597 return fold_build3_loc (loc, code, type, tem, op2, op1);
11598 }
11599
11600 /* Convert A ? 1 : 0 to simply A. */
11601 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11602 : (integer_onep (op1)
11603 && !VECTOR_TYPE_P (type)))
11604 && integer_zerop (op2)
11605 /* If we try to convert OP0 to our type, the
11606 call to fold will try to move the conversion inside
11607 a COND, which will recurse. In that case, the COND_EXPR
11608 is probably the best choice, so leave it alone. */
11609 && type == TREE_TYPE (arg0))
11610 return pedantic_non_lvalue_loc (loc, arg0);
11611
11612 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11613 over COND_EXPR in cases such as floating point comparisons. */
11614 if (integer_zerop (op1)
11615 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11616 : (integer_onep (op2)
11617 && !VECTOR_TYPE_P (type)))
11618 && truth_value_p (TREE_CODE (arg0)))
11619 return pedantic_non_lvalue_loc (loc,
11620 fold_convert_loc (loc, type,
11621 invert_truthvalue_loc (loc,
11622 arg0)));
11623
11624 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
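/* E.g. for 32-bit A, A < 0 ? 0x80000000 : 0 is simply
A & 0x80000000. */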
11625 if (TREE_CODE (arg0) == LT_EXPR
11626 && integer_zerop (TREE_OPERAND (arg0, 1))
11627 && integer_zerop (op2)
11628 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11629 {
11630 /* sign_bit_p looks through both zero and sign extensions,
11631 but for this optimization only sign extensions are
11632 usable. */
11633 tree tem2 = TREE_OPERAND (arg0, 0);
11634 while (tem != tem2)
11635 {
11636 if (TREE_CODE (tem2) != NOP_EXPR
11637 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11638 {
11639 tem = NULL_TREE;
11640 break;
11641 }
11642 tem2 = TREE_OPERAND (tem2, 0);
11643 }
11644 /* sign_bit_p only checks ARG1 bits within A's precision.
11645 If <sign bit of A> has wider type than A, bits outside
11646 of A's precision in <sign bit of A> need to be checked.
11647 If they are all 0, this optimization needs to be done
11648 in A's unsigned type; if they are all 1, in A's signed type;
11649 otherwise it can't be done. */
11650 if (tem
11651 && TYPE_PRECISION (TREE_TYPE (tem))
11652 < TYPE_PRECISION (TREE_TYPE (arg1))
11653 && TYPE_PRECISION (TREE_TYPE (tem))
11654 < TYPE_PRECISION (type))
11655 {
11656 int inner_width, outer_width;
11657 tree tem_type;
11658
11659 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11660 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11661 if (outer_width > TYPE_PRECISION (type))
11662 outer_width = TYPE_PRECISION (type);
11663
11664 wide_int mask = wi::shifted_mask
11665 (inner_width, outer_width - inner_width, false,
11666 TYPE_PRECISION (TREE_TYPE (arg1)));
11667
11668 wide_int common = mask & arg1;
11669 if (common == mask)
11670 {
11671 tem_type = signed_type_for (TREE_TYPE (tem));
11672 tem = fold_convert_loc (loc, tem_type, tem);
11673 }
11674 else if (common == 0)
11675 {
11676 tem_type = unsigned_type_for (TREE_TYPE (tem));
11677 tem = fold_convert_loc (loc, tem_type, tem);
11678 }
11679 else
11680 tem = NULL;
11681 }
11682
11683 if (tem)
11684 return
11685 fold_convert_loc (loc, type,
11686 fold_build2_loc (loc, BIT_AND_EXPR,
11687 TREE_TYPE (tem), tem,
11688 fold_convert_loc (loc,
11689 TREE_TYPE (tem),
11690 arg1)));
11691 }
11692
11693 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11694 already handled above. */
11695 if (TREE_CODE (arg0) == BIT_AND_EXPR
11696 && integer_onep (TREE_OPERAND (arg0, 1))
11697 && integer_zerop (op2)
11698 && integer_pow2p (arg1))
11699 {
11700 tree tem = TREE_OPERAND (arg0, 0);
11701 STRIP_NOPS (tem);
11702 if (TREE_CODE (tem) == RSHIFT_EXPR
11703 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11704 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11705 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11706 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11707 TREE_OPERAND (tem, 0), arg1);
11708 }
11709
11710 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11711 is probably obsolete because the first operand should be a
11712 truth value (that's why we have the two cases above), but let's
11713 leave it in until we can confirm this for all front-ends. */
11714 if (integer_zerop (op2)
11715 && TREE_CODE (arg0) == NE_EXPR
11716 && integer_zerop (TREE_OPERAND (arg0, 1))
11717 && integer_pow2p (arg1)
11718 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11719 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11720 arg1, OEP_ONLY_CONST))
11721 return pedantic_non_lvalue_loc (loc,
11722 fold_convert_loc (loc, type,
11723 TREE_OPERAND (arg0, 0)));
11724
11725 /* Disable the transformations below for vectors, since
11726 fold_binary_op_with_conditional_arg may undo them immediately,
11727 yielding an infinite loop. */
11728 if (code == VEC_COND_EXPR)
11729 return NULL_TREE;
11730
11731 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11732 if (integer_zerop (op2)
11733 && truth_value_p (TREE_CODE (arg0))
11734 && truth_value_p (TREE_CODE (arg1))
11735 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11736 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11737 : TRUTH_ANDIF_EXPR,
11738 type, fold_convert_loc (loc, type, arg0), arg1);
11739
11740 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11741 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11742 && truth_value_p (TREE_CODE (arg0))
11743 && truth_value_p (TREE_CODE (arg1))
11744 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11745 {
11746 location_t loc0 = expr_location_or (arg0, loc);
11747 /* Only perform transformation if ARG0 is easily inverted. */
11748 tem = fold_invert_truthvalue (loc0, arg0);
11749 if (tem)
11750 return fold_build2_loc (loc, code == VEC_COND_EXPR
11751 ? BIT_IOR_EXPR
11752 : TRUTH_ORIF_EXPR,
11753 type, fold_convert_loc (loc, type, tem),
11754 arg1);
11755 }
11756
11757 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11758 if (integer_zerop (arg1)
11759 && truth_value_p (TREE_CODE (arg0))
11760 && truth_value_p (TREE_CODE (op2))
11761 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11762 {
11763 location_t loc0 = expr_location_or (arg0, loc);
11764 /* Only perform transformation if ARG0 is easily inverted. */
11765 tem = fold_invert_truthvalue (loc0, arg0);
11766 if (tem)
11767 return fold_build2_loc (loc, code == VEC_COND_EXPR
11768 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11769 type, fold_convert_loc (loc, type, tem),
11770 op2);
11771 }
11772
11773 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11774 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11775 && truth_value_p (TREE_CODE (arg0))
11776 && truth_value_p (TREE_CODE (op2))
11777 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11778 return fold_build2_loc (loc, code == VEC_COND_EXPR
11779 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11780 type, fold_convert_loc (loc, type, arg0), op2);
11781
11782 return NULL_TREE;
11783
11784 case CALL_EXPR:
11785 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11786 of fold_ternary on them. */
11787 gcc_unreachable ();
11788
11789 case BIT_FIELD_REF:
11790 if ((TREE_CODE (arg0) == VECTOR_CST
11791 || (TREE_CODE (arg0) == CONSTRUCTOR
11792 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11793 && (type == TREE_TYPE (TREE_TYPE (arg0))
11794 || (TREE_CODE (type) == VECTOR_TYPE
11795 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11796 {
11797 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11798 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11799 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11800 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11801
11802 if (n != 0
11803 && (idx % width) == 0
11804 && (n % width) == 0
11805 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11806 {
11807 idx = idx / width;
11808 n = n / width;
11809
11810 if (TREE_CODE (arg0) == VECTOR_CST)
11811 {
11812 if (n == 1)
11813 return VECTOR_CST_ELT (arg0, idx);
11814
11815 tree *vals = XALLOCAVEC (tree, n);
11816 for (unsigned i = 0; i < n; ++i)
11817 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11818 return build_vector (type, vals);
11819 }
11820
11821 /* Constructor elements can be subvectors. */
11822 unsigned HOST_WIDE_INT k = 1;
11823 if (CONSTRUCTOR_NELTS (arg0) != 0)
11824 {
11825 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11826 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11827 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11828 }
11829
11830 /* We keep an exact subset of the constructor elements. */
11831 if ((idx % k) == 0 && (n % k) == 0)
11832 {
11833 if (CONSTRUCTOR_NELTS (arg0) == 0)
11834 return build_constructor (type, NULL);
11835 idx /= k;
11836 n /= k;
11837 if (n == 1)
11838 {
11839 if (idx < CONSTRUCTOR_NELTS (arg0))
11840 return CONSTRUCTOR_ELT (arg0, idx)->value;
11841 return build_zero_cst (type);
11842 }
11843
11844 vec<constructor_elt, va_gc> *vals;
11845 vec_alloc (vals, n);
11846 for (unsigned i = 0;
11847 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11848 ++i)
11849 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11850 CONSTRUCTOR_ELT
11851 (arg0, idx + i)->value);
11852 return build_constructor (type, vals);
11853 }
11854 /* The bitfield references a single constructor element. */
11855 else if (idx + n <= (idx / k + 1) * k)
11856 {
11857 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11858 return build_zero_cst (type);
11859 else if (n == k)
11860 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11861 else
11862 return fold_build3_loc (loc, code, type,
11863 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11864 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11865 }
11866 }
11867 }
11868
11869 /* A bit-field-ref that references the full argument can be stripped. */
11870 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11871 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11872 && integer_zerop (op2))
11873 return fold_convert_loc (loc, type, arg0);
11874
11875 /* On constants we can use native encode/interpret to constant
11876 fold (nearly) all BIT_FIELD_REFs. */
11877 if (CONSTANT_CLASS_P (arg0)
11878 && can_native_interpret_type_p (type)
11879 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11880 /* This limitation should not be necessary; we just need to
11881 round this up to mode size. */
11882 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11883 /* Need bit-shifting of the buffer to relax the following. */
11884 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11885 {
11886 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11887 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11888 unsigned HOST_WIDE_INT clen;
11889 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11890 /* ??? We cannot tell native_encode_expr to start at
11891 an arbitrary byte offset. So limit ourselves to a
11892 reasonable amount of work. */
11893 if (clen <= 4096)
11894 {
11895 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11896 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11897 if (len > 0
11898 && len * BITS_PER_UNIT >= bitpos + bitsize)
11899 {
11900 tree v = native_interpret_expr (type,
11901 b + bitpos / BITS_PER_UNIT,
11902 bitsize / BITS_PER_UNIT);
11903 if (v)
11904 return v;
11905 }
11906 }
11907 }
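/* Worked example of the native encode/interpret fold above (illustrative
   only, not from the sources): on a little-endian target,
   BIT_FIELD_REF <(int) 0x11223344, 8, 8> encodes the constant into the
   byte buffer {0x44, 0x33, 0x22, 0x11}; bitpos 8 and bitsize 8 select
   the single byte at offset 1, and native_interpret_expr rebuilds the
   INTEGER_CST 0x33.  */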
11908
11909 return NULL_TREE;
11910
11911 case FMA_EXPR:
11912 /* If both multiplicands are integer constants we can decompose the FMA. */
11913 if (TREE_CODE (arg0) == INTEGER_CST
11914 && TREE_CODE (arg1) == INTEGER_CST)
11915 return fold_build2_loc (loc, PLUS_EXPR, type,
11916 const_binop (MULT_EXPR, arg0, arg1), arg2);
11917 if (integer_zerop (arg2))
11918 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11919
11920 return fold_fma (loc, type, arg0, arg1, arg2);
11921
11922 case VEC_PERM_EXPR:
11923 if (TREE_CODE (arg2) == VECTOR_CST)
11924 {
11925 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11926 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11927 unsigned char *sel2 = sel + nelts;
11928 bool need_mask_canon = false;
11929 bool need_mask_canon2 = false;
11930 bool all_in_vec0 = true;
11931 bool all_in_vec1 = true;
11932 bool maybe_identity = true;
11933 bool single_arg = (op0 == op1);
11934 bool changed = false;
11935
11936 mask2 = 2 * nelts - 1;
11937 mask = single_arg ? (nelts - 1) : mask2;
11938 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11939 for (i = 0; i < nelts; i++)
11940 {
11941 tree val = VECTOR_CST_ELT (arg2, i);
11942 if (TREE_CODE (val) != INTEGER_CST)
11943 return NULL_TREE;
11944
11945 /* Make sure that the perm value is in an acceptable
11946 range. */
11947 wide_int t = val;
11948 need_mask_canon |= wi::gtu_p (t, mask);
11949 need_mask_canon2 |= wi::gtu_p (t, mask2);
11950 sel[i] = t.to_uhwi () & mask;
11951 sel2[i] = t.to_uhwi () & mask2;
11952
11953 if (sel[i] < nelts)
11954 all_in_vec1 = false;
11955 else
11956 all_in_vec0 = false;
11957
11958 if ((sel[i] & (nelts-1)) != i)
11959 maybe_identity = false;
11960 }
11961
11962 if (maybe_identity)
11963 {
11964 if (all_in_vec0)
11965 return op0;
11966 if (all_in_vec1)
11967 return op1;
11968 }
11969
11970 if (all_in_vec0)
11971 op1 = op0;
11972 else if (all_in_vec1)
11973 {
11974 op0 = op1;
11975 for (i = 0; i < nelts; i++)
11976 sel[i] -= nelts;
11977 need_mask_canon = true;
11978 }
11979
11980 if ((TREE_CODE (op0) == VECTOR_CST
11981 || TREE_CODE (op0) == CONSTRUCTOR)
11982 && (TREE_CODE (op1) == VECTOR_CST
11983 || TREE_CODE (op1) == CONSTRUCTOR))
11984 {
11985 tree t = fold_vec_perm (type, op0, op1, sel);
11986 if (t != NULL_TREE)
11987 return t;
11988 }
11989
11990 if (op0 == op1 && !single_arg)
11991 changed = true;
11992
11993 /* Some targets are deficient and fail to expand a single
11994 argument permutation while still allowing an equivalent
11995 2-argument version. */
11996 if (need_mask_canon && arg2 == op2
11997 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11998 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11999 {
12000 need_mask_canon = need_mask_canon2;
12001 sel = sel2;
12002 }
12003
12004 if (need_mask_canon && arg2 == op2)
12005 {
12006 tree *tsel = XALLOCAVEC (tree, nelts);
12007 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
12008 for (i = 0; i < nelts; i++)
12009 tsel[i] = build_int_cst (eltype, sel[i]);
12010 op2 = build_vector (TREE_TYPE (arg2), tsel);
12011 changed = true;
12012 }
12013
12014 if (changed)
12015 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
12016 }
12017 return NULL_TREE;
12018
12019 default:
12020 return NULL_TREE;
12021 } /* switch (code) */
12022 }
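/* Worked example of the VEC_PERM_EXPR folding above (illustrative only,
   not from the sources): for two V4SI operands, the constant selector
   { 4, 5, 6, 7 } takes every element from the second vector in order,
   so all_in_vec1 and maybe_identity both hold and the permutation folds
   to op1; a selector such as { 12, 13, 14, 15 } behaves the same once
   each index has been masked with 2*nelts - 1 = 7.  */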
12023
12024 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12025 of an array (or vector). */
12026
12027 tree
12028 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
12029 {
12030 tree index_type = NULL_TREE;
12031 offset_int low_bound = 0;
12032
12033 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12034 {
12035 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12036 if (domain_type && TYPE_MIN_VALUE (domain_type))
12037 {
12038 /* Static constructors for variably sized objects make no sense. */
12039 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12040 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12041 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12042 }
12043 }
12044
12045 if (index_type)
12046 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12047 TYPE_SIGN (index_type));
12048
12049 offset_int index = low_bound - 1;
12050 if (index_type)
12051 index = wi::ext (index, TYPE_PRECISION (index_type),
12052 TYPE_SIGN (index_type));
12053
12054 offset_int max_index;
12055 unsigned HOST_WIDE_INT cnt;
12056 tree cfield, cval;
12057
12058 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12059 {
12060 /* An array constructor may explicitly set the index, specify a range,
12061 or leave the index NULL, meaning it is the next index after the
12062 previous one. */
12063 if (cfield)
12064 {
12065 if (TREE_CODE (cfield) == INTEGER_CST)
12066 max_index = index = wi::to_offset (cfield);
12067 else
12068 {
12069 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12070 index = wi::to_offset (TREE_OPERAND (cfield, 0));
12071 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
12072 }
12073 }
12074 else
12075 {
12076 index += 1;
12077 if (index_type)
12078 index = wi::ext (index, TYPE_PRECISION (index_type),
12079 TYPE_SIGN (index_type));
12080 max_index = index;
12081 }
12082
12083 /* Do we have a match? */
12084 if (wi::cmpu (access_index, index) >= 0
12085 && wi::cmpu (access_index, max_index) <= 0)
12086 return cval;
12087 }
12088 return NULL_TREE;
12089 }
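/* Usage sketch for get_array_ctor_element_at_index (illustrative only):
   for the constructor of "int a[5] = { [0 ... 2] = 7, 9 };" the elements
   are a RANGE_EXPR [0, 2] with value 7 followed by an unindexed value 9
   that lands at index 3.  Access index 1 falls inside the range and
   returns the INTEGER_CST 7, index 3 returns 9, and index 4 returns
   NULL_TREE because no element covers it.  */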
12090
12091 /* Perform constant folding and related simplification of EXPR.
12092 The related simplifications include x*1 => x, x*0 => 0, etc.,
12093 and application of the associative law.
12094 NOP_EXPR conversions may be removed freely (as long as we
12095 are careful not to change the type of the overall expression).
12096 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12097 but we can constant-fold them if they have constant operands. */
12098
12099 #ifdef ENABLE_FOLD_CHECKING
12100 # define fold(x) fold_1 (x)
12101 static tree fold_1 (tree);
12102 static
12103 #endif
12104 tree
12105 fold (tree expr)
12106 {
12107 const tree t = expr;
12108 enum tree_code code = TREE_CODE (t);
12109 enum tree_code_class kind = TREE_CODE_CLASS (code);
12110 tree tem;
12111 location_t loc = EXPR_LOCATION (expr);
12112
12113 /* Return right away if a constant. */
12114 if (kind == tcc_constant)
12115 return t;
12116
12117 /* CALL_EXPR-like objects with variable numbers of operands are
12118 treated specially. */
12119 if (kind == tcc_vl_exp)
12120 {
12121 if (code == CALL_EXPR)
12122 {
12123 tem = fold_call_expr (loc, expr, false);
12124 return tem ? tem : expr;
12125 }
12126 return expr;
12127 }
12128
12129 if (IS_EXPR_CODE_CLASS (kind))
12130 {
12131 tree type = TREE_TYPE (t);
12132 tree op0, op1, op2;
12133
12134 switch (TREE_CODE_LENGTH (code))
12135 {
12136 case 1:
12137 op0 = TREE_OPERAND (t, 0);
12138 tem = fold_unary_loc (loc, code, type, op0);
12139 return tem ? tem : expr;
12140 case 2:
12141 op0 = TREE_OPERAND (t, 0);
12142 op1 = TREE_OPERAND (t, 1);
12143 tem = fold_binary_loc (loc, code, type, op0, op1);
12144 return tem ? tem : expr;
12145 case 3:
12146 op0 = TREE_OPERAND (t, 0);
12147 op1 = TREE_OPERAND (t, 1);
12148 op2 = TREE_OPERAND (t, 2);
12149 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12150 return tem ? tem : expr;
12151 default:
12152 break;
12153 }
12154 }
12155
12156 switch (code)
12157 {
12158 case ARRAY_REF:
12159 {
12160 tree op0 = TREE_OPERAND (t, 0);
12161 tree op1 = TREE_OPERAND (t, 1);
12162
12163 if (TREE_CODE (op1) == INTEGER_CST
12164 && TREE_CODE (op0) == CONSTRUCTOR
12165 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12166 {
12167 tree val = get_array_ctor_element_at_index (op0,
12168 wi::to_offset (op1));
12169 if (val)
12170 return val;
12171 }
12172
12173 return t;
12174 }
12175
12176 /* Return a VECTOR_CST if possible. */
12177 case CONSTRUCTOR:
12178 {
12179 tree type = TREE_TYPE (t);
12180 if (TREE_CODE (type) != VECTOR_TYPE)
12181 return t;
12182
12183 unsigned i;
12184 tree val;
12185 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12186 if (! CONSTANT_CLASS_P (val))
12187 return t;
12188
12189 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12190 }
12191
12192 case CONST_DECL:
12193 return fold (DECL_INITIAL (t));
12194
12195 default:
12196 return t;
12197 } /* switch (code) */
12198 }
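/* Dispatch sketch for fold above (illustrative only): folding
   PLUS_EXPR <INTEGER_CST 1, INTEGER_CST 2> takes the
   TREE_CODE_LENGTH == 2 path through fold_binary_loc and yields
   INTEGER_CST 3, whereas a CALL_EXPR is a tcc_vl_exp node and is
   handed to fold_call_expr instead.  */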
12199
12200 #ifdef ENABLE_FOLD_CHECKING
12201 #undef fold
12202
12203 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12204 hash_table<nofree_ptr_hash<const tree_node> > *);
12205 static void fold_check_failed (const_tree, const_tree);
12206 void print_fold_checksum (const_tree);
12207
12208 /* When configured with --enable-checking=fold, compute a digest of EXPR
12209 before and after the actual fold call to verify that fold did not
12210 accidentally change the original expression. */
12211
12212 tree
12213 fold (tree expr)
12214 {
12215 tree ret;
12216 struct md5_ctx ctx;
12217 unsigned char checksum_before[16], checksum_after[16];
12218 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12219
12220 md5_init_ctx (&ctx);
12221 fold_checksum_tree (expr, &ctx, &ht);
12222 md5_finish_ctx (&ctx, checksum_before);
12223 ht.empty ();
12224
12225 ret = fold_1 (expr);
12226
12227 md5_init_ctx (&ctx);
12228 fold_checksum_tree (expr, &ctx, &ht);
12229 md5_finish_ctx (&ctx, checksum_after);
12230
12231 if (memcmp (checksum_before, checksum_after, 16))
12232 fold_check_failed (expr, ret);
12233
12234 return ret;
12235 }
12236
12237 void
12238 print_fold_checksum (const_tree expr)
12239 {
12240 struct md5_ctx ctx;
12241 unsigned char checksum[16], cnt;
12242 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12243
12244 md5_init_ctx (&ctx);
12245 fold_checksum_tree (expr, &ctx, &ht);
12246 md5_finish_ctx (&ctx, checksum);
12247 for (cnt = 0; cnt < 16; ++cnt)
12248 fprintf (stderr, "%02x", checksum[cnt]);
12249 putc ('\n', stderr);
12250 }
12251
12252 static void
12253 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12254 {
12255 internal_error ("fold check: original tree changed by fold");
12256 }
12257
12258 static void
12259 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12260 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12261 {
12262 const tree_node **slot;
12263 enum tree_code code;
12264 union tree_node buf;
12265 int i, len;
12266
12267 recursive_label:
12268 if (expr == NULL)
12269 return;
12270 slot = ht->find_slot (expr, INSERT);
12271 if (*slot != NULL)
12272 return;
12273 *slot = expr;
12274 code = TREE_CODE (expr);
12275 if (TREE_CODE_CLASS (code) == tcc_declaration
12276 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12277 {
12278 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12279 memcpy ((char *) &buf, expr, tree_size (expr));
12280 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12281 buf.decl_with_vis.symtab_node = NULL;
12282 expr = (tree) &buf;
12283 }
12284 else if (TREE_CODE_CLASS (code) == tcc_type
12285 && (TYPE_POINTER_TO (expr)
12286 || TYPE_REFERENCE_TO (expr)
12287 || TYPE_CACHED_VALUES_P (expr)
12288 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12289 || TYPE_NEXT_VARIANT (expr)))
12290 {
12291 /* Allow these fields to be modified. */
12292 tree tmp;
12293 memcpy ((char *) &buf, expr, tree_size (expr));
12294 expr = tmp = (tree) &buf;
12295 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12296 TYPE_POINTER_TO (tmp) = NULL;
12297 TYPE_REFERENCE_TO (tmp) = NULL;
12298 TYPE_NEXT_VARIANT (tmp) = NULL;
12299 if (TYPE_CACHED_VALUES_P (tmp))
12300 {
12301 TYPE_CACHED_VALUES_P (tmp) = 0;
12302 TYPE_CACHED_VALUES (tmp) = NULL;
12303 }
12304 }
12305 md5_process_bytes (expr, tree_size (expr), ctx);
12306 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12307 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12308 if (TREE_CODE_CLASS (code) != tcc_type
12309 && TREE_CODE_CLASS (code) != tcc_declaration
12310 && code != TREE_LIST
12311 && code != SSA_NAME
12312 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12313 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12314 switch (TREE_CODE_CLASS (code))
12315 {
12316 case tcc_constant:
12317 switch (code)
12318 {
12319 case STRING_CST:
12320 md5_process_bytes (TREE_STRING_POINTER (expr),
12321 TREE_STRING_LENGTH (expr), ctx);
12322 break;
12323 case COMPLEX_CST:
12324 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12325 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12326 break;
12327 case VECTOR_CST:
12328 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12329 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12330 break;
12331 default:
12332 break;
12333 }
12334 break;
12335 case tcc_exceptional:
12336 switch (code)
12337 {
12338 case TREE_LIST:
12339 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12340 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12341 expr = TREE_CHAIN (expr);
12342 goto recursive_label;
12343 break;
12344 case TREE_VEC:
12345 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12346 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12347 break;
12348 default:
12349 break;
12350 }
12351 break;
12352 case tcc_expression:
12353 case tcc_reference:
12354 case tcc_comparison:
12355 case tcc_unary:
12356 case tcc_binary:
12357 case tcc_statement:
12358 case tcc_vl_exp:
12359 len = TREE_OPERAND_LENGTH (expr);
12360 for (i = 0; i < len; ++i)
12361 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12362 break;
12363 case tcc_declaration:
12364 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12365 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12366 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12367 {
12368 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12369 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12370 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12371 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12372 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12373 }
12374
12375 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12376 {
12377 if (TREE_CODE (expr) == FUNCTION_DECL)
12378 {
12379 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12380 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12381 }
12382 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12383 }
12384 break;
12385 case tcc_type:
12386 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12387 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12388 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12389 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12390 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12391 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12392 if (INTEGRAL_TYPE_P (expr)
12393 || SCALAR_FLOAT_TYPE_P (expr))
12394 {
12395 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12396 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12397 }
12398 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12399 if (TREE_CODE (expr) == RECORD_TYPE
12400 || TREE_CODE (expr) == UNION_TYPE
12401 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12402 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12403 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12404 break;
12405 default:
12406 break;
12407 }
12408 }
12409
12410 /* Helper function for outputting the checksum of a tree T. When
12411 debugging with gdb, you can "define mynext" to be "next" followed
12412 by "call debug_fold_checksum (op0)", then just trace down till the
12413 outputs differ. */
12414
12415 DEBUG_FUNCTION void
12416 debug_fold_checksum (const_tree t)
12417 {
12418 int i;
12419 unsigned char checksum[16];
12420 struct md5_ctx ctx;
12421 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12422
12423 md5_init_ctx (&ctx);
12424 fold_checksum_tree (t, &ctx, &ht);
12425 md5_finish_ctx (&ctx, checksum);
12426 ht.empty ();
12427
12428 for (i = 0; i < 16; i++)
12429 fprintf (stderr, "%d ", checksum[i]);
12430
12431 fprintf (stderr, "\n");
12432 }
12433
12434 #endif
12435
12436 /* Fold a unary tree expression with code CODE of type TYPE with an
12437 operand OP0. LOC is the location of the resulting expression.
12438 Return a folded expression if successful. Otherwise, return a tree
12439 expression with code CODE of type TYPE with an operand OP0. */
12440
12441 tree
12442 fold_build1_stat_loc (location_t loc,
12443 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12444 {
12445 tree tem;
12446 #ifdef ENABLE_FOLD_CHECKING
12447 unsigned char checksum_before[16], checksum_after[16];
12448 struct md5_ctx ctx;
12449 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12450
12451 md5_init_ctx (&ctx);
12452 fold_checksum_tree (op0, &ctx, &ht);
12453 md5_finish_ctx (&ctx, checksum_before);
12454 ht.empty ();
12455 #endif
12456
12457 tem = fold_unary_loc (loc, code, type, op0);
12458 if (!tem)
12459 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12460
12461 #ifdef ENABLE_FOLD_CHECKING
12462 md5_init_ctx (&ctx);
12463 fold_checksum_tree (op0, &ctx, &ht);
12464 md5_finish_ctx (&ctx, checksum_after);
12465
12466 if (memcmp (checksum_before, checksum_after, 16))
12467 fold_check_failed (op0, tem);
12468 #endif
12469 return tem;
12470 }
12471
12472 /* Fold a binary tree expression with code CODE of type TYPE with
12473 operands OP0 and OP1. LOC is the location of the resulting
12474 expression. Return a folded expression if successful. Otherwise,
12475 return a tree expression with code CODE of type TYPE with operands
12476 OP0 and OP1. */
12477
12478 tree
12479 fold_build2_stat_loc (location_t loc,
12480 enum tree_code code, tree type, tree op0, tree op1
12481 MEM_STAT_DECL)
12482 {
12483 tree tem;
12484 #ifdef ENABLE_FOLD_CHECKING
12485 unsigned char checksum_before_op0[16],
12486 checksum_before_op1[16],
12487 checksum_after_op0[16],
12488 checksum_after_op1[16];
12489 struct md5_ctx ctx;
12490 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12491
12492 md5_init_ctx (&ctx);
12493 fold_checksum_tree (op0, &ctx, &ht);
12494 md5_finish_ctx (&ctx, checksum_before_op0);
12495 ht.empty ();
12496
12497 md5_init_ctx (&ctx);
12498 fold_checksum_tree (op1, &ctx, &ht);
12499 md5_finish_ctx (&ctx, checksum_before_op1);
12500 ht.empty ();
12501 #endif
12502
12503 tem = fold_binary_loc (loc, code, type, op0, op1);
12504 if (!tem)
12505 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12506
12507 #ifdef ENABLE_FOLD_CHECKING
12508 md5_init_ctx (&ctx);
12509 fold_checksum_tree (op0, &ctx, &ht);
12510 md5_finish_ctx (&ctx, checksum_after_op0);
12511 ht.empty ();
12512
12513 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12514 fold_check_failed (op0, tem);
12515
12516 md5_init_ctx (&ctx);
12517 fold_checksum_tree (op1, &ctx, &ht);
12518 md5_finish_ctx (&ctx, checksum_after_op1);
12519
12520 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12521 fold_check_failed (op1, tem);
12522 #endif
12523 return tem;
12524 }
12525
12526 /* Fold a ternary tree expression with code CODE of type TYPE with
12527 operands OP0, OP1, and OP2. Return a folded expression if
12528 successful. Otherwise, return a tree expression with code CODE of
12529 type TYPE with operands OP0, OP1, and OP2. */
12530
12531 tree
12532 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12533 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12534 {
12535 tree tem;
12536 #ifdef ENABLE_FOLD_CHECKING
12537 unsigned char checksum_before_op0[16],
12538 checksum_before_op1[16],
12539 checksum_before_op2[16],
12540 checksum_after_op0[16],
12541 checksum_after_op1[16],
12542 checksum_after_op2[16];
12543 struct md5_ctx ctx;
12544 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12545
12546 md5_init_ctx (&ctx);
12547 fold_checksum_tree (op0, &ctx, &ht);
12548 md5_finish_ctx (&ctx, checksum_before_op0);
12549 ht.empty ();
12550
12551 md5_init_ctx (&ctx);
12552 fold_checksum_tree (op1, &ctx, &ht);
12553 md5_finish_ctx (&ctx, checksum_before_op1);
12554 ht.empty ();
12555
12556 md5_init_ctx (&ctx);
12557 fold_checksum_tree (op2, &ctx, &ht);
12558 md5_finish_ctx (&ctx, checksum_before_op2);
12559 ht.empty ();
12560 #endif
12561
12562 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12563 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12564 if (!tem)
12565 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12566
12567 #ifdef ENABLE_FOLD_CHECKING
12568 md5_init_ctx (&ctx);
12569 fold_checksum_tree (op0, &ctx, &ht);
12570 md5_finish_ctx (&ctx, checksum_after_op0);
12571 ht.empty ();
12572
12573 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12574 fold_check_failed (op0, tem);
12575
12576 md5_init_ctx (&ctx);
12577 fold_checksum_tree (op1, &ctx, &ht);
12578 md5_finish_ctx (&ctx, checksum_after_op1);
12579 ht.empty ();
12580
12581 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12582 fold_check_failed (op1, tem);
12583
12584 md5_init_ctx (&ctx);
12585 fold_checksum_tree (op2, &ctx, &ht);
12586 md5_finish_ctx (&ctx, checksum_after_op2);
12587
12588 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12589 fold_check_failed (op2, tem);
12590 #endif
12591 return tem;
12592 }
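/* Usage sketch for the fold_buildN_stat_loc family above (illustrative
   only): fold_build2_loc (loc, PLUS_EXPR, type, x,
   build_int_cst (type, 0)) simplifies to x, while an expression that
   does not fold, such as x + y for two SSA names, is built verbatim via
   build2_stat_loc.  Under ENABLE_FOLD_CHECKING each operand is
   checksummed before and after folding to catch in-place modification.  */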
12593
12594 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
12595 arguments in ARGARRAY, and a null static chain.
12596 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12597 of type TYPE from the given operands as constructed by build_call_array. */
12598
12599 tree
12600 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12601 int nargs, tree *argarray)
12602 {
12603 tree tem;
12604 #ifdef ENABLE_FOLD_CHECKING
12605 unsigned char checksum_before_fn[16],
12606 checksum_before_arglist[16],
12607 checksum_after_fn[16],
12608 checksum_after_arglist[16];
12609 struct md5_ctx ctx;
12610 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12611 int i;
12612
12613 md5_init_ctx (&ctx);
12614 fold_checksum_tree (fn, &ctx, &ht);
12615 md5_finish_ctx (&ctx, checksum_before_fn);
12616 ht.empty ();
12617
12618 md5_init_ctx (&ctx);
12619 for (i = 0; i < nargs; i++)
12620 fold_checksum_tree (argarray[i], &ctx, &ht);
12621 md5_finish_ctx (&ctx, checksum_before_arglist);
12622 ht.empty ();
12623 #endif
12624
12625 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12626 if (!tem)
12627 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12628
12629 #ifdef ENABLE_FOLD_CHECKING
12630 md5_init_ctx (&ctx);
12631 fold_checksum_tree (fn, &ctx, &ht);
12632 md5_finish_ctx (&ctx, checksum_after_fn);
12633 ht.empty ();
12634
12635 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12636 fold_check_failed (fn, tem);
12637
12638 md5_init_ctx (&ctx);
12639 for (i = 0; i < nargs; i++)
12640 fold_checksum_tree (argarray[i], &ctx, &ht);
12641 md5_finish_ctx (&ctx, checksum_after_arglist);
12642
12643 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12644 fold_check_failed (NULL_TREE, tem);
12645 #endif
12646 return tem;
12647 }
12648
12649 /* Perform constant folding and related simplification of initializer
12650 expression EXPR. These behave identically to "fold_buildN" but ignore
12651 potential run-time traps and exceptions that fold must preserve. */
12652
12653 #define START_FOLD_INIT \
12654 int saved_signaling_nans = flag_signaling_nans;\
12655 int saved_trapping_math = flag_trapping_math;\
12656 int saved_rounding_math = flag_rounding_math;\
12657 int saved_trapv = flag_trapv;\
12658 int saved_folding_initializer = folding_initializer;\
12659 flag_signaling_nans = 0;\
12660 flag_trapping_math = 0;\
12661 flag_rounding_math = 0;\
12662 flag_trapv = 0;\
12663 folding_initializer = 1;
12664
12665 #define END_FOLD_INIT \
12666 flag_signaling_nans = saved_signaling_nans;\
12667 flag_trapping_math = saved_trapping_math;\
12668 flag_rounding_math = saved_rounding_math;\
12669 flag_trapv = saved_trapv;\
12670 folding_initializer = saved_folding_initializer;
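/* Expansion sketch of the macro pair above (illustrative only): a
   fold_buildN_initializer_loc body behaves as if written

	int saved_trapping_math = flag_trapping_math;
	...
	flag_trapping_math = 0;
	folding_initializer = 1;
	result = fold_buildN_loc (...);
	flag_trapping_math = saved_trapping_math;
	...
	folding_initializer = saved_folding_initializer;

   so that folding inside a static initializer may ignore the run-time
   traps and rounding behavior that fold otherwise has to preserve.  */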
12671
12672 tree
12673 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12674 tree type, tree op)
12675 {
12676 tree result;
12677 START_FOLD_INIT;
12678
12679 result = fold_build1_loc (loc, code, type, op);
12680
12681 END_FOLD_INIT;
12682 return result;
12683 }
12684
12685 tree
12686 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12687 tree type, tree op0, tree op1)
12688 {
12689 tree result;
12690 START_FOLD_INIT;
12691
12692 result = fold_build2_loc (loc, code, type, op0, op1);
12693
12694 END_FOLD_INIT;
12695 return result;
12696 }
12697
12698 tree
12699 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12700 int nargs, tree *argarray)
12701 {
12702 tree result;
12703 START_FOLD_INIT;
12704
12705 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12706
12707 END_FOLD_INIT;
12708 return result;
12709 }
12710
12711 #undef START_FOLD_INIT
12712 #undef END_FOLD_INIT
12713
12714 /* Determine if the first argument is a multiple of the second argument.
12715 Return 0 if it is not, or if we cannot easily determine it to be.
12716
12717 An example of the sort of thing we care about (at this point; this routine
12718 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12719 fold cases do now) is discovering that
12720
12721 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12722
12723 is a multiple of
12724
12725 SAVE_EXPR (J * 8)
12726
12727 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12728
12729 This code also handles discovering that
12730
12731 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12732
12733 is a multiple of 8 so we don't have to worry about dealing with a
12734 possible remainder.
12735
12736 Note that we *look* inside a SAVE_EXPR only to determine how it was
12737 calculated; it is not safe for fold to do much of anything else with the
12738 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12739 at run time. For example, the latter example above *cannot* be implemented
12740 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12741 evaluation time of the original SAVE_EXPR is not necessarily the same at
12742 the time the new expression is evaluated. The only optimization of this
12743 sort that would be valid is changing
12744
12745 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12746
12747 divided by 8 to
12748
12749 SAVE_EXPR (I) * SAVE_EXPR (J)
12750
12751 (where the same SAVE_EXPR (J) is used in the original and the
12752 transformed version). */
12753
12754 int
12755 multiple_of_p (tree type, const_tree top, const_tree bottom)
12756 {
12757 if (operand_equal_p (top, bottom, 0))
12758 return 1;
12759
12760 if (TREE_CODE (type) != INTEGER_TYPE)
12761 return 0;
12762
12763 switch (TREE_CODE (top))
12764 {
12765 case BIT_AND_EXPR:
12766 /* A bitwise AND provides a power-of-two multiple. If the mask is
12767 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12768 if (!integer_pow2p (bottom))
12769 return 0;
12770 /* FALLTHRU */
12771
12772 case MULT_EXPR:
12773 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12774 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12775
12776 case PLUS_EXPR:
12777 case MINUS_EXPR:
12778 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12779 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12780
12781 case LSHIFT_EXPR:
12782 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12783 {
12784 tree op1, t1;
12785
12786 op1 = TREE_OPERAND (top, 1);
12787 /* const_binop may not detect overflow correctly,
12788 so check for it explicitly here. */
12789 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12790 && 0 != (t1 = fold_convert (type,
12791 const_binop (LSHIFT_EXPR,
12792 size_one_node,
12793 op1)))
12794 && !TREE_OVERFLOW (t1))
12795 return multiple_of_p (type, t1, bottom);
12796 }
12797 return 0;
12798
12799 case NOP_EXPR:
12800 /* Can't handle conversions from a non-integral or wider integral type. */
12801 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12802 || (TYPE_PRECISION (type)
12803 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12804 return 0;
12805
12806 /* .. fall through ... */
12807
12808 case SAVE_EXPR:
12809 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12810
12811 case COND_EXPR:
12812 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12813 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12814
12815 case INTEGER_CST:
12816 if (TREE_CODE (bottom) != INTEGER_CST
12817 || integer_zerop (bottom)
12818 || (TYPE_UNSIGNED (type)
12819 && (tree_int_cst_sgn (top) < 0
12820 || tree_int_cst_sgn (bottom) < 0)))
12821 return 0;
12822 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12823 SIGNED);
12824
12825 default:
12826 return 0;
12827 }
12828 }
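/* Worked example for multiple_of_p above (illustrative only): with
   TOP = SAVE_EXPR (I) * SAVE_EXPR (J * 8) and BOTTOM = 8, the MULT_EXPR
   case succeeds through its second operand: the SAVE_EXPR case recurses
   into J * 8, which is again a MULT_EXPR whose second operand is the
   INTEGER_CST 8, and 8 is trivially a multiple of 8.  */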
12829
12830 #define tree_expr_nonnegative_warnv_p(X, Y) \
12831 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12832
12833 #define RECURSE(X) \
12834 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12835
12836 /* Return true if CODE or TYPE is known to be non-negative. */
12837
12838 static bool
12839 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12840 {
12841 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12842 && truth_value_p (code))
12843 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12844 have a signed:1 type (where the values are -1 and 0). */
12845 return true;
12846 return false;
12847 }
12848
12849 /* Return true if (CODE OP0) is known to be non-negative. If the return
12850 value is based on the assumption that signed overflow is undefined,
12851 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12852 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12853
12854 bool
12855 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12856 bool *strict_overflow_p, int depth)
12857 {
12858 if (TYPE_UNSIGNED (type))
12859 return true;
12860
12861 switch (code)
12862 {
12863 case ABS_EXPR:
12864 /* We can't return 1 if flag_wrapv is set because
12865 ABS_EXPR<INT_MIN> = INT_MIN. */
12866 if (!ANY_INTEGRAL_TYPE_P (type))
12867 return true;
12868 if (TYPE_OVERFLOW_UNDEFINED (type))
12869 {
12870 *strict_overflow_p = true;
12871 return true;
12872 }
12873 break;
12874
12875 case NON_LVALUE_EXPR:
12876 case FLOAT_EXPR:
12877 case FIX_TRUNC_EXPR:
12878 return RECURSE (op0);
12879
12880 CASE_CONVERT:
12881 {
12882 tree inner_type = TREE_TYPE (op0);
12883 tree outer_type = type;
12884
12885 if (TREE_CODE (outer_type) == REAL_TYPE)
12886 {
12887 if (TREE_CODE (inner_type) == REAL_TYPE)
12888 return RECURSE (op0);
12889 if (INTEGRAL_TYPE_P (inner_type))
12890 {
12891 if (TYPE_UNSIGNED (inner_type))
12892 return true;
12893 return RECURSE (op0);
12894 }
12895 }
12896 else if (INTEGRAL_TYPE_P (outer_type))
12897 {
12898 if (TREE_CODE (inner_type) == REAL_TYPE)
12899 return RECURSE (op0);
12900 if (INTEGRAL_TYPE_P (inner_type))
12901 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12902 && TYPE_UNSIGNED (inner_type);
12903 }
12904 }
12905 break;
12906
12907 default:
12908 return tree_simple_nonnegative_warnv_p (code, type);
12909 }
12910
12911 /* We don't know the sign of `t', so be conservative and return false. */
12912 return false;
12913 }
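/* Example for the unary cases above (illustrative only): for a signed
   int X, ABS_EXPR <X> is reported non-negative only when signed
   overflow is undefined, and *STRICT_OVERFLOW_P is then set because
   the answer relies on ABS_EXPR <INT_MIN> not occurring; under -fwrapv
   the same query conservatively returns false.  */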
12914
12915 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12916 value is based on the assumption that signed overflow is undefined,
12917 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12918 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12919
12920 bool
12921 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12922 tree op1, bool *strict_overflow_p,
12923 int depth)
12924 {
12925 if (TYPE_UNSIGNED (type))
12926 return true;
12927
12928 switch (code)
12929 {
12930 case POINTER_PLUS_EXPR:
12931 case PLUS_EXPR:
12932 if (FLOAT_TYPE_P (type))
12933 return RECURSE (op0) && RECURSE (op1);
12934
12935 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12936 both unsigned and at least 2 bits shorter than the result. */
12937 if (TREE_CODE (type) == INTEGER_TYPE
12938 && TREE_CODE (op0) == NOP_EXPR
12939 && TREE_CODE (op1) == NOP_EXPR)
12940 {
12941 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12942 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12943 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12944 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12945 {
12946 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12947 TYPE_PRECISION (inner2)) + 1;
12948 return prec < TYPE_PRECISION (type);
12949 }
12950 }
12951 break;
12952
12953 case MULT_EXPR:
12954 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12955 {
12956 /* x * x is always non-negative for floating point x
12957 or when signed overflow is undefined. */
12958 if (operand_equal_p (op0, op1, 0)
12959 || (RECURSE (op0) && RECURSE (op1)))
12960 {
12961 if (ANY_INTEGRAL_TYPE_P (type)
12962 && TYPE_OVERFLOW_UNDEFINED (type))
12963 *strict_overflow_p = true;
12964 return true;
12965 }
12966 }
12967
12968 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12969 both unsigned and their total precision is less than that of the result. */
12970 if (TREE_CODE (type) == INTEGER_TYPE
12971 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12972 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12973 {
12974 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12975 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12976 : TREE_TYPE (op0);
12977 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12978 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12979 : TREE_TYPE (op1);
12980
12981 bool unsigned0 = TYPE_UNSIGNED (inner0);
12982 bool unsigned1 = TYPE_UNSIGNED (inner1);
12983
12984 if (TREE_CODE (op0) == INTEGER_CST)
12985 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12986
12987 if (TREE_CODE (op1) == INTEGER_CST)
12988 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12989
12990 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12991 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12992 {
12993 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12994 ? tree_int_cst_min_precision (op0, UNSIGNED)
12995 : TYPE_PRECISION (inner0);
12996
12997 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12998 ? tree_int_cst_min_precision (op1, UNSIGNED)
12999 : TYPE_PRECISION (inner1);
13000
13001 return precision0 + precision1 < TYPE_PRECISION (type);
13002 }
13003 }
13004 return false;
13005
13006 case BIT_AND_EXPR:
13007 case MAX_EXPR:
13008 return RECURSE (op0) || RECURSE (op1);
13009
13010 case BIT_IOR_EXPR:
13011 case BIT_XOR_EXPR:
13012 case MIN_EXPR:
13013 case RDIV_EXPR:
13014 case TRUNC_DIV_EXPR:
13015 case CEIL_DIV_EXPR:
13016 case FLOOR_DIV_EXPR:
13017 case ROUND_DIV_EXPR:
13018 return RECURSE (op0) && RECURSE (op1);
13019
13020 case TRUNC_MOD_EXPR:
13021 return RECURSE (op0);
13022
13023 case FLOOR_MOD_EXPR:
13024 return RECURSE (op1);
13025
13026 case CEIL_MOD_EXPR:
13027 case ROUND_MOD_EXPR:
13028 default:
13029 return tree_simple_nonnegative_warnv_p (code, type);
13030 }
13031
13032 /* We don't know the sign of `t', so be conservative and return false. */
13033 return false;
13034 }
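/* Worked example for the PLUS_EXPR precision test above (illustrative
   only): for int r = (int) (unsigned short) a + (int) (unsigned short) b
   with 16-bit shorts and a 32-bit int, both inner types are unsigned
   with precision 16, so prec = MAX (16, 16) + 1 = 17 < 32 and the sum
   is known non-negative; with unsigned int inner operands prec would be
   33 and the test correctly fails.  */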
13035
13036 /* Return true if T is known to be non-negative. If the return
13037 value is based on the assumption that signed overflow is undefined,
13038 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13039 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13040
13041 bool
13042 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13043 {
13044 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13045 return true;
13046
13047 switch (TREE_CODE (t))
13048 {
13049 case INTEGER_CST:
13050 return tree_int_cst_sgn (t) >= 0;
13051
13052 case REAL_CST:
13053 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13054
13055 case FIXED_CST:
13056 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13057
13058 case COND_EXPR:
13059 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13060
13061 case SSA_NAME:
13062 /* Limit the depth of recursion to avoid quadratic behavior.
13063 This is expected to catch almost all occurrences in practice.
13064 If this code misses important cases that unbounded recursion
13065 would not, passes that need this information could be revised
13066 to provide it through dataflow propagation. */
13067 return (!name_registered_for_update_p (t)
13068 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13069 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13070 strict_overflow_p, depth));
13071
13072 default:
13073 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13074 }
13075 }
13076
13077 /* Return true if T is known to be non-negative. If the return
13078 value is based on the assumption that signed overflow is undefined,
13079 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13080 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13081
13082 bool
13083 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13084 bool *strict_overflow_p, int depth)
13085 {
13086 switch (fn)
13087 {
13088 CASE_CFN_ACOS:
13089 CASE_CFN_ACOSH:
13090 CASE_CFN_CABS:
13091 CASE_CFN_COSH:
13092 CASE_CFN_ERFC:
13093 CASE_CFN_EXP:
13094 CASE_CFN_EXP10:
13095 CASE_CFN_EXP2:
13096 CASE_CFN_FABS:
13097 CASE_CFN_FDIM:
13098 CASE_CFN_HYPOT:
13099 CASE_CFN_POW10:
13100 CASE_CFN_FFS:
13101 CASE_CFN_PARITY:
13102 CASE_CFN_POPCOUNT:
13103 CASE_CFN_CLZ:
13104 CASE_CFN_CLRSB:
13105 case CFN_BUILT_IN_BSWAP32:
13106 case CFN_BUILT_IN_BSWAP64:
13107 /* Always true. */
13108 return true;
13109
13110 CASE_CFN_SQRT:
13111 /* sqrt(-0.0) is -0.0. */
13112 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13113 return true;
13114 return RECURSE (arg0);
13115
13116 CASE_CFN_ASINH:
13117 CASE_CFN_ATAN:
13118 CASE_CFN_ATANH:
13119 CASE_CFN_CBRT:
13120 CASE_CFN_CEIL:
13121 CASE_CFN_ERF:
13122 CASE_CFN_EXPM1:
13123 CASE_CFN_FLOOR:
13124 CASE_CFN_FMOD:
13125 CASE_CFN_FREXP:
13126 CASE_CFN_ICEIL:
13127 CASE_CFN_IFLOOR:
13128 CASE_CFN_IRINT:
13129 CASE_CFN_IROUND:
13130 CASE_CFN_LCEIL:
13131 CASE_CFN_LDEXP:
13132 CASE_CFN_LFLOOR:
13133 CASE_CFN_LLCEIL:
13134 CASE_CFN_LLFLOOR:
13135 CASE_CFN_LLRINT:
13136 CASE_CFN_LLROUND:
13137 CASE_CFN_LRINT:
13138 CASE_CFN_LROUND:
13139 CASE_CFN_MODF:
13140 CASE_CFN_NEARBYINT:
13141 CASE_CFN_RINT:
13142 CASE_CFN_ROUND:
13143 CASE_CFN_SCALB:
13144 CASE_CFN_SCALBLN:
13145 CASE_CFN_SCALBN:
13146 CASE_CFN_SIGNBIT:
13147 CASE_CFN_SIGNIFICAND:
13148 CASE_CFN_SINH:
13149 CASE_CFN_TANH:
13150 CASE_CFN_TRUNC:
13151 /* True if the 1st argument is nonnegative. */
13152 return RECURSE (arg0);
13153
13154 CASE_CFN_FMAX:
13155 /* True if the 1st OR 2nd arguments are nonnegative. */
13156 return RECURSE (arg0) || RECURSE (arg1);
13157
13158 CASE_CFN_FMIN:
13159 /* True if the 1st AND 2nd arguments are nonnegative. */
13160 return RECURSE (arg0) && RECURSE (arg1);
13161
13162 CASE_CFN_COPYSIGN:
13163 /* True if the 2nd argument is nonnegative. */
13164 return RECURSE (arg1);
13165
13166 CASE_CFN_POWI:
13167 /* True if the 1st argument is nonnegative or the second
13168 argument is an even integer. */
13169 if (TREE_CODE (arg1) == INTEGER_CST
13170 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13171 return true;
13172 return RECURSE (arg0);
13173
13174 CASE_CFN_POW:
13175 /* True if the 1st argument is nonnegative or the second
13176 argument is an even integer valued real. */
13177 if (TREE_CODE (arg1) == REAL_CST)
13178 {
13179 REAL_VALUE_TYPE c;
13180 HOST_WIDE_INT n;
13181
13182 c = TREE_REAL_CST (arg1);
13183 n = real_to_integer (&c);
13184 if ((n & 1) == 0)
13185 {
13186 REAL_VALUE_TYPE cint;
13187 real_from_integer (&cint, VOIDmode, n, SIGNED);
13188 if (real_identical (&c, &cint))
13189 return true;
13190 }
13191 }
13192 return RECURSE (arg0);
13193
13194 default:
13195 break;
13196 }
13197 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13198 }
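/* Examples for the call cases above (illustrative only): pow (x, 2.0)
   is known non-negative because 2.0 is an even integer-valued REAL_CST;
   copysign (x, y) is non-negative exactly when y is; and fabs (x) is
   non-negative unconditionally via CASE_CFN_FABS.  */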
13199
13200 /* Return true if T is known to be non-negative. If the return
13201 value is based on the assumption that signed overflow is undefined,
13202 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13203 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13204
13205 static bool
13206 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13207 {
13208 enum tree_code code = TREE_CODE (t);
13209 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13210 return true;
13211
13212 switch (code)
13213 {
13214 case TARGET_EXPR:
13215 {
13216 tree temp = TARGET_EXPR_SLOT (t);
13217 t = TARGET_EXPR_INITIAL (t);
13218
13219 /* If the initializer is non-void, then it's a normal expression
13220 that will be assigned to the slot. */
13221 if (!VOID_TYPE_P (t))
13222 return RECURSE (t);
13223
13224 /* Otherwise, the initializer sets the slot in some way. One common
13225 way is an assignment statement at the end of the initializer. */
13226 while (1)
13227 {
13228 if (TREE_CODE (t) == BIND_EXPR)
13229 t = expr_last (BIND_EXPR_BODY (t));
13230 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13231 || TREE_CODE (t) == TRY_CATCH_EXPR)
13232 t = expr_last (TREE_OPERAND (t, 0));
13233 else if (TREE_CODE (t) == STATEMENT_LIST)
13234 t = expr_last (t);
13235 else
13236 break;
13237 }
13238 if (TREE_CODE (t) == MODIFY_EXPR
13239 && TREE_OPERAND (t, 0) == temp)
13240 return RECURSE (TREE_OPERAND (t, 1));
13241
13242 return false;
13243 }
13244
13245 case CALL_EXPR:
13246 {
13247 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13248 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13249
13250 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13251 get_call_combined_fn (t),
13252 arg0,
13253 arg1,
13254 strict_overflow_p, depth);
13255 }
13256 case COMPOUND_EXPR:
13257 case MODIFY_EXPR:
13258 return RECURSE (TREE_OPERAND (t, 1));
13259
13260 case BIND_EXPR:
13261 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13262
13263 case SAVE_EXPR:
13264 return RECURSE (TREE_OPERAND (t, 0));
13265
13266 default:
13267 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13268 }
13269 }
13270
13271 #undef RECURSE
13272 #undef tree_expr_nonnegative_warnv_p
13273
13274 /* Return true if T is known to be non-negative. If the return
13275 value is based on the assumption that signed overflow is undefined,
13276 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13277 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13278
13279 bool
13280 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13281 {
13282 enum tree_code code;
13283 if (t == error_mark_node)
13284 return false;
13285
13286 code = TREE_CODE (t);
13287 switch (TREE_CODE_CLASS (code))
13288 {
13289 case tcc_binary:
13290 case tcc_comparison:
13291 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13292 TREE_TYPE (t),
13293 TREE_OPERAND (t, 0),
13294 TREE_OPERAND (t, 1),
13295 strict_overflow_p, depth);
13296
13297 case tcc_unary:
13298 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13299 TREE_TYPE (t),
13300 TREE_OPERAND (t, 0),
13301 strict_overflow_p, depth);
13302
13303 case tcc_constant:
13304 case tcc_declaration:
13305 case tcc_reference:
13306 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13307
13308 default:
13309 break;
13310 }
13311
13312 switch (code)
13313 {
13314 case TRUTH_AND_EXPR:
13315 case TRUTH_OR_EXPR:
13316 case TRUTH_XOR_EXPR:
13317 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13318 TREE_TYPE (t),
13319 TREE_OPERAND (t, 0),
13320 TREE_OPERAND (t, 1),
13321 strict_overflow_p, depth);
13322 case TRUTH_NOT_EXPR:
13323 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13324 TREE_TYPE (t),
13325 TREE_OPERAND (t, 0),
13326 strict_overflow_p, depth);
13327
13328 case COND_EXPR:
13329 case CONSTRUCTOR:
13330 case OBJ_TYPE_REF:
13331 case ASSERT_EXPR:
13332 case ADDR_EXPR:
13333 case WITH_SIZE_EXPR:
13334 case SSA_NAME:
13335 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13336
13337 default:
13338 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13339 }
13340 }
13341
13342 /* Return true if `t' is known to be non-negative. Handle warnings
13343 about undefined signed overflow. */
13344
13345 bool
13346 tree_expr_nonnegative_p (tree t)
13347 {
13348 bool ret, strict_overflow_p;
13349
13350 strict_overflow_p = false;
13351 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13352 if (strict_overflow_p)
13353 fold_overflow_warning (("assuming signed overflow does not occur when "
13354 "determining that expression is always "
13355 "non-negative"),
13356 WARN_STRICT_OVERFLOW_MISC);
13357 return ret;
13358 }
13359
13360
13361 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13362 For floating point we further ensure that T is not denormal.
13363 Similar logic is present in nonzero_address in rtlanal.h.
13364
13365 If the return value is based on the assumption that signed overflow
13366 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13367 change *STRICT_OVERFLOW_P. */
13368
13369 bool
13370 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13371 bool *strict_overflow_p)
13372 {
13373 switch (code)
13374 {
13375 case ABS_EXPR:
13376 return tree_expr_nonzero_warnv_p (op0,
13377 strict_overflow_p);
13378
13379 case NOP_EXPR:
13380 {
13381 tree inner_type = TREE_TYPE (op0);
13382 tree outer_type = type;
13383
13384 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13385 && tree_expr_nonzero_warnv_p (op0,
13386 strict_overflow_p));
13387 }
13388 break;
13389
13390 case NON_LVALUE_EXPR:
13391 return tree_expr_nonzero_warnv_p (op0,
13392 strict_overflow_p);
13393
13394 default:
13395 break;
13396 }
13397
13398 return false;
13399 }
13400
13401 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13402 For floating point we further ensure that T is not denormal.
13403 Similar logic is present in nonzero_address in rtlanal.h.
13404
13405 If the return value is based on the assumption that signed overflow
13406 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13407 change *STRICT_OVERFLOW_P. */
13408
13409 bool
13410 tree_binary_nonzero_warnv_p (enum tree_code code,
13411 tree type,
13412 tree op0,
13413 tree op1, bool *strict_overflow_p)
13414 {
13415 bool sub_strict_overflow_p;
13416 switch (code)
13417 {
13418 case POINTER_PLUS_EXPR:
13419 case PLUS_EXPR:
13420 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13421 {
13422 /* In the presence of negative values it is hard
13423 to say anything definite. */
13424 sub_strict_overflow_p = false;
13425 if (!tree_expr_nonnegative_warnv_p (op0,
13426 &sub_strict_overflow_p)
13427 || !tree_expr_nonnegative_warnv_p (op1,
13428 &sub_strict_overflow_p))
13429 return false;
13430 /* One of the operands must be positive and the other non-negative. */
13431 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13432 overflows, on a two's-complement machine the sum of two
13433 nonnegative numbers can never be zero. */
13434 return (tree_expr_nonzero_warnv_p (op0,
13435 strict_overflow_p)
13436 || tree_expr_nonzero_warnv_p (op1,
13437 strict_overflow_p));
13438 }
13439 break;
13440
13441 case MULT_EXPR:
13442 if (TYPE_OVERFLOW_UNDEFINED (type))
13443 {
13444 if (tree_expr_nonzero_warnv_p (op0,
13445 strict_overflow_p)
13446 && tree_expr_nonzero_warnv_p (op1,
13447 strict_overflow_p))
13448 {
13449 *strict_overflow_p = true;
13450 return true;
13451 }
13452 }
13453 break;
13454
13455 case MIN_EXPR:
13456 sub_strict_overflow_p = false;
13457 if (tree_expr_nonzero_warnv_p (op0,
13458 &sub_strict_overflow_p)
13459 && tree_expr_nonzero_warnv_p (op1,
13460 &sub_strict_overflow_p))
13461 {
13462 if (sub_strict_overflow_p)
13463 *strict_overflow_p = true;
13464 }
13465 break;
13466
13467 case MAX_EXPR:
13468 sub_strict_overflow_p = false;
13469 if (tree_expr_nonzero_warnv_p (op0,
13470 &sub_strict_overflow_p))
13471 {
13472 if (sub_strict_overflow_p)
13473 *strict_overflow_p = true;
13474
13475 /* When both operands are nonzero, MAX must be too. */
13476 if (tree_expr_nonzero_warnv_p (op1,
13477 strict_overflow_p))
13478 return true;
13479
13480 /* MAX where operand 0 is positive is positive. */
13481 return tree_expr_nonnegative_warnv_p (op0,
13482 strict_overflow_p);
13483 }
13484 /* MAX where operand 1 is positive is positive. */
13485 else if (tree_expr_nonzero_warnv_p (op1,
13486 &sub_strict_overflow_p)
13487 && tree_expr_nonnegative_warnv_p (op1,
13488 &sub_strict_overflow_p))
13489 {
13490 if (sub_strict_overflow_p)
13491 *strict_overflow_p = true;
13492 return true;
13493 }
13494 break;
13495
13496 case BIT_IOR_EXPR:
13497 return (tree_expr_nonzero_warnv_p (op1,
13498 strict_overflow_p)
13499 || tree_expr_nonzero_warnv_p (op0,
13500 strict_overflow_p));
13501
13502 default:
13503 break;
13504 }
13505
13506 return false;
13507 }
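/* Example for the MULT_EXPR case above (illustrative only): for signed
   ints X and Y both known nonzero, X * Y is reported nonzero and
   *STRICT_OVERFLOW_P is set, since the conclusion fails if the product
   may wrap to zero (e.g. 0x10000 * 0x10000 in a 32-bit type under
   -fwrapv).  */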
13508
13509 /* Return true when T is an address and is known to be nonzero.
13510 For floating point we further ensure that T is not denormal.
13511 Similar logic is present in nonzero_address in rtlanal.h.
13512
13513 If the return value is based on the assumption that signed overflow
13514 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13515 change *STRICT_OVERFLOW_P. */
13516
13517 bool
13518 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13519 {
13520 bool sub_strict_overflow_p;
13521 switch (TREE_CODE (t))
13522 {
13523 case INTEGER_CST:
13524 return !integer_zerop (t);
13525
13526 case ADDR_EXPR:
13527 {
13528 tree base = TREE_OPERAND (t, 0);
13529
13530 if (!DECL_P (base))
13531 base = get_base_address (base);
13532
13533 if (!base)
13534 return false;
13535
13536 /* For objects in the symbol table, check if we know they are non-zero.
13537 Don't do anything for variables and functions before symtab is built;
13538 it is quite possible that they will be declared weak later. */
13539 int nonzero_addr = maybe_nonzero_address (base);
13540 if (nonzero_addr >= 0)
13541 return nonzero_addr;
13542
13543 /* Function-local objects are never NULL. */
13544 if (DECL_P (base)
13545 && (DECL_CONTEXT (base)
13546 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13547 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13548 return true;
13549
13550 /* Constants are never weak. */
13551 if (CONSTANT_CLASS_P (base))
13552 return true;
13553
13554 return false;
13555 }
13556
13557 case COND_EXPR:
13558 sub_strict_overflow_p = false;
13559 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13560 &sub_strict_overflow_p)
13561 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13562 &sub_strict_overflow_p))
13563 {
13564 if (sub_strict_overflow_p)
13565 *strict_overflow_p = true;
13566 return true;
13567 }
13568 break;
13569
13570 default:
13571 break;
13572 }
13573 return false;
13574 }
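/* Example for the ADDR_EXPR case above (illustrative only): for a local
   "int i;" declared inside a function, &i passes the
   DECL_CONTEXT/auto_var_in_fn_p test and is known nonzero, while the
   address of a global that might still be declared weak is left to
   maybe_nonzero_address to decide.  */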
13575
13576 #define integer_valued_real_p(X) \
13577 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13578
13579 #define RECURSE(X) \
13580 ((integer_valued_real_p) (X, depth + 1))
13581
13582 /* Return true if the floating point result of (CODE OP0) has an
13583 integer value. We also allow +Inf, -Inf and NaN to be considered
13584 integer values. Return false for signaling NaN.
13585
13586 DEPTH is the current nesting depth of the query. */
13587
13588 bool
13589 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13590 {
13591 switch (code)
13592 {
13593 case FLOAT_EXPR:
13594 return true;
13595
13596 case ABS_EXPR:
13597 return RECURSE (op0);
13598
13599 CASE_CONVERT:
13600 {
13601 tree type = TREE_TYPE (op0);
13602 if (TREE_CODE (type) == INTEGER_TYPE)
13603 return true;
13604 if (TREE_CODE (type) == REAL_TYPE)
13605 return RECURSE (op0);
13606 break;
13607 }
13608
13609 default:
13610 break;
13611 }
13612 return false;
13613 }
13614
13615 /* Return true if the floating point result of (CODE OP0 OP1) has an
13616 integer value. We also allow +Inf, -Inf and NaN to be considered
13617 integer values. Return false for signaling NaN.
13618
13619 DEPTH is the current nesting depth of the query. */
13620
13621 bool
13622 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13623 {
13624 switch (code)
13625 {
13626 case PLUS_EXPR:
13627 case MINUS_EXPR:
13628 case MULT_EXPR:
13629 case MIN_EXPR:
13630 case MAX_EXPR:
13631 return RECURSE (op0) && RECURSE (op1);
13632
13633 default:
13634 break;
13635 }
13636 return false;
13637 }
13638
13639 /* Return true if the floating point result of calling FN with arguments
13640 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13641 considered integer values. Return false for signaling NaN. If FN
13642 takes fewer than 2 arguments, the remaining ARGn are null.
13643
13644 DEPTH is the current nesting depth of the query. */
13645
13646 bool
13647 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13648 {
13649 switch (fn)
13650 {
13651 CASE_CFN_CEIL:
13652 CASE_CFN_FLOOR:
13653 CASE_CFN_NEARBYINT:
13654 CASE_CFN_RINT:
13655 CASE_CFN_ROUND:
13656 CASE_CFN_TRUNC:
13657 return true;
13658
13659 CASE_CFN_FMIN:
13660 CASE_CFN_FMAX:
13661 return RECURSE (arg0) && RECURSE (arg1);
13662
13663 default:
13664 break;
13665 }
13666 return false;
13667 }
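/* Examples for the call cases above (illustrative only): trunc (x) is
   integer valued by construction, so CASE_CFN_TRUNC returns true
   outright, whereas fmax (x, y) is integer valued only when both
   arguments already are, hence the RECURSE on each.  */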
13668
13669 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13670 has an integer value. We also allow +Inf, -Inf and NaN to be
13671 considered integer values. Return false for signaling NaN.
13672
13673 DEPTH is the current nesting depth of the query. */
13674
13675 bool
13676 integer_valued_real_single_p (tree t, int depth)
13677 {
13678 switch (TREE_CODE (t))
13679 {
13680 case REAL_CST:
13681 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13682
13683 case COND_EXPR:
13684 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13685
13686 case SSA_NAME:
13687 /* Limit the depth of recursion to avoid quadratic behavior.
13688 This is expected to catch almost all occurrences in practice.
13689 If this code misses important cases that unbounded recursion
13690 would not, passes that need this information could be revised
13691 to provide it through dataflow propagation. */
13692 return (!name_registered_for_update_p (t)
13693 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13694 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13695 depth));
13696
13697 default:
13698 break;
13699 }
13700 return false;
13701 }
13702
13703 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13704 has an integer value. We also allow +Inf, -Inf and NaN to be
13705 considered integer values. Return false for signaling NaN.
13706
13707 DEPTH is the current nesting depth of the query. */
13708
13709 static bool
13710 integer_valued_real_invalid_p (tree t, int depth)
13711 {
13712 switch (TREE_CODE (t))
13713 {
13714 case COMPOUND_EXPR:
13715 case MODIFY_EXPR:
13716 case BIND_EXPR:
13717 return RECURSE (TREE_OPERAND (t, 1));
13718
13719 case SAVE_EXPR:
13720 return RECURSE (TREE_OPERAND (t, 0));
13721
13722 default:
13723 break;
13724 }
13725 return false;
13726 }
13727
13728 #undef RECURSE
13729 #undef integer_valued_real_p
13730
13731 /* Return true if the floating point expression T has an integer value.
13732 We also allow +Inf, -Inf and NaN to be considered integer values.
13733 Return false for signaling NaN.
13734
13735 DEPTH is the current nesting depth of the query. */
13736
13737 bool
13738 integer_valued_real_p (tree t, int depth)
13739 {
13740 if (t == error_mark_node)
13741 return false;
13742
13743 tree_code code = TREE_CODE (t);
13744 switch (TREE_CODE_CLASS (code))
13745 {
13746 case tcc_binary:
13747 case tcc_comparison:
13748 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13749 TREE_OPERAND (t, 1), depth);
13750
13751 case tcc_unary:
13752 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13753
13754 case tcc_constant:
13755 case tcc_declaration:
13756 case tcc_reference:
13757 return integer_valued_real_single_p (t, depth);
13758
13759 default:
13760 break;
13761 }
13762
13763 switch (code)
13764 {
13765 case COND_EXPR:
13766 case SSA_NAME:
13767 return integer_valued_real_single_p (t, depth);
13768
13769 case CALL_EXPR:
13770 {
13771 tree arg0 = (call_expr_nargs (t) > 0
13772 ? CALL_EXPR_ARG (t, 0)
13773 : NULL_TREE);
13774 tree arg1 = (call_expr_nargs (t) > 1
13775 ? CALL_EXPR_ARG (t, 1)
13776 : NULL_TREE);
13777 return integer_valued_real_call_p (get_call_combined_fn (t),
13778 arg0, arg1, depth);
13779 }
13780
13781 default:
13782 return integer_valued_real_invalid_p (t, depth);
13783 }
13784 }
13785
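/* Dispatch example (editor's illustration): for T = (double) i with
   integer i, TREE_CODE (T) is FLOAT_EXPR, whose class is tcc_unary, so
   the query goes to integer_valued_real_unary_p and returns true; for
   T = x + 0.5 the tcc_binary case delegates to
   integer_valued_real_binary_p, which cannot prove integrality.  */
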
13786 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13787 attempt to fold the expression to a constant without modifying TYPE,
13788 OP0 or OP1.
13789
13790 If the expression could be simplified to a constant, then return
13791 the constant. If the expression would not be simplified to a
13792 constant, then return NULL_TREE. */
13793
13794 tree
13795 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13796 {
13797 tree tem = fold_binary (code, type, op0, op1);
13798 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13799 }
13800
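/* Minimal usage sketch (editor's example; the variables are
   hypothetical):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					 two, three);

   SUM is the INTEGER_CST 5.  Folding 2 + x for a non-constant x would
   not produce a TREE_CONSTANT result, so NULL_TREE would be returned
   instead.  fold_unary_to_constant below behaves the same way for
   single-operand codes such as NEGATE_EXPR.  */
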
13801 /* Given the components of a unary expression CODE, TYPE and OP0,
13802 attempt to fold the expression to a constant without modifying
13803 TYPE or OP0.
13804
13805 If the expression could be simplified to a constant, then return
13806 the constant. If the expression would not be simplified to a
13807 constant, then return NULL_TREE. */
13808
13809 tree
13810 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13811 {
13812 tree tem = fold_unary (code, type, op0);
13813 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13814 }
13815
13816 /* If EXP represents referencing an element in a constant string
13817 (either via pointer arithmetic or array indexing), return the
13818 tree representing the value accessed; otherwise return NULL.  */
13819
13820 tree
13821 fold_read_from_constant_string (tree exp)
13822 {
13823 if ((TREE_CODE (exp) == INDIRECT_REF
13824 || TREE_CODE (exp) == ARRAY_REF)
13825 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13826 {
13827 tree exp1 = TREE_OPERAND (exp, 0);
13828 tree index;
13829 tree string;
13830 location_t loc = EXPR_LOCATION (exp);
13831
13832 if (TREE_CODE (exp) == INDIRECT_REF)
13833 string = string_constant (exp1, &index);
13834 else
13835 {
13836 tree low_bound = array_ref_low_bound (exp);
13837 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13838
13839 /* Optimize the special case of a zero lower bound.
13840
13841 We convert the low_bound to sizetype to avoid some problems
13842 with constant folding. (E.g. suppose the lower bound is 1,
13843 and its mode is QI.  Without the conversion, (ARRAY
13844 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13845 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13846 if (! integer_zerop (low_bound))
13847 index = size_diffop_loc (loc, index,
13848 fold_convert_loc (loc, sizetype, low_bound));
13849
13850 string = exp1;
13851 }
13852
13853 if (string
13854 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13855 && TREE_CODE (string) == STRING_CST
13856 && TREE_CODE (index) == INTEGER_CST
13857 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13858 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13859 == MODE_INT)
13860 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13861 return build_int_cst_type (TREE_TYPE (exp),
13862 (TREE_STRING_POINTER (string)
13863 [TREE_INT_CST_LOW (index)]));
13864 }
13865 return NULL;
13866 }
13867
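/* Example (editor's illustration): for the C fragment

     static const char s[] = "abc";
     char c = s[1];

   the ARRAY_REF s[1] passes all the checks above (single-byte integer
   mode, constant index below TREE_STRING_LENGTH) and folds to the
   INTEGER_CST 'b'.  A non-constant or out-of-range index leaves the
   reference alone and NULL is returned.  */
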
13868 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13869 an integer, real, or fixed-point constant.
13870
13871 TYPE is the type of the result. */
13872
13873 static tree
13874 fold_negate_const (tree arg0, tree type)
13875 {
13876 tree t = NULL_TREE;
13877
13878 switch (TREE_CODE (arg0))
13879 {
13880 case INTEGER_CST:
13881 {
13882 bool overflow;
13883 wide_int val = wi::neg (arg0, &overflow);
13884 t = force_fit_type (type, val, 1,
13885 (overflow | TREE_OVERFLOW (arg0))
13886 && !TYPE_UNSIGNED (type));
13887 break;
13888 }
13889
13890 case REAL_CST:
13891 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13892 break;
13893
13894 case FIXED_CST:
13895 {
13896 FIXED_VALUE_TYPE f;
13897 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13898 &(TREE_FIXED_CST (arg0)), NULL,
13899 TYPE_SATURATING (type));
13900 t = build_fixed (type, f);
13901 /* Propagate overflow flags. */
13902 if (overflow_p | TREE_OVERFLOW (arg0))
13903 TREE_OVERFLOW (t) = 1;
13904 break;
13905 }
13906
13907 default:
13908 gcc_unreachable ();
13909 }
13910
13911 return t;
13912 }
13913
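/* Worked example (editor's note): for 32-bit int, negating INT_MIN
   wraps back to INT_MIN, so

     fold_negate_const (int_min_node, integer_type_node)

   yields an INTEGER_CST equal to INT_MIN with TREE_OVERFLOW set, since
   wi::neg reports overflow and the type is signed.  For unsigned types
   the overflow indication is deliberately dropped.  (int_min_node here
   is a hypothetical INTEGER_CST holding INT_MIN.)  */
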
13914 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13915 an integer constant or real constant.
13916
13917 TYPE is the type of the result. */
13918
13919 tree
13920 fold_abs_const (tree arg0, tree type)
13921 {
13922 tree t = NULL_TREE;
13923
13924 switch (TREE_CODE (arg0))
13925 {
13926 case INTEGER_CST:
13927 {
13928 /* If the value is unsigned or non-negative, then the absolute value
13929 is the same as the ordinary value. */
13930 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13931 t = arg0;
13932
13933 /* If the value is negative, then the absolute value is
13934 its negation. */
13935 else
13936 {
13937 bool overflow;
13938 wide_int val = wi::neg (arg0, &overflow);
13939 t = force_fit_type (type, val, -1,
13940 overflow | TREE_OVERFLOW (arg0));
13941 }
13942 }
13943 break;
13944
13945 case REAL_CST:
13946 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13947 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13948 else
13949 t = arg0;
13950 break;
13951
13952 default:
13953 gcc_unreachable ();
13954 }
13955
13956 return t;
13957 }
13958
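/* Example (editor's sketch): abs of INT_MIN is the one INTEGER_CST case
   where the negation overflows, producing INT_MIN with TREE_OVERFLOW
   set.  For REAL_CSTs only the sign bit is examined, so -0.0 folds to
   +0.0 while non-negative constants are returned unchanged.  */
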
13959 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13960 constant. TYPE is the type of the result. */
13961
13962 static tree
13963 fold_not_const (const_tree arg0, tree type)
13964 {
13965 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13966
13967 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13968 }
13969
13970 /* Given CODE, a relational operator, the target type, TYPE and two
13971 constant operands OP0 and OP1, return the result of the
13972 relational operation. If the result is not a compile time
13973 constant, then return NULL_TREE. */
13974
13975 static tree
13976 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13977 {
13978 int result, invert;
13979
13980 /* From here on, the only cases we handle are when the result is
13981 known to be a constant. */
13982
13983 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13984 {
13985 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13986 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13987
13988 /* Handle the cases where either operand is a NaN. */
13989 if (real_isnan (c0) || real_isnan (c1))
13990 {
13991 switch (code)
13992 {
13993 case EQ_EXPR:
13994 case ORDERED_EXPR:
13995 result = 0;
13996 break;
13997
13998 case NE_EXPR:
13999 case UNORDERED_EXPR:
14000 case UNLT_EXPR:
14001 case UNLE_EXPR:
14002 case UNGT_EXPR:
14003 case UNGE_EXPR:
14004 case UNEQ_EXPR:
14005 result = 1;
14006 break;
14007
14008 case LT_EXPR:
14009 case LE_EXPR:
14010 case GT_EXPR:
14011 case GE_EXPR:
14012 case LTGT_EXPR:
14013 if (flag_trapping_math)
14014 return NULL_TREE;
14015 result = 0;
14016 break;
14017
14018 default:
14019 gcc_unreachable ();
14020 }
14021
14022 return constant_boolean_node (result, type);
14023 }
14024
14025 return constant_boolean_node (real_compare (code, c0, c1), type);
14026 }
14027
14028 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14029 {
14030 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14031 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14032 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14033 }
14034
14035 /* Handle equality/inequality of complex constants. */
14036 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14037 {
14038 tree rcond = fold_relational_const (code, type,
14039 TREE_REALPART (op0),
14040 TREE_REALPART (op1));
14041 tree icond = fold_relational_const (code, type,
14042 TREE_IMAGPART (op0),
14043 TREE_IMAGPART (op1));
14044 if (code == EQ_EXPR)
14045 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14046 else if (code == NE_EXPR)
14047 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14048 else
14049 return NULL_TREE;
14050 }
14051
14052 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14053 {
14054 if (!VECTOR_TYPE_P (type))
14055 {
14056 /* Have vector comparison with scalar boolean result. */
14057 bool result = true;
14058 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14059 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
14060 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
14061 {
14062 tree elem0 = VECTOR_CST_ELT (op0, i);
14063 tree elem1 = VECTOR_CST_ELT (op1, i);
14064 tree tmp = fold_relational_const (code, type, elem0, elem1);
14065 result &= integer_onep (tmp);
14066 }
14067 if (code == NE_EXPR)
14068 result = !result;
14069 return constant_boolean_node (result, type);
14070 }
14071 unsigned count = VECTOR_CST_NELTS (op0);
14072 tree *elts = XALLOCAVEC (tree, count);
14073 gcc_assert (VECTOR_CST_NELTS (op1) == count
14074 && TYPE_VECTOR_SUBPARTS (type) == count);
14075
14076 for (unsigned i = 0; i < count; i++)
14077 {
14078 tree elem_type = TREE_TYPE (type);
14079 tree elem0 = VECTOR_CST_ELT (op0, i);
14080 tree elem1 = VECTOR_CST_ELT (op1, i);
14081
14082 tree tem = fold_relational_const (code, elem_type,
14083 elem0, elem1);
14084
14085 if (tem == NULL_TREE)
14086 return NULL_TREE;
14087
14088 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
14089 }
14090
14091 return build_vector (type, elts);
14092 }
14093
14094 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14095
14096 To compute GT, swap the arguments and do LT.
14097 To compute GE, do LT and invert the result.
14098 To compute LE, swap the arguments, do LT and invert the result.
14099 To compute NE, do EQ and invert the result.
14100
14101 Therefore, the code below must handle only EQ and LT. */
14102
14103 if (code == LE_EXPR || code == GT_EXPR)
14104 {
14105 std::swap (op0, op1);
14106 code = swap_tree_comparison (code);
14107 }
14108
14109 /* Note that it is safe to invert for real values here because we
14110 have already handled the one case in which it matters.  */
14111
14112 invert = 0;
14113 if (code == NE_EXPR || code == GE_EXPR)
14114 {
14115 invert = 1;
14116 code = invert_tree_comparison (code, false);
14117 }
14118
14119 /* Compute a result for LT or EQ if args permit;
14120 otherwise return NULL_TREE.  */
14121 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14122 {
14123 if (code == EQ_EXPR)
14124 result = tree_int_cst_equal (op0, op1);
14125 else
14126 result = tree_int_cst_lt (op0, op1);
14127 }
14128 else
14129 return NULL_TREE;
14130
14131 if (invert)
14132 result ^= 1;
14133 return constant_boolean_node (result, type);
14134 }
14135
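/* Example (editor's illustration): with OP0 = NaN and OP1 = 1.0,

     EQ_EXPR        folds to false (NaN compares unequal to everything),
     UNORDERED_EXPR folds to true,
     LT_EXPR        folds to false only when !flag_trapping_math; with
		    trapping math it stays unfolded, since the ordered
		    comparison may raise FE_INVALID at run time.  */
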
14136 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14137 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14138 itself. */
14139
14140 tree
14141 fold_build_cleanup_point_expr (tree type, tree expr)
14142 {
14143 /* If the expression does not have side effects then we don't have to wrap
14144 it with a cleanup point expression. */
14145 if (!TREE_SIDE_EFFECTS (expr))
14146 return expr;
14147
14148 /* If the expression is a RETURN_EXPR, look at the expression it returns:
14149 if that operand has no side effects, or if it is a MODIFY_EXPR whose
14150 right hand side has no side effects, we don't need to wrap the
14151 expression in a cleanup point expression.  Note we don't check the left
14152 hand side of the modify because it should always be the return decl.  */
14153 if (TREE_CODE (expr) == RETURN_EXPR)
14154 {
14155 tree op = TREE_OPERAND (expr, 0);
14156 if (!op || !TREE_SIDE_EFFECTS (op))
14157 return expr;
14158 op = TREE_OPERAND (op, 1);
14159 if (!TREE_SIDE_EFFECTS (op))
14160 return expr;
14161 }
14162
14163 return build1 (CLEANUP_POINT_EXPR, type, expr);
14164 }
14165
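/* Example (editor's sketch): for "return x;" the RETURN_EXPR's operand
   is a MODIFY_EXPR whose right-hand side X has no side effects, so the
   expression is returned unwrapped; for "return f ();" the right-hand
   side is a call with side effects, so a CLEANUP_POINT_EXPR is built
   around it.  */
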
14166 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14167 of an indirection through OP0, or NULL_TREE if no simplification is
14168 possible. */
14169
14170 tree
14171 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14172 {
14173 tree sub = op0;
14174 tree subtype;
14175
14176 STRIP_NOPS (sub);
14177 subtype = TREE_TYPE (sub);
14178 if (!POINTER_TYPE_P (subtype))
14179 return NULL_TREE;
14180
14181 if (TREE_CODE (sub) == ADDR_EXPR)
14182 {
14183 tree op = TREE_OPERAND (sub, 0);
14184 tree optype = TREE_TYPE (op);
14185 /* *&CONST_DECL -> to the value of the const decl. */
14186 if (TREE_CODE (op) == CONST_DECL)
14187 return DECL_INITIAL (op);
14188 /* *&p => p; make sure to handle *&"str"[cst] here. */
14189 if (type == optype)
14190 {
14191 tree fop = fold_read_from_constant_string (op);
14192 if (fop)
14193 return fop;
14194 else
14195 return op;
14196 }
14197 /* *(foo *)&fooarray => fooarray[0] */
14198 else if (TREE_CODE (optype) == ARRAY_TYPE
14199 && type == TREE_TYPE (optype)
14200 && (!in_gimple_form
14201 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14202 {
14203 tree type_domain = TYPE_DOMAIN (optype);
14204 tree min_val = size_zero_node;
14205 if (type_domain && TYPE_MIN_VALUE (type_domain))
14206 min_val = TYPE_MIN_VALUE (type_domain);
14207 if (in_gimple_form
14208 && TREE_CODE (min_val) != INTEGER_CST)
14209 return NULL_TREE;
14210 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14211 NULL_TREE, NULL_TREE);
14212 }
14213 /* *(foo *)&complexfoo => __real__ complexfoo */
14214 else if (TREE_CODE (optype) == COMPLEX_TYPE
14215 && type == TREE_TYPE (optype))
14216 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14217 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14218 else if (TREE_CODE (optype) == VECTOR_TYPE
14219 && type == TREE_TYPE (optype))
14220 {
14221 tree part_width = TYPE_SIZE (type);
14222 tree index = bitsize_int (0);
14223 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14224 }
14225 }
14226
14227 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14228 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14229 {
14230 tree op00 = TREE_OPERAND (sub, 0);
14231 tree op01 = TREE_OPERAND (sub, 1);
14232
14233 STRIP_NOPS (op00);
14234 if (TREE_CODE (op00) == ADDR_EXPR)
14235 {
14236 tree op00type;
14237 op00 = TREE_OPERAND (op00, 0);
14238 op00type = TREE_TYPE (op00);
14239
14240 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14241 if (TREE_CODE (op00type) == VECTOR_TYPE
14242 && type == TREE_TYPE (op00type))
14243 {
14244 tree part_width = TYPE_SIZE (type);
14245 unsigned HOST_WIDE_INT max_offset
14246 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14247 * TYPE_VECTOR_SUBPARTS (op00type));
14248 if (tree_int_cst_sign_bit (op01) == 0
14249 && compare_tree_int (op01, max_offset) == -1)
14250 {
14251 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
14252 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14253 tree index = bitsize_int (indexi);
14254 return fold_build3_loc (loc,
14255 BIT_FIELD_REF, type, op00,
14256 part_width, index);
14257 }
14258 }
14259 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14260 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14261 && type == TREE_TYPE (op00type))
14262 {
14263 tree size = TYPE_SIZE_UNIT (type);
14264 if (tree_int_cst_equal (size, op01))
14265 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14266 }
14267 /* ((foo *)&fooarray)[1] => fooarray[1] */
14268 else if (TREE_CODE (op00type) == ARRAY_TYPE
14269 && type == TREE_TYPE (op00type))
14270 {
14271 tree type_domain = TYPE_DOMAIN (op00type);
14272 tree min_val = size_zero_node;
14273 if (type_domain && TYPE_MIN_VALUE (type_domain))
14274 min_val = TYPE_MIN_VALUE (type_domain);
14275 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14276 TYPE_SIZE_UNIT (type));
14277 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14278 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14279 NULL_TREE, NULL_TREE);
14280 }
14281 }
14282 }
14283
14284 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14285 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14286 && type == TREE_TYPE (TREE_TYPE (subtype))
14287 && (!in_gimple_form
14288 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14289 {
14290 tree type_domain;
14291 tree min_val = size_zero_node;
14292 sub = build_fold_indirect_ref_loc (loc, sub);
14293 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14294 if (type_domain && TYPE_MIN_VALUE (type_domain))
14295 min_val = TYPE_MIN_VALUE (type_domain);
14296 if (in_gimple_form
14297 && TREE_CODE (min_val) != INTEGER_CST)
14298 return NULL_TREE;
14299 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14300 NULL_TREE);
14301 }
14302
14303 return NULL_TREE;
14304 }
14305
14306 /* Builds an expression for an indirection through T, simplifying some
14307 cases. */
14308
14309 tree
14310 build_fold_indirect_ref_loc (location_t loc, tree t)
14311 {
14312 tree type = TREE_TYPE (TREE_TYPE (t));
14313 tree sub = fold_indirect_ref_1 (loc, type, t);
14314
14315 if (sub)
14316 return sub;
14317
14318 return build1_loc (loc, INDIRECT_REF, type, t);
14319 }
14320
14321 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14322
14323 tree
14324 fold_indirect_ref_loc (location_t loc, tree t)
14325 {
14326 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14327
14328 if (sub)
14329 return sub;
14330 else
14331 return t;
14332 }
14333
14334 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14335 whose result is ignored. The type of the returned tree need not be
14336 the same as the original expression. */
14337
14338 tree
14339 fold_ignored_result (tree t)
14340 {
14341 if (!TREE_SIDE_EFFECTS (t))
14342 return integer_zero_node;
14343
14344 for (;;)
14345 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14346 {
14347 case tcc_unary:
14348 t = TREE_OPERAND (t, 0);
14349 break;
14350
14351 case tcc_binary:
14352 case tcc_comparison:
14353 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14354 t = TREE_OPERAND (t, 0);
14355 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14356 t = TREE_OPERAND (t, 1);
14357 else
14358 return t;
14359 break;
14360
14361 case tcc_expression:
14362 switch (TREE_CODE (t))
14363 {
14364 case COMPOUND_EXPR:
14365 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14366 return t;
14367 t = TREE_OPERAND (t, 0);
14368 break;
14369
14370 case COND_EXPR:
14371 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14372 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14373 return t;
14374 t = TREE_OPERAND (t, 0);
14375 break;
14376
14377 default:
14378 return t;
14379 }
14380 break;
14381
14382 default:
14383 return t;
14384 }
14385 }
14386
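/* Example (editor's illustration) of stripping when the result is
   ignored:

     (void) (x + y)         ==>  integer_zero_node  (no side effects)
     (void) (x++ + y)       ==>  x++                (one operand pure)
     (void) (f () + g ())   ==>  unchanged          (both have effects)  */
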
14387 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14388
14389 tree
14390 round_up_loc (location_t loc, tree value, unsigned int divisor)
14391 {
14392 tree div = NULL_TREE;
14393
14394 if (divisor == 1)
14395 return value;
14396
14397 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14398 have to do anything.  Only do this check when VALUE is not a
14399 constant, because for a constant the check is more expensive than
14400 simply doing the rounding.  */
14401 if (TREE_CODE (value) != INTEGER_CST)
14402 {
14403 div = build_int_cst (TREE_TYPE (value), divisor);
14404
14405 if (multiple_of_p (TREE_TYPE (value), value, div))
14406 return value;
14407 }
14408
14409 /* If divisor is a power of two, simplify this to bit manipulation. */
14410 if (divisor == (divisor & -divisor))
14411 {
14412 if (TREE_CODE (value) == INTEGER_CST)
14413 {
14414 wide_int val = value;
14415 bool overflow_p;
14416
14417 if ((val & (divisor - 1)) == 0)
14418 return value;
14419
14420 overflow_p = TREE_OVERFLOW (value);
14421 val += divisor - 1;
14422 val &= - (int) divisor;
14423 if (val == 0)
14424 overflow_p = true;
14425
14426 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14427 }
14428 else
14429 {
14430 tree t;
14431
14432 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14433 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14434 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14435 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14436 }
14437 }
14438 else
14439 {
14440 if (!div)
14441 div = build_int_cst (TREE_TYPE (value), divisor);
14442 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14443 value = size_binop_loc (loc, MULT_EXPR, value, div);
14444 }
14445
14446 return value;
14447 }
14448
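/* Sketch of the power-of-two fast path (editor's illustration, in plain
   integer arithmetic rather than trees):

     round_up (value, 8)  ==>  (value + 7) & -8

   so 13 becomes 16 and 16 stays 16.  The VAL == 0 test above flags
   overflow when VALUE + DIVISOR - 1 wraps past the type's maximum.  */
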
14449 /* Likewise, but round down. */
14450
14451 tree
14452 round_down_loc (location_t loc, tree value, int divisor)
14453 {
14454 tree div = NULL_TREE;
14455
14456 gcc_assert (divisor > 0);
14457 if (divisor == 1)
14458 return value;
14459
14460 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14461 have to do anything.  Only do this check when VALUE is not a
14462 constant, because for a constant the check is more expensive than
14463 simply doing the rounding.  */
14464 if (TREE_CODE (value) != INTEGER_CST)
14465 {
14466 div = build_int_cst (TREE_TYPE (value), divisor);
14467
14468 if (multiple_of_p (TREE_TYPE (value), value, div))
14469 return value;
14470 }
14471
14472 /* If divisor is a power of two, simplify this to bit manipulation. */
14473 if (divisor == (divisor & -divisor))
14474 {
14475 tree t;
14476
14477 t = build_int_cst (TREE_TYPE (value), -divisor);
14478 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14479 }
14480 else
14481 {
14482 if (!div)
14483 div = build_int_cst (TREE_TYPE (value), divisor);
14484 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14485 value = size_binop_loc (loc, MULT_EXPR, value, div);
14486 }
14487
14488 return value;
14489 }
14490
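/* Likewise (editor's illustration): rounding down to a power of two is
   a single mask,

     round_down (value, 8)  ==>  value & -8

   so 13 becomes 8 while multiples of 8 pass through unchanged.  */
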
14491 /* Returns the pointer to the base of the object addressed by EXP and
14492 extracts the information about the offset of the access, storing it
14493 in PBITPOS and POFFSET.  */
14494
14495 static tree
14496 split_address_to_core_and_offset (tree exp,
14497 HOST_WIDE_INT *pbitpos, tree *poffset)
14498 {
14499 tree core;
14500 machine_mode mode;
14501 int unsignedp, reversep, volatilep;
14502 HOST_WIDE_INT bitsize;
14503 location_t loc = EXPR_LOCATION (exp);
14504
14505 if (TREE_CODE (exp) == ADDR_EXPR)
14506 {
14507 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14508 poffset, &mode, &unsignedp, &reversep,
14509 &volatilep, false);
14510 core = build_fold_addr_expr_loc (loc, core);
14511 }
14512 else
14513 {
14514 core = exp;
14515 *pbitpos = 0;
14516 *poffset = NULL_TREE;
14517 }
14518
14519 return core;
14520 }
14521
14522 /* Returns true if addresses of E1 and E2 differ by a constant, false
14523 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14524
14525 bool
14526 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14527 {
14528 tree core1, core2;
14529 HOST_WIDE_INT bitpos1, bitpos2;
14530 tree toffset1, toffset2, tdiff, type;
14531
14532 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14533 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14534
14535 if (bitpos1 % BITS_PER_UNIT != 0
14536 || bitpos2 % BITS_PER_UNIT != 0
14537 || !operand_equal_p (core1, core2, 0))
14538 return false;
14539
14540 if (toffset1 && toffset2)
14541 {
14542 type = TREE_TYPE (toffset1);
14543 if (type != TREE_TYPE (toffset2))
14544 toffset2 = fold_convert (type, toffset2);
14545
14546 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14547 if (!cst_and_fits_in_hwi (tdiff))
14548 return false;
14549
14550 *diff = int_cst_value (tdiff);
14551 }
14552 else if (toffset1 || toffset2)
14553 {
14554 /* If only one of the offsets is non-constant, the difference cannot
14555 be a constant. */
14556 return false;
14557 }
14558 else
14559 *diff = 0;
14560
14561 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14562 return true;
14563 }
14564
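/* Example (editor's sketch): for "int a[10];" the addresses

     e1 = &a[3], e2 = &a[1]

   both split to the core &a with constant bit positions 96 and 32, so
   *DIFF is set to (96 - 32) / BITS_PER_UNIT = 8 bytes and true is
   returned.  An offset that does not fold to a constant makes the
   function return false.  */
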
14565 /* Return OFF converted to a pointer offset type suitable as offset for
14566 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14567 tree
14568 convert_to_ptrofftype_loc (location_t loc, tree off)
14569 {
14570 return fold_convert_loc (loc, sizetype, off);
14571 }
14572
14573 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14574 tree
14575 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14576 {
14577 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14578 ptr, convert_to_ptrofftype_loc (loc, off));
14579 }
14580
14581 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14582 tree
14583 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14584 {
14585 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14586 ptr, size_int (off));
14587 }
14588
14589 /* Return a char pointer for a C string if it is a string constant
14590 or the sum of a string constant and an integer constant.  */
14591
14592 const char *
14593 c_getstr (tree src)
14594 {
14595 tree offset_node;
14596
14597 src = string_constant (src, &offset_node);
14598 if (src == 0)
14599 return 0;
14600
14601 if (offset_node == 0)
14602 return TREE_STRING_POINTER (src);
14603 else if (!tree_fits_uhwi_p (offset_node)
14604 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14605 return 0;
14606
14607 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
14608 }
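
/* Example (editor's illustration): for SRC representing "hello" + 2,
   string_constant finds the STRING_CST with constant offset 2, which is
   within TREE_STRING_LENGTH, so a host pointer to "llo" is returned; a
   non-constant or out-of-range offset yields 0.  */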