re PR target/70333 (Test miscompiled with -O0.)
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
@@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "tree-ssa-operands.h"
54 #include "optabs-query.h"
55 #include "cgraph.h"
56 #include "diagnostic-core.h"
57 #include "flags.h"
58 #include "alias.h"
59 #include "fold-const.h"
60 #include "fold-const-call.h"
61 #include "stor-layout.h"
62 #include "calls.h"
63 #include "tree-iterator.h"
64 #include "expr.h"
65 #include "intl.h"
66 #include "langhooks.h"
67 #include "tree-eh.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "builtins.h"
71 #include "generic-match.h"
72 #include "gimple-fold.h"
73 #include "params.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-ssanames.h"
79
/* Fallback for targets that do not define LOAD_EXTEND_OP: report that
   the extension performed by load instructions is unknown.  */
#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(M) UNKNOWN
#endif

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
87
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.

   Bit 0 means "less than", bit 1 "equal", bit 2 "greater than" and
   bit 3 "unordered"; each composite code is the inclusive OR of the
   outcomes it accepts (e.g. COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ,
   COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD).  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
109
/* Forward declarations for the static helpers defined later in this
   file.  */

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (location_t, tree, tree, enum tree_code,
			tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    machine_mode *, int *, int *, int *,
				    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
152
153
154 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
155 Otherwise, return LOC. */
156
157 static location_t
158 expr_location_or (tree t, location_t loc)
159 {
160 location_t tloc = EXPR_LOCATION (t);
161 return tloc == UNKNOWN_LOCATION ? loc : tloc;
162 }
163
164 /* Similar to protected_set_expr_location, but never modify x in place,
165 if location can and needs to be set, unshare it. */
166
167 static inline tree
168 protected_set_expr_location_unshare (tree x, location_t loc)
169 {
170 if (CAN_HAVE_LOCATION_P (x)
171 && EXPR_LOCATION (x) != loc
172 && !(TREE_CODE (x) == SAVE_EXPR
173 || TREE_CODE (x) == TARGET_EXPR
174 || TREE_CODE (x) == BIND_EXPR))
175 {
176 x = copy_node (x);
177 SET_EXPR_LOCATION (x, loc);
178 }
179 return x;
180 }
181 \f
182 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
183 division and returns the quotient. Otherwise returns
184 NULL_TREE. */
185
186 tree
187 div_if_zero_remainder (const_tree arg1, const_tree arg2)
188 {
189 widest_int quo;
190
191 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
192 SIGNED, &quo))
193 return wide_int_to_tree (TREE_TYPE (arg1), quo);
194
195 return NULL_TREE;
196 }
197 \f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  Kept as a counter so deferral requests may nest (see
   fold_defer_overflow_warnings / fold_undefer_overflow_warnings).  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
220
221 /* Start deferring overflow warnings. We could use a stack here to
222 permit nested calls, but at present it is not necessary. */
223
224 void
225 fold_defer_overflow_warnings (void)
226 {
227 ++fold_deferring_overflow_warnings;
228 }
229
230 /* Stop deferring overflow warnings. If there is a pending warning,
231 and ISSUE is true, then issue the warning if appropriate. STMT is
232 the statement with which the warning should be associated (used for
233 location information); STMT may be NULL. CODE is the level of the
234 warning--a warn_strict_overflow_code value. This function will use
235 the smaller of CODE and the deferred code when deciding whether to
236 issue the warning. CODE may be zero to mean to always use the
237 deferred code. */
238
239 void
240 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
241 {
242 const char *warnmsg;
243 location_t locus;
244
245 gcc_assert (fold_deferring_overflow_warnings > 0);
246 --fold_deferring_overflow_warnings;
247 if (fold_deferring_overflow_warnings > 0)
248 {
249 if (fold_deferred_overflow_warning != NULL
250 && code != 0
251 && code < (int) fold_deferred_overflow_code)
252 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
253 return;
254 }
255
256 warnmsg = fold_deferred_overflow_warning;
257 fold_deferred_overflow_warning = NULL;
258
259 if (!issue || warnmsg == NULL)
260 return;
261
262 if (gimple_no_warning_p (stmt))
263 return;
264
265 /* Use the smallest code level when deciding to issue the
266 warning. */
267 if (code == 0 || code > (int) fold_deferred_overflow_code)
268 code = fold_deferred_overflow_code;
269
270 if (!issue_strict_overflow_warning (code))
271 return;
272
273 if (stmt == NULL)
274 locus = input_location;
275 else
276 locus = gimple_location (stmt);
277 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
278 }
279
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  /* ISSUE == false makes fold_undefer_overflow_warnings discard any
     pending warning instead of emitting it.  */
  fold_undefer_overflow_warnings (false, NULL, 0);
}
288
289 /* Whether we are deferring overflow warnings. */
290
291 bool
292 fold_deferring_overflow_warnings_p (void)
293 {
294 return fold_deferring_overflow_warnings > 0;
295 }
296
297 /* This is called when we fold something based on the fact that signed
298 overflow is undefined. */
299
300 static void
301 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
302 {
303 if (fold_deferring_overflow_warnings > 0)
304 {
305 if (fold_deferred_overflow_warning == NULL
306 || wc < fold_deferred_overflow_code)
307 {
308 fold_deferred_overflow_warning = gmsgid;
309 fold_deferred_overflow_code = wc;
310 }
311 }
312 else if (issue_strict_overflow_warning (wc))
313 warning (OPT_Wstrict_overflow, gmsgid);
314 }
315 \f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    /* Unconditionally odd functions.  */
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    /* These honor the dynamic floating-point rounding mode, so they
       are only odd when -frounding-math is off.  */
    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
360
361 /* Check whether we may negate an integer constant T without causing
362 overflow. */
363
364 bool
365 may_negate_without_overflow_p (const_tree t)
366 {
367 tree type;
368
369 gcc_assert (TREE_CODE (t) == INTEGER_CST);
370
371 type = TREE_TYPE (t);
372 if (TYPE_UNSIGNED (type))
373 return false;
374
375 return !wi::only_sign_bit_p (t);
376 }
377
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      /* -~A is folded to A + 1, which only holds under wrapping
	 overflow.  */
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Otherwise every element must be individually negatable.  */
	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow while negating one operand it does
         if n is a power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && ! integer_pow2p (TREE_OPERAND (t, 0)))
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && ! integer_pow2p (TREE_OPERAND (t, 1)))))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  /* Only the shift by precision-1 (isolating the sign bit) is
	     handled; see the matching case in fold_negate_expr.  */
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
532
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      /* Keep the folded constant unless the negation introduced a new
	 overflow that trapping arithmetic or the signed-integer-overflow
	 sanitizer would need to observe at run time.  */
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	/* Negate a complex constant component-wise; both parts must
	   fold for the whole to fold.  */
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	/* Negate a vector constant element-wise; give up if any
	   element fails to fold.  */
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      /* --A -> A, unless the sanitizer needs to see the inner
	 negation overflow.  */
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* Negate a product or real quotient by negating whichever
	 operand allows it, preferring the second.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      /* Performing the precision-1 shift in the opposite
		 signedness yields exactly the negated value of the
		 original 0 / -1 (resp. 0 / 1) result.  */
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
734
735 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
736 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
737 return NULL_TREE. */
738
739 static tree
740 negate_expr (tree t)
741 {
742 tree type, tem;
743 location_t loc;
744
745 if (t == NULL_TREE)
746 return NULL_TREE;
747
748 loc = EXPR_LOCATION (t);
749 type = TREE_TYPE (t);
750 STRIP_SIGN_NOPS (t);
751
752 tem = fold_negate_expr (loc, t);
753 if (!tem)
754 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
755 return fold_convert_loc (loc, type, tem);
756 }
757 \f
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (location_t loc, tree in, tree type, enum tree_code code,
	    tree *conp, tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* NEG1_P records whether the second operand is effectively
	 subtracted; the NEG_*_P flags remember which extracted part
	 inherits that negation.  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	{
	  /* Convert to TYPE before negating a pointer type expr.  */
	  if (var && POINTER_TYPE_P (TREE_TYPE (var)))
	    var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      /* Negating the whole decomposition: a literal flips between the
	 LITP and MINUS_LITP slots, everything else is negated.  */
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      /* Convert to TYPE before negating a pointer type expr.  */
      if (var && POINTER_TYPE_P (TREE_TYPE (var)))
	var = fold_convert_loc (loc, type, var);
      var = negate_expr (var);
    }

  return var;
}
874
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  /* X + (-Y) is built as X - Y to keep NEGATE_EXPRs out of
	     the result.  */
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      /* Build without folding to avoid re-entering fold on the
	 operation we are reassociating.  */
      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
922 \f
923 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
924 for use in int_const_binop, size_binop and size_diffop. */
925
926 static bool
927 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
928 {
929 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
930 return false;
931 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
932 return false;
933
934 switch (code)
935 {
936 case LSHIFT_EXPR:
937 case RSHIFT_EXPR:
938 case LROTATE_EXPR:
939 case RROTATE_EXPR:
940 return true;
941
942 default:
943 break;
944 }
945
946 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
947 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
948 && TYPE_MODE (type1) == TYPE_MODE (type2);
949 }
950
951
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  OVERFLOWABLE is passed on to
   force_fit_type when building the result.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  /* Extend or truncate PARG2 to ARG1's precision, according to
     PARG2's own signedness.  */
  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      /* A negative shift count means a shift in the opposite
	 direction by the absolute count.  */
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      /* Likewise, a negative rotate count rotates the other way.  */
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    /* Division and modulus by zero cannot be folded; return
       NULL_TREE in all those cases.  */
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  /* Build the result tree, propagating any overflow from the
     operation itself and any TREE_OVERFLOW already on the inputs.  */
  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
1104
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new integer constant, or NULL_TREE if the operation cannot
   be folded.  This is the public entry point; it simply forwards to the
   worker with overflowable == 1.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
1110
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* Integer constants: hand off to the wide-int worker, mapping
     POINTER_PLUS_EXPR to a plain addition in the pointer's type.  */
  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  /* Floating-point constants.  */
  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      /* Compute in the internal format, then round to the target mode;
	 INEXACT reports whether the arithmetic itself lost precision.  */
      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  /* Fixed-point constants.  */
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    /* Shift counts are integer constants; pack the wide_int
	       elements into the FIXED_VALUE_TYPE's double_int payload.
	       NOTE(review): f2.mode is hard-wired to SImode here —
	       presumably any integer mode would do for a shift count;
	       confirm against fixed_arithmetic.  */
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  /* Complex constants: fold component-wise, recursing on the parts.  */
  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  /* Floating complex multiply goes through MPC so non-finite
	     inputs are handled per Annex G.  */
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  /* Integral complex multiply: (r1*r2 - i1*i2) + i(r1*i2 + i1*r2).  */
	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi
	      */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch (|br| < |bi|), we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch (|br| >= |bi|), we compute
		     ratio = bi/br;
		     div = (bi * ratio) + br;
		     tr = (ai * ratio) + ar;
		     ti = ai - (ar * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      /* Either part may have come back NULL_TREE from the recursive
	 calls; in that case fall through and fail below.  */
      if (real && imag)
	return build_complex (type, real, imag);
    }

  /* Vector constants: fold element-wise.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
1450
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  TYPE is the type of the result; also
   handles the codes whose result type cannot be derived from the
   operands (COMPLEX_EXPR, vector pack/widen operations).  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  /* Comparisons fold to a boolean-like constant of TYPE.  */
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	/* Each input vector has half as many (wider) elements as the
	   result; narrow every element and concatenate.  */
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    /* TRUNC packs via integer narrowing; FIX_TRUNC converts
	       float elements to integers.  */
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	/* Each input vector has twice as many (narrower) elements as
	   the result.  OFS/SCALE select which input lanes pair up:
	   LO/HI take a contiguous half (endian-dependent), EVEN/ODD
	   take every other lane.  */
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	/* Both inputs laid out back to back in one flat array.  */
	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    /* Widen each operand element to the result element type,
	       then multiply.  */
	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  /* Everything else can use the worker that derives the type from
     the operands.  */
  return const_binop (code, arg1, arg2);
}
1560
/* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      /* Conjugate: negate the imaginary part only.  */
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  /* Only build the result if every element folded.  */
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	/* The input vector has twice as many (narrower) elements as
	   the result; widen the selected (endian-dependent) half.  */
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	/* Step past the half we don't want; the first NELTS entries of
	   ELTS afterwards are the lanes to convert.  */
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	/* Horizontal reduction: fold the whole vector down to a single
	   scalar by repeated application of the scalar operation.  */
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	/* Accumulate into elts[0].  */
	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
1736
1737 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1738 indicates which particular sizetype to create. */
1739
1740 tree
1741 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1742 {
1743 return build_int_cst (sizetype_tab[(int) kind], number);
1744 }
1745 \f
1746 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1747 is a tree code. The type of the result is taken from the operands.
1748 Both must be equivalent integer types, ala int_binop_types_match_p.
1749 If the operands are constant, so is the result. */
1750
1751 tree
1752 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1753 {
1754 tree type = TREE_TYPE (arg0);
1755
1756 if (arg0 == error_mark_node || arg1 == error_mark_node)
1757 return error_mark_node;
1758
1759 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1760 TREE_TYPE (arg1)));
1761
1762 /* Handle the special case of two integer constants faster. */
1763 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1764 {
1765 /* And some specific cases even faster than that. */
1766 if (code == PLUS_EXPR)
1767 {
1768 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1769 return arg1;
1770 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1771 return arg0;
1772 }
1773 else if (code == MINUS_EXPR)
1774 {
1775 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1776 return arg0;
1777 }
1778 else if (code == MULT_EXPR)
1779 {
1780 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1781 return arg1;
1782 }
1783
1784 /* Handle general case of two integer constants. For sizetype
1785 constant calculations we always want to know about overflow,
1786 even in the unsigned case. */
1787 return int_const_binop_1 (code, arg0, arg1, -1);
1788 }
1789
1790 return fold_build2_loc (loc, code, type, arg0, arg1);
1791 }
1792
1793 /* Given two values, either both of sizetype or both of bitsizetype,
1794 compute the difference between the two values. Return the value
1795 in signed type corresponding to the type of the operands. */
1796
1797 tree
1798 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1799 {
1800 tree type = TREE_TYPE (arg0);
1801 tree ctype;
1802
1803 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1804 TREE_TYPE (arg1)));
1805
1806 /* If the type is already signed, just do the simple thing. */
1807 if (!TYPE_UNSIGNED (type))
1808 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1809
1810 if (type == sizetype)
1811 ctype = ssizetype;
1812 else if (type == bitsizetype)
1813 ctype = sbitsizetype;
1814 else
1815 ctype = signed_type_for (type);
1816
1817 /* If either operand is not a constant, do the conversions to the signed
1818 type and subtract. The hardware will do the right thing with any
1819 overflow in the subtraction. */
1820 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1821 return size_binop_loc (loc, MINUS_EXPR,
1822 fold_convert_loc (loc, ctype, arg0),
1823 fold_convert_loc (loc, ctype, arg1));
1824
1825 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1826 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1827 overflow) and negate (which can't either). Special-case a result
1828 of zero while we're here. */
1829 if (tree_int_cst_equal (arg0, arg1))
1830 return build_int_cst (ctype, 0);
1831 else if (tree_int_cst_lt (arg1, arg0))
1832 return fold_convert_loc (loc, ctype,
1833 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1834 else
1835 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1836 fold_convert_loc (loc, ctype,
1837 size_binop_loc (loc,
1838 MINUS_EXPR,
1839 arg1, arg0)));
1840 }
1841 \f
1842 /* A subroutine of fold_convert_const handling conversions of an
1843 INTEGER_CST to another integer type. */
1844
1845 static tree
1846 fold_convert_const_int_from_int (tree type, const_tree arg1)
1847 {
1848 /* Given an integer constant, make new constant with new type,
1849 appropriately sign-extended or truncated. Use widest_int
1850 so that any extension is done according ARG1's type. */
1851 return force_fit_type (type, wi::to_widest (arg1),
1852 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1853 TREE_OVERFLOW (arg1));
1854 }
1855
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  /* First round toward zero; only FIX_TRUNC_EXPR is supported here.  */
  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  Saturate to the violated bound in either case.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      /* TYPE_MAX_VALUE may be absent; skip the upper check then.  */
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  /* In range: do the actual conversion.  */
  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
1929
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  This discards the
     fractional bits, leaving the integer part in TEMP.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  TEMP_TRUNC is the
	 original value with its fractional bits cleared, used below to
	 detect whether any fractional bits were nonzero.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      /* The fractional part occupies the whole representation; the
	 integer part is zero.  */
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  Flag overflow when a
     negative value lands in a type that is "more unsigned" than the
     source.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
1977
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  /* Don't perform the operation if flag_signaling_nans is on
     and the operand is a signaling NaN.  */
  if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
    return NULL_TREE;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  /* Otherwise just carry the source's overflow flag forward.  */
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
2016
2017 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2018 to a floating point type. */
2019
2020 static tree
2021 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2022 {
2023 REAL_VALUE_TYPE value;
2024 tree t;
2025
2026 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2027 t = build_real (type, value);
2028
2029 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2030 return t;
2031 }
2032
2033 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2034 to another fixed-point type. */
2035
2036 static tree
2037 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2038 {
2039 FIXED_VALUE_TYPE value;
2040 tree t;
2041 bool overflow_p;
2042
2043 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2044 TYPE_SATURATING (type));
2045 t = build_fixed (type, value);
2046
2047 /* Propagate overflow flags. */
2048 if (overflow_p | TREE_OVERFLOW (arg1))
2049 TREE_OVERFLOW (t) = 1;
2050 return t;
2051 }
2052
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  /* fixed_convert_from_int takes a double_int, so the source must fit
     in at most two HOST_WIDE_INT elements.  */
  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    /* Single-element constant: sign-extend the low word by hand.  */
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
2082
2083 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2084 to a fixed-point type. */
2085
2086 static tree
2087 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2088 {
2089 FIXED_VALUE_TYPE value;
2090 tree t;
2091 bool overflow_p;
2092
2093 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2094 &TREE_REAL_CST (arg1),
2095 TYPE_SATURATING (type));
2096 t = build_fixed (type, value);
2097
2098 /* Propagate overflow flags. */
2099 if (overflow_p | TREE_OVERFLOW (arg1))
2100 TREE_OVERFLOW (t) = 1;
2101 return t;
2102 }
2103
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.
   Dispatches on the destination type class, then on the kind of the
   source constant.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  /* Identity conversion: nothing to do.  */
  if (TREE_TYPE (arg1) == type)
    return arg1;

  /* Integral-class destinations (including pointers and offsets).  */
  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      /* Vector destination: convert element-wise, requiring matching
	 element counts; fail if any element fails.  */
      if (TREE_CODE (arg1) == VECTOR_CST
	  && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
	{
	  int len = TYPE_VECTOR_SUBPARTS (type);
	  tree elttype = TREE_TYPE (type);
	  tree *v = XALLOCAVEC (tree, len);
	  for (int i = 0; i < len; ++i)
	    {
	      tree elt = VECTOR_CST_ELT (arg1, i);
	      tree cvt = fold_convert_const (code, elttype, elt);
	      if (cvt == NULL_TREE)
		return NULL_TREE;
	      v[i] = cvt;
	    }
	  return build_vector (type, v);
	}
    }
  return NULL_TREE;
}
2162
2163 /* Construct a vector of zero elements of vector type TYPE. */
2164
2165 static tree
2166 build_zero_vector (tree type)
2167 {
2168 tree t;
2169
2170 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2171 return build_vector_from_val (type, t);
2172 }
2173
2174 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2175
2176 bool
2177 fold_convertible_p (const_tree type, const_tree arg)
2178 {
2179 tree orig = TREE_TYPE (arg);
2180
2181 if (type == orig)
2182 return true;
2183
2184 if (TREE_CODE (arg) == ERROR_MARK
2185 || TREE_CODE (type) == ERROR_MARK
2186 || TREE_CODE (orig) == ERROR_MARK)
2187 return false;
2188
2189 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2190 return true;
2191
2192 switch (TREE_CODE (type))
2193 {
2194 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2195 case POINTER_TYPE: case REFERENCE_TYPE:
2196 case OFFSET_TYPE:
2197 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2198 || TREE_CODE (orig) == OFFSET_TYPE);
2199
2200 case REAL_TYPE:
2201 case FIXED_POINT_TYPE:
2202 case COMPLEX_TYPE:
2203 case VECTOR_TYPE:
2204 case VOID_TYPE:
2205 return TREE_CODE (type) == TREE_CODE (orig);
2206
2207 default:
2208 return false;
2209 }
2210 }
2211
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.
   Dispatches on the destination TYPE's code; constant operands are first
   handed to fold_convert_const, and only if that fails is a conversion
   tree node built.  Malformed inputs yield error_mark_node.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      /* Try constant folding first; an INTEGER_CST may fold away
         entirely.  */
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      /* A complex source converts via its real part.  */
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      /* The only remaining legal source is a same-size vector, which is
         reinterpreted bitwise.  */
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case REAL_TYPE:
      /* Try constant folding with the conversion code appropriate to the
         constant's kind.  */
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      /* Otherwise build the conversion node matching the source kind.  */
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          /* Complex converts via its real part.  */
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          /* Unlike the other cases, a successfully folded constant exits
             through the common location-fixup tail below.  */
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          /* Scalar -> complex: the scalar becomes the real part, the
             imaginary part is zero.  */
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                              fold_convert_loc (loc, TREE_TYPE (type), arg),
                              fold_convert_loc (loc, TREE_TYPE (type),
                                            integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            /* If ARG is already a COMPLEX_EXPR, convert its two operands
               directly without introducing a SAVE_EXPR.  */
            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                      TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                      TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            /* Otherwise wrap ARG in a SAVE_EXPR so it is evaluated only
               once while both parts are extracted.  */
            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      /* Only same-size sources may be reinterpreted as vectors.  */
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  /* NOTE(review): protected_set_expr_location_unshare may return an
     unshared copy; its return value is discarded here and TEM is returned
     instead — confirm this matches the intended semantics upstream.  */
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
2384 \f
2385 /* Return false if expr can be assumed not to be an lvalue, true
2386 otherwise. */
2387
2388 static bool
2389 maybe_lvalue_p (const_tree x)
2390 {
2391 /* We only need to wrap lvalue tree codes. */
2392 switch (TREE_CODE (x))
2393 {
2394 case VAR_DECL:
2395 case PARM_DECL:
2396 case RESULT_DECL:
2397 case LABEL_DECL:
2398 case FUNCTION_DECL:
2399 case SSA_NAME:
2400
2401 case COMPONENT_REF:
2402 case MEM_REF:
2403 case INDIRECT_REF:
2404 case ARRAY_REF:
2405 case ARRAY_RANGE_REF:
2406 case BIT_FIELD_REF:
2407 case OBJ_TYPE_REF:
2408
2409 case REALPART_EXPR:
2410 case IMAGPART_EXPR:
2411 case PREINCREMENT_EXPR:
2412 case PREDECREMENT_EXPR:
2413 case SAVE_EXPR:
2414 case TRY_CATCH_EXPR:
2415 case WITH_CLEANUP_EXPR:
2416 case COMPOUND_EXPR:
2417 case MODIFY_EXPR:
2418 case TARGET_EXPR:
2419 case COND_EXPR:
2420 case BIND_EXPR:
2421 break;
2422
2423 default:
2424 /* Assume the worst for front-end tree codes. */
2425 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2426 break;
2427 return false;
2428 }
2429
2430 return true;
2431 }
2432
2433 /* Return an expr equal to X but certainly not valid as an lvalue. */
2434
2435 tree
2436 non_lvalue_loc (location_t loc, tree x)
2437 {
2438 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2439 us. */
2440 if (in_gimple_form)
2441 return x;
2442
2443 if (! maybe_lvalue_p (x))
2444 return x;
2445 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2446 }
2447
2448 /* When pedantic, return an expr equal to X but certainly not valid as a
2449 pedantic lvalue. Otherwise, return X. */
2450
2451 static tree
2452 pedantic_non_lvalue_loc (location_t loc, tree x)
2453 {
2454 return protected_set_expr_location_unshare (x, loc);
2455 }
2456 \f
2457 /* Given a tree comparison code, return the code that is the logical inverse.
2458 It is generally not safe to do this for floating-point comparisons, except
2459 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2460 ERROR_MARK in this case. */
2461
2462 enum tree_code
2463 invert_tree_comparison (enum tree_code code, bool honor_nans)
2464 {
2465 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2466 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2467 return ERROR_MARK;
2468
2469 switch (code)
2470 {
2471 case EQ_EXPR:
2472 return NE_EXPR;
2473 case NE_EXPR:
2474 return EQ_EXPR;
2475 case GT_EXPR:
2476 return honor_nans ? UNLE_EXPR : LE_EXPR;
2477 case GE_EXPR:
2478 return honor_nans ? UNLT_EXPR : LT_EXPR;
2479 case LT_EXPR:
2480 return honor_nans ? UNGE_EXPR : GE_EXPR;
2481 case LE_EXPR:
2482 return honor_nans ? UNGT_EXPR : GT_EXPR;
2483 case LTGT_EXPR:
2484 return UNEQ_EXPR;
2485 case UNEQ_EXPR:
2486 return LTGT_EXPR;
2487 case UNGT_EXPR:
2488 return LE_EXPR;
2489 case UNGE_EXPR:
2490 return LT_EXPR;
2491 case UNLT_EXPR:
2492 return GE_EXPR;
2493 case UNLE_EXPR:
2494 return GT_EXPR;
2495 case ORDERED_EXPR:
2496 return UNORDERED_EXPR;
2497 case UNORDERED_EXPR:
2498 return ORDERED_EXPR;
2499 default:
2500 gcc_unreachable ();
2501 }
2502 }
2503
2504 /* Similar, but return the comparison that results if the operands are
2505 swapped. This is safe for floating-point. */
2506
2507 enum tree_code
2508 swap_tree_comparison (enum tree_code code)
2509 {
2510 switch (code)
2511 {
2512 case EQ_EXPR:
2513 case NE_EXPR:
2514 case ORDERED_EXPR:
2515 case UNORDERED_EXPR:
2516 case LTGT_EXPR:
2517 case UNEQ_EXPR:
2518 return code;
2519 case GT_EXPR:
2520 return LT_EXPR;
2521 case GE_EXPR:
2522 return LE_EXPR;
2523 case LT_EXPR:
2524 return GT_EXPR;
2525 case LE_EXPR:
2526 return GE_EXPR;
2527 case UNGT_EXPR:
2528 return UNLT_EXPR;
2529 case UNGE_EXPR:
2530 return UNLE_EXPR;
2531 case UNLT_EXPR:
2532 return UNGT_EXPR;
2533 case UNLE_EXPR:
2534 return UNGE_EXPR;
2535 default:
2536 gcc_unreachable ();
2537 }
2538 }
2539
2540
2541 /* Convert a comparison tree code from an enum tree_code representation
2542 into a compcode bit-based encoding. This function is the inverse of
2543 compcode_to_comparison. */
2544
2545 static enum comparison_code
2546 comparison_to_compcode (enum tree_code code)
2547 {
2548 switch (code)
2549 {
2550 case LT_EXPR:
2551 return COMPCODE_LT;
2552 case EQ_EXPR:
2553 return COMPCODE_EQ;
2554 case LE_EXPR:
2555 return COMPCODE_LE;
2556 case GT_EXPR:
2557 return COMPCODE_GT;
2558 case NE_EXPR:
2559 return COMPCODE_NE;
2560 case GE_EXPR:
2561 return COMPCODE_GE;
2562 case ORDERED_EXPR:
2563 return COMPCODE_ORD;
2564 case UNORDERED_EXPR:
2565 return COMPCODE_UNORD;
2566 case UNLT_EXPR:
2567 return COMPCODE_UNLT;
2568 case UNEQ_EXPR:
2569 return COMPCODE_UNEQ;
2570 case UNLE_EXPR:
2571 return COMPCODE_UNLE;
2572 case UNGT_EXPR:
2573 return COMPCODE_UNGT;
2574 case LTGT_EXPR:
2575 return COMPCODE_LTGT;
2576 case UNGE_EXPR:
2577 return COMPCODE_UNGE;
2578 default:
2579 gcc_unreachable ();
2580 }
2581 }
2582
2583 /* Convert a compcode bit-based encoding of a comparison operator back
2584 to GCC's enum tree_code representation. This function is the
2585 inverse of comparison_to_compcode. */
2586
2587 static enum tree_code
2588 compcode_to_comparison (enum comparison_code code)
2589 {
2590 switch (code)
2591 {
2592 case COMPCODE_LT:
2593 return LT_EXPR;
2594 case COMPCODE_EQ:
2595 return EQ_EXPR;
2596 case COMPCODE_LE:
2597 return LE_EXPR;
2598 case COMPCODE_GT:
2599 return GT_EXPR;
2600 case COMPCODE_NE:
2601 return NE_EXPR;
2602 case COMPCODE_GE:
2603 return GE_EXPR;
2604 case COMPCODE_ORD:
2605 return ORDERED_EXPR;
2606 case COMPCODE_UNORD:
2607 return UNORDERED_EXPR;
2608 case COMPCODE_UNLT:
2609 return UNLT_EXPR;
2610 case COMPCODE_UNEQ:
2611 return UNEQ_EXPR;
2612 case COMPCODE_UNLE:
2613 return UNLE_EXPR;
2614 case COMPCODE_UNGT:
2615 return UNGT_EXPR;
2616 case COMPCODE_LTGT:
2617 return LTGT_EXPR;
2618 case COMPCODE_UNGE:
2619 return UNGE_EXPR;
2620 default:
2621 gcc_unreachable ();
2622 }
2623 }
2624
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
                     enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (ll_arg);
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  /* The compcode bit encoding makes combining trivial: AND of the
     predicates is bitwise AND of the codes, OR is bitwise OR.  Other
     combining codes are not handled.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  A comparison traps on unordered operands
         unless it is one of the UN* forms, EQ, or ORD.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  /* An always-true or always-false combination folds to a constant;
     otherwise map the combined compcode back to a tree comparison.  */
  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
2713 \f
2714 /* Return nonzero if two operands (typically of the same tree node)
2715 are necessarily equal. FLAGS modifies behavior as follows:
2716
2717 If OEP_ONLY_CONST is set, only return nonzero for constants.
2718 This function tests whether the operands are indistinguishable;
2719 it does not test whether they are equal using C's == operation.
2720 The distinction is important for IEEE floating point, because
2721 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2722 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2723
2724 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2725 even though it may hold multiple values during a function.
2726 This is because a GCC tree node guarantees that nothing else is
2727 executed between the evaluation of its "operands" (which may often
2728 be evaluated in arbitrary order). Hence if the operands themselves
2729 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2730 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2731 unset means assuming isochronic (or instantaneous) tree equivalence.
2732 Unless comparing arbitrary expression trees, such as from different
2733 statements, this flag can usually be left unset.
2734
2735 If OEP_PURE_SAME is set, then pure functions with identical arguments
2736 are considered the same. It is used when the caller has other ways
2737 to ensure that global memory is unchanged in between.
2738
2739 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2740 not values of expressions.
2741
2742 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2743 any operand with side effect. This is unnecesarily conservative in the
2744 case we know that arg0 and arg1 are in disjoint code paths (such as in
2745 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2746 addresses with TREE_CONSTANT flag set so we know that &var == &var
2747 even if var is volatile. */
2748
2749 int
2750 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2751 {
2752 /* If either is ERROR_MARK, they aren't equal. */
2753 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2754 || TREE_TYPE (arg0) == error_mark_node
2755 || TREE_TYPE (arg1) == error_mark_node)
2756 return 0;
2757
2758 /* Similar, if either does not have a type (like a released SSA name),
2759 they aren't equal. */
2760 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2761 return 0;
2762
2763 /* We cannot consider pointers to different address space equal. */
2764 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2765 && POINTER_TYPE_P (TREE_TYPE (arg1))
2766 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2767 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2768 return 0;
2769
2770 /* Check equality of integer constants before bailing out due to
2771 precision differences. */
2772 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2773 {
2774 /* Address of INTEGER_CST is not defined; check that we did not forget
2775 to drop the OEP_ADDRESS_OF flags. */
2776 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2777 return tree_int_cst_equal (arg0, arg1);
2778 }
2779
2780 if (!(flags & OEP_ADDRESS_OF))
2781 {
2782 /* If both types don't have the same signedness, then we can't consider
2783 them equal. We must check this before the STRIP_NOPS calls
2784 because they may change the signedness of the arguments. As pointers
2785 strictly don't have a signedness, require either two pointers or
2786 two non-pointers as well. */
2787 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2788 || POINTER_TYPE_P (TREE_TYPE (arg0))
2789 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2790 return 0;
2791
2792 /* If both types don't have the same precision, then it is not safe
2793 to strip NOPs. */
2794 if (element_precision (TREE_TYPE (arg0))
2795 != element_precision (TREE_TYPE (arg1)))
2796 return 0;
2797
2798 STRIP_NOPS (arg0);
2799 STRIP_NOPS (arg1);
2800 }
2801 #if 0
2802 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2803 sanity check once the issue is solved. */
2804 else
2805 /* Addresses of conversions and SSA_NAMEs (and many other things)
2806 are not defined. Check that we did not forget to drop the
2807 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2808 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2809 && TREE_CODE (arg0) != SSA_NAME);
2810 #endif
2811
2812 /* In case both args are comparisons but with different comparison
2813 code, try to swap the comparison operands of one arg to produce
2814 a match and compare that variant. */
2815 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2816 && COMPARISON_CLASS_P (arg0)
2817 && COMPARISON_CLASS_P (arg1))
2818 {
2819 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2820
2821 if (TREE_CODE (arg0) == swap_code)
2822 return operand_equal_p (TREE_OPERAND (arg0, 0),
2823 TREE_OPERAND (arg1, 1), flags)
2824 && operand_equal_p (TREE_OPERAND (arg0, 1),
2825 TREE_OPERAND (arg1, 0), flags);
2826 }
2827
2828 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2829 {
2830 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2831 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2832 ;
2833 else if (flags & OEP_ADDRESS_OF)
2834 {
2835 /* If we are interested in comparing addresses ignore
2836 MEM_REF wrappings of the base that can appear just for
2837 TBAA reasons. */
2838 if (TREE_CODE (arg0) == MEM_REF
2839 && DECL_P (arg1)
2840 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2841 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2842 && integer_zerop (TREE_OPERAND (arg0, 1)))
2843 return 1;
2844 else if (TREE_CODE (arg1) == MEM_REF
2845 && DECL_P (arg0)
2846 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2847 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2848 && integer_zerop (TREE_OPERAND (arg1, 1)))
2849 return 1;
2850 return 0;
2851 }
2852 else
2853 return 0;
2854 }
2855
2856 /* When not checking adddresses, this is needed for conversions and for
2857 COMPONENT_REF. Might as well play it safe and always test this. */
2858 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2859 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2860 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2861 && !(flags & OEP_ADDRESS_OF)))
2862 return 0;
2863
2864 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2865 We don't care about side effects in that case because the SAVE_EXPR
2866 takes care of that for us. In all other cases, two expressions are
2867 equal if they have no side effects. If we have two identical
2868 expressions with side effects that should be treated the same due
2869 to the only side effects being identical SAVE_EXPR's, that will
2870 be detected in the recursive calls below.
2871 If we are taking an invariant address of two identical objects
2872 they are necessarily equal as well. */
2873 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2874 && (TREE_CODE (arg0) == SAVE_EXPR
2875 || (flags & OEP_MATCH_SIDE_EFFECTS)
2876 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2877 return 1;
2878
2879 /* Next handle constant cases, those for which we can return 1 even
2880 if ONLY_CONST is set. */
2881 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2882 switch (TREE_CODE (arg0))
2883 {
2884 case INTEGER_CST:
2885 return tree_int_cst_equal (arg0, arg1);
2886
2887 case FIXED_CST:
2888 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2889 TREE_FIXED_CST (arg1));
2890
2891 case REAL_CST:
2892 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2893 return 1;
2894
2895
2896 if (!HONOR_SIGNED_ZEROS (arg0))
2897 {
2898 /* If we do not distinguish between signed and unsigned zero,
2899 consider them equal. */
2900 if (real_zerop (arg0) && real_zerop (arg1))
2901 return 1;
2902 }
2903 return 0;
2904
2905 case VECTOR_CST:
2906 {
2907 unsigned i;
2908
2909 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2910 return 0;
2911
2912 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2913 {
2914 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2915 VECTOR_CST_ELT (arg1, i), flags))
2916 return 0;
2917 }
2918 return 1;
2919 }
2920
2921 case COMPLEX_CST:
2922 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2923 flags)
2924 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2925 flags));
2926
2927 case STRING_CST:
2928 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2929 && ! memcmp (TREE_STRING_POINTER (arg0),
2930 TREE_STRING_POINTER (arg1),
2931 TREE_STRING_LENGTH (arg0)));
2932
2933 case ADDR_EXPR:
2934 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2935 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2936 flags | OEP_ADDRESS_OF
2937 | OEP_MATCH_SIDE_EFFECTS);
2938 case CONSTRUCTOR:
2939 /* In GIMPLE empty constructors are allowed in initializers of
2940 aggregates. */
2941 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2942 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2943 default:
2944 break;
2945 }
2946
2947 if (flags & OEP_ONLY_CONST)
2948 return 0;
2949
2950 /* Define macros to test an operand from arg0 and arg1 for equality and a
2951 variant that allows null and views null as being different from any
2952 non-null value. In the latter case, if either is null, the both
2953 must be; otherwise, do the normal comparison. */
2954 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2955 TREE_OPERAND (arg1, N), flags)
2956
2957 #define OP_SAME_WITH_NULL(N) \
2958 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2959 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2960
2961 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2962 {
2963 case tcc_unary:
2964 /* Two conversions are equal only if signedness and modes match. */
2965 switch (TREE_CODE (arg0))
2966 {
2967 CASE_CONVERT:
2968 case FIX_TRUNC_EXPR:
2969 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2970 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2971 return 0;
2972 break;
2973 default:
2974 break;
2975 }
2976
2977 return OP_SAME (0);
2978
2979
2980 case tcc_comparison:
2981 case tcc_binary:
2982 if (OP_SAME (0) && OP_SAME (1))
2983 return 1;
2984
2985 /* For commutative ops, allow the other order. */
2986 return (commutative_tree_code (TREE_CODE (arg0))
2987 && operand_equal_p (TREE_OPERAND (arg0, 0),
2988 TREE_OPERAND (arg1, 1), flags)
2989 && operand_equal_p (TREE_OPERAND (arg0, 1),
2990 TREE_OPERAND (arg1, 0), flags));
2991
2992 case tcc_reference:
2993 /* If either of the pointer (or reference) expressions we are
2994 dereferencing contain a side effect, these cannot be equal,
2995 but their addresses can be. */
2996 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
2997 && (TREE_SIDE_EFFECTS (arg0)
2998 || TREE_SIDE_EFFECTS (arg1)))
2999 return 0;
3000
3001 switch (TREE_CODE (arg0))
3002 {
3003 case INDIRECT_REF:
3004 if (!(flags & OEP_ADDRESS_OF)
3005 && (TYPE_ALIGN (TREE_TYPE (arg0))
3006 != TYPE_ALIGN (TREE_TYPE (arg1))))
3007 return 0;
3008 flags &= ~OEP_ADDRESS_OF;
3009 return OP_SAME (0);
3010
3011 case IMAGPART_EXPR:
3012 /* Require the same offset. */
3013 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3014 TYPE_SIZE (TREE_TYPE (arg1)),
3015 flags & ~OEP_ADDRESS_OF))
3016 return 0;
3017
3018 /* Fallthru. */
3019 case REALPART_EXPR:
3020 case VIEW_CONVERT_EXPR:
3021 return OP_SAME (0);
3022
3023 case TARGET_MEM_REF:
3024 case MEM_REF:
3025 if (!(flags & OEP_ADDRESS_OF))
3026 {
3027 /* Require equal access sizes */
3028 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3029 && (!TYPE_SIZE (TREE_TYPE (arg0))
3030 || !TYPE_SIZE (TREE_TYPE (arg1))
3031 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3032 TYPE_SIZE (TREE_TYPE (arg1)),
3033 flags)))
3034 return 0;
3035 /* Verify that access happens in similar types. */
3036 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3037 return 0;
3038 /* Verify that accesses are TBAA compatible. */
3039 if (!alias_ptr_types_compatible_p
3040 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3041 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3042 || (MR_DEPENDENCE_CLIQUE (arg0)
3043 != MR_DEPENDENCE_CLIQUE (arg1))
3044 || (MR_DEPENDENCE_BASE (arg0)
3045 != MR_DEPENDENCE_BASE (arg1)))
3046 return 0;
3047 /* Verify that alignment is compatible. */
3048 if (TYPE_ALIGN (TREE_TYPE (arg0))
3049 != TYPE_ALIGN (TREE_TYPE (arg1)))
3050 return 0;
3051 }
3052 flags &= ~OEP_ADDRESS_OF;
3053 return (OP_SAME (0) && OP_SAME (1)
3054 /* TARGET_MEM_REF require equal extra operands. */
3055 && (TREE_CODE (arg0) != TARGET_MEM_REF
3056 || (OP_SAME_WITH_NULL (2)
3057 && OP_SAME_WITH_NULL (3)
3058 && OP_SAME_WITH_NULL (4))));
3059
3060 case ARRAY_REF:
3061 case ARRAY_RANGE_REF:
3062 if (!OP_SAME (0))
3063 return 0;
3064 flags &= ~OEP_ADDRESS_OF;
3065 /* Compare the array index by value if it is constant first as we
3066 may have different types but same value here. */
3067 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3068 TREE_OPERAND (arg1, 1))
3069 || OP_SAME (1))
3070 && OP_SAME_WITH_NULL (2)
3071 && OP_SAME_WITH_NULL (3)
3072 /* Compare low bound and element size as with OEP_ADDRESS_OF
3073 we have to account for the offset of the ref. */
3074 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3075 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3076 || (operand_equal_p (array_ref_low_bound
3077 (CONST_CAST_TREE (arg0)),
3078 array_ref_low_bound
3079 (CONST_CAST_TREE (arg1)), flags)
3080 && operand_equal_p (array_ref_element_size
3081 (CONST_CAST_TREE (arg0)),
3082 array_ref_element_size
3083 (CONST_CAST_TREE (arg1)),
3084 flags))));
3085
3086 case COMPONENT_REF:
3087 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3088 may be NULL when we're called to compare MEM_EXPRs. */
3089 if (!OP_SAME_WITH_NULL (0)
3090 || !OP_SAME (1))
3091 return 0;
3092 flags &= ~OEP_ADDRESS_OF;
3093 return OP_SAME_WITH_NULL (2);
3094
3095 case BIT_FIELD_REF:
3096 if (!OP_SAME (0))
3097 return 0;
3098 flags &= ~OEP_ADDRESS_OF;
3099 return OP_SAME (1) && OP_SAME (2);
3100
3101 default:
3102 return 0;
3103 }
3104
3105 case tcc_expression:
3106 switch (TREE_CODE (arg0))
3107 {
3108 case ADDR_EXPR:
3109 /* Be sure we pass right ADDRESS_OF flag. */
3110 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3111 return operand_equal_p (TREE_OPERAND (arg0, 0),
3112 TREE_OPERAND (arg1, 0),
3113 flags | OEP_ADDRESS_OF);
3114
3115 case TRUTH_NOT_EXPR:
3116 return OP_SAME (0);
3117
3118 case TRUTH_ANDIF_EXPR:
3119 case TRUTH_ORIF_EXPR:
3120 return OP_SAME (0) && OP_SAME (1);
3121
3122 case FMA_EXPR:
3123 case WIDEN_MULT_PLUS_EXPR:
3124 case WIDEN_MULT_MINUS_EXPR:
3125 if (!OP_SAME (2))
3126 return 0;
3127 /* The multiplcation operands are commutative. */
3128 /* FALLTHRU */
3129
3130 case TRUTH_AND_EXPR:
3131 case TRUTH_OR_EXPR:
3132 case TRUTH_XOR_EXPR:
3133 if (OP_SAME (0) && OP_SAME (1))
3134 return 1;
3135
3136 /* Otherwise take into account this is a commutative operation. */
3137 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3138 TREE_OPERAND (arg1, 1), flags)
3139 && operand_equal_p (TREE_OPERAND (arg0, 1),
3140 TREE_OPERAND (arg1, 0), flags));
3141
3142 case COND_EXPR:
3143 if (! OP_SAME (1) || ! OP_SAME (2))
3144 return 0;
3145 flags &= ~OEP_ADDRESS_OF;
3146 return OP_SAME (0);
3147
3148 case VEC_COND_EXPR:
3149 case DOT_PROD_EXPR:
3150 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3151
3152 default:
3153 return 0;
3154 }
3155
3156 case tcc_vl_exp:
3157 switch (TREE_CODE (arg0))
3158 {
3159 case CALL_EXPR:
3160 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3161 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3162 /* If not both CALL_EXPRs are either internal or normal function
3163 functions, then they are not equal. */
3164 return 0;
3165 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3166 {
3167 /* If the CALL_EXPRs call different internal functions, then they
3168 are not equal. */
3169 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3170 return 0;
3171 }
3172 else
3173 {
3174 /* If the CALL_EXPRs call different functions, then they are not
3175 equal. */
3176 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3177 flags))
3178 return 0;
3179 }
3180
3181 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3182 {
3183 unsigned int cef = call_expr_flags (arg0);
3184 if (flags & OEP_PURE_SAME)
3185 cef &= ECF_CONST | ECF_PURE;
3186 else
3187 cef &= ECF_CONST;
3188 if (!cef)
3189 return 0;
3190 }
3191
3192 /* Now see if all the arguments are the same. */
3193 {
3194 const_call_expr_arg_iterator iter0, iter1;
3195 const_tree a0, a1;
3196 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3197 a1 = first_const_call_expr_arg (arg1, &iter1);
3198 a0 && a1;
3199 a0 = next_const_call_expr_arg (&iter0),
3200 a1 = next_const_call_expr_arg (&iter1))
3201 if (! operand_equal_p (a0, a1, flags))
3202 return 0;
3203
3204 /* If we get here and both argument lists are exhausted
3205 then the CALL_EXPRs are equal. */
3206 return ! (a0 || a1);
3207 }
3208 default:
3209 return 0;
3210 }
3211
3212 case tcc_declaration:
3213 /* Consider __builtin_sqrt equal to sqrt. */
3214 return (TREE_CODE (arg0) == FUNCTION_DECL
3215 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3216 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3217 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3218
3219 case tcc_exceptional:
3220 if (TREE_CODE (arg0) == CONSTRUCTOR)
3221 {
3222 /* In GIMPLE constructors are used only to build vectors from
3223 elements. Individual elements in the constructor must be
3224 indexed in increasing order and form an initial sequence.
3225
3226 We make no effort to compare constructors in generic.
3227 (see sem_variable::equals in ipa-icf which can do so for
3228 constants). */
3229 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3230 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3231 return 0;
3232
3233 /* Be sure that vectors constructed have the same representation.
3234 We only tested element precision and modes to match.
3235 Vectors may be BLKmode and thus also check that the number of
3236 parts match. */
3237 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3238 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3239 return 0;
3240
3241 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3242 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3243 unsigned int len = vec_safe_length (v0);
3244
3245 if (len != vec_safe_length (v1))
3246 return 0;
3247
3248 for (unsigned int i = 0; i < len; i++)
3249 {
3250 constructor_elt *c0 = &(*v0)[i];
3251 constructor_elt *c1 = &(*v1)[i];
3252
3253 if (!operand_equal_p (c0->value, c1->value, flags)
3254 /* In GIMPLE the indexes can be either NULL or matching i.
3255 Double check this so we won't get false
3256 positives for GENERIC. */
3257 || (c0->index
3258 && (TREE_CODE (c0->index) != INTEGER_CST
3259 || !compare_tree_int (c0->index, i)))
3260 || (c1->index
3261 && (TREE_CODE (c1->index) != INTEGER_CST
3262 || !compare_tree_int (c1->index, i))))
3263 return 0;
3264 }
3265 return 1;
3266 }
3267 return 0;
3268
3269 default:
3270 return 0;
3271 }
3272
3273 #undef OP_SAME
3274 #undef OP_SAME_WITH_NULL
3275 }
3276 \f
3277 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3278 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3279
3280 When in doubt, return 0. */
3281
3282 static int
3283 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3284 {
3285 int unsignedp1, unsignedpo;
3286 tree primarg0, primarg1, primother;
3287 unsigned int correct_width;
3288
3289 if (operand_equal_p (arg0, arg1, 0))
3290 return 1;
3291
3292 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3293 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3294 return 0;
3295
3296 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3297 and see if the inner values are the same. This removes any
3298 signedness comparison, which doesn't matter here. */
3299 primarg0 = arg0, primarg1 = arg1;
3300 STRIP_NOPS (primarg0);
3301 STRIP_NOPS (primarg1);
3302 if (operand_equal_p (primarg0, primarg1, 0))
3303 return 1;
3304
3305 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3306 actual comparison operand, ARG0.
3307
3308 First throw away any conversions to wider types
3309 already present in the operands. */
3310
3311 primarg1 = get_narrower (arg1, &unsignedp1);
3312 primother = get_narrower (other, &unsignedpo);
3313
3314 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3315 if (unsignedp1 == unsignedpo
3316 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3317 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3318 {
3319 tree type = TREE_TYPE (arg0);
3320
3321 /* Make sure shorter operand is extended the right way
3322 to match the longer operand. */
3323 primarg1 = fold_convert (signed_or_unsigned_type_for
3324 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3325
3326 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3327 return 1;
3328 }
3329
3330 return 0;
3331 }
3332 \f
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here: reclassify
     the codes we understand so the switch below treats them like
     ordinary unary/binary operations.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      /* Both operands must themselves satisfy the property.  */
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      /* Constants use no variables, so they are always acceptable.  */
      return 1;

    case tcc_expression:
      /* For a conditional, all three operands must qualify.  */
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      /* Record operand 0 in *CVAL1 or *CVAL2, or verify it matches one
	 already recorded.  */
      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      /* Likewise for operand 1; by now *CVAL1 is known nonzero.  */
      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
3429 \f
3430 /* ARG is a tree that is known to contain just arithmetic operations and
3431 comparisons. Evaluate the operations in the tree substituting NEW0 for
3432 any occurrence of OLD0 as an operand of a comparison and likewise for
3433 NEW1 and OLD1. */
3434
3435 static tree
3436 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3437 tree old1, tree new1)
3438 {
3439 tree type = TREE_TYPE (arg);
3440 enum tree_code code = TREE_CODE (arg);
3441 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3442
3443 /* We can handle some of the tcc_expression cases here. */
3444 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3445 tclass = tcc_unary;
3446 else if (tclass == tcc_expression
3447 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3448 tclass = tcc_binary;
3449
3450 switch (tclass)
3451 {
3452 case tcc_unary:
3453 return fold_build1_loc (loc, code, type,
3454 eval_subst (loc, TREE_OPERAND (arg, 0),
3455 old0, new0, old1, new1));
3456
3457 case tcc_binary:
3458 return fold_build2_loc (loc, code, type,
3459 eval_subst (loc, TREE_OPERAND (arg, 0),
3460 old0, new0, old1, new1),
3461 eval_subst (loc, TREE_OPERAND (arg, 1),
3462 old0, new0, old1, new1));
3463
3464 case tcc_expression:
3465 switch (code)
3466 {
3467 case SAVE_EXPR:
3468 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3469 old1, new1);
3470
3471 case COMPOUND_EXPR:
3472 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3473 old1, new1);
3474
3475 case COND_EXPR:
3476 return fold_build3_loc (loc, code, type,
3477 eval_subst (loc, TREE_OPERAND (arg, 0),
3478 old0, new0, old1, new1),
3479 eval_subst (loc, TREE_OPERAND (arg, 1),
3480 old0, new0, old1, new1),
3481 eval_subst (loc, TREE_OPERAND (arg, 2),
3482 old0, new0, old1, new1));
3483 default:
3484 break;
3485 }
3486 /* Fall through - ??? */
3487
3488 case tcc_comparison:
3489 {
3490 tree arg0 = TREE_OPERAND (arg, 0);
3491 tree arg1 = TREE_OPERAND (arg, 1);
3492
3493 /* We need to check both for exact equality and tree equality. The
3494 former will be true if the operand has a side-effect. In that
3495 case, we know the operand occurred exactly once. */
3496
3497 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3498 arg0 = new0;
3499 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3500 arg0 = new1;
3501
3502 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3503 arg1 = new0;
3504 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3505 arg1 = new1;
3506
3507 return fold_build2_loc (loc, code, type, arg0, arg1);
3508 }
3509
3510 default:
3511 return arg;
3512 }
3513 }
3514 \f
3515 /* Return a tree for the case when the result of an expression is RESULT
3516 converted to TYPE and OMITTED was previously an operand of the expression
3517 but is now not needed (e.g., we folded OMITTED * 0).
3518
3519 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3520 the conversion of RESULT to TYPE. */
3521
3522 tree
3523 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3524 {
3525 tree t = fold_convert_loc (loc, type, result);
3526
3527 /* If the resulting operand is an empty statement, just return the omitted
3528 statement casted to void. */
3529 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3530 return build1_loc (loc, NOP_EXPR, void_type_node,
3531 fold_ignored_result (omitted));
3532
3533 if (TREE_SIDE_EFFECTS (omitted))
3534 return build2_loc (loc, COMPOUND_EXPR, type,
3535 fold_ignored_result (omitted), t);
3536
3537 return non_lvalue_loc (loc, t);
3538 }
3539
3540 /* Return a tree for the case when the result of an expression is RESULT
3541 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3542 of the expression but are now not needed.
3543
3544 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3545 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3546 evaluated before OMITTED2. Otherwise, if neither has side effects,
3547 just do the conversion of RESULT to TYPE. */
3548
3549 tree
3550 omit_two_operands_loc (location_t loc, tree type, tree result,
3551 tree omitted1, tree omitted2)
3552 {
3553 tree t = fold_convert_loc (loc, type, result);
3554
3555 if (TREE_SIDE_EFFECTS (omitted2))
3556 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3557 if (TREE_SIDE_EFFECTS (omitted1))
3558 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3559
3560 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3561 }
3562
3563 \f
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   Returns NULL_TREE when no simplification is possible.

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      /* With trapping math, inverting an FP ordering comparison is not
	 safe (the inverse may differ in its behavior on NaN operands);
	 only EQ/NE/ORDERED/UNORDERED are exempt.  */
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (op_type));
      if (code == ERROR_MARK)
	return NULL_TREE;

      tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			     TREE_OPERAND (arg, 1));
      /* Carry over any warning suppression from the original node.  */
      if (TREE_NO_WARNING (arg))
	TREE_NO_WARNING (ret) = 1;
      return ret;
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a && b) --> !a || !b.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a || b) --> !a && !b.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      /* Short-circuit De Morgan, preserving evaluation order.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      /* Invert only the value operand; the first operand is kept for
	 its side effects.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      /* Push the negation inside the conversion.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      /* !(x & 1) becomes (x & 1) == 0.  */
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      /* Keep the cleanup point on the outside, invert inside it.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
3708
3709 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3710 assume that ARG is an operation that returns a truth value (0 or 1
3711 for scalars, 0 or -1 for vectors). Return the folded expression if
3712 folding is successful. Otherwise, return NULL_TREE. */
3713
3714 static tree
3715 fold_invert_truthvalue (location_t loc, tree arg)
3716 {
3717 tree type = TREE_TYPE (arg);
3718 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3719 ? BIT_NOT_EXPR
3720 : TRUTH_NOT_EXPR,
3721 type, arg);
3722 }
3723
3724 /* Return a simplified tree node for the truth-negation of ARG. This
3725 never alters ARG itself. We assume that ARG is an operation that
3726 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3727
3728 tree
3729 invert_truthvalue_loc (location_t loc, tree arg)
3730 {
3731 if (TREE_CODE (arg) == ERROR_MARK)
3732 return arg;
3733
3734 tree type = TREE_TYPE (arg);
3735 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3736 ? BIT_NOT_EXPR
3737 : TRUTH_NOT_EXPR,
3738 type, arg);
3739 }
3740
3741 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3742 with code CODE. This optimization is unsafe. */
3743 static tree
3744 distribute_real_division (location_t loc, enum tree_code code, tree type,
3745 tree arg0, tree arg1)
3746 {
3747 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3748 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3749
3750 /* (A / C) +- (B / C) -> (A +- B) / C. */
3751 if (mul0 == mul1
3752 && operand_equal_p (TREE_OPERAND (arg0, 1),
3753 TREE_OPERAND (arg1, 1), 0))
3754 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3755 fold_build2_loc (loc, code, type,
3756 TREE_OPERAND (arg0, 0),
3757 TREE_OPERAND (arg1, 0)),
3758 TREE_OPERAND (arg0, 1));
3759
3760 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3761 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3762 TREE_OPERAND (arg1, 0), 0)
3763 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3764 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3765 {
3766 REAL_VALUE_TYPE r0, r1;
3767 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3768 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3769 if (!mul0)
3770 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3771 if (!mul1)
3772 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3773 real_arithmetic (&r0, code, &r0, &r1);
3774 return fold_build2_loc (loc, MULT_EXPR, type,
3775 TREE_OPERAND (arg0, 0),
3776 build_real (type, r0));
3777 }
3778
3779 return NULL_TREE;
3780 }
3781 \f
3782 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3783 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3784 and uses reverse storage order if REVERSEP is nonzero. */
3785
3786 static tree
3787 make_bit_field_ref (location_t loc, tree inner, tree type,
3788 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3789 int unsignedp, int reversep)
3790 {
3791 tree result, bftype;
3792
3793 if (bitpos == 0 && !reversep)
3794 {
3795 tree size = TYPE_SIZE (TREE_TYPE (inner));
3796 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3797 || POINTER_TYPE_P (TREE_TYPE (inner)))
3798 && tree_fits_shwi_p (size)
3799 && tree_to_shwi (size) == bitsize)
3800 return fold_convert_loc (loc, type, inner);
3801 }
3802
3803 bftype = type;
3804 if (TYPE_PRECISION (bftype) != bitsize
3805 || TYPE_UNSIGNED (bftype) == !unsignedp)
3806 bftype = build_nonstandard_integer_type (bitsize, 0);
3807
3808 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3809 size_int (bitsize), bitsize_int (bitpos));
3810 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3811
3812 if (bftype != type)
3813 result = fold_convert_loc (loc, type, result);
3814
3815 return result;
3816 }
3817
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lreversep, rreversep;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lreversep, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
    return 0;

  if (const_p)
    rreversep = lreversep;
  else
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, signedness and storage order are the same.  */
      rinner
	= get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
			       &runsignedp, &rreversep, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, false);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  /* Adjust the bit position for the endianness of the extraction.  */
  if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: all-ones
     shifted left then right so only LBITSIZE bits survive at
     position LBITPOS.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, linner,
								 unsigned_type,
								 nbitsize, nbitpos,
								 1, lreversep),
					     mask),
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, rinner,
								 unsigned_type,
								 nbitsize, nbitpos,
								 1, rreversep),
					     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (wi::lrshift (rhs, lbitsize) != 0)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      wide_int tem = wi::arshift (rhs, lbitsize - 1);
      if (tem != 0 && tem != -1)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
			    lreversep);

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
3982 \f
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if the any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PREVERSEP is set to the storage order of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
			int *punsignedp, int *preversep, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  /* Peel off an explicit BIT_AND_EXPR with a constant mask, remembering
     the mask so it can be merged below.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, preversep, pvolatilep, false);
  /* Reject non-references (unless we at least stripped a mask), variable
     offsets, and PLACEHOLDER_EXPRs we could no longer substitute.  */
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield: all-ones shifted left then
     right so exactly *PBITSIZE low-order bits remain.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			    fold_convert_loc (loc, unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
4074
4075 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4076 bit positions and MASK is SIGNED. */
4077
4078 static int
4079 all_ones_mask_p (const_tree mask, unsigned int size)
4080 {
4081 tree type = TREE_TYPE (mask);
4082 unsigned int precision = TYPE_PRECISION (type);
4083
4084 /* If this function returns true when the type of the mask is
4085 UNSIGNED, then there will be errors. In particular see
4086 gcc.c-torture/execute/990326-1.c. There does not appear to be
4087 any documentation paper trail as to why this is so. But the pre
4088 wide-int worked with that restriction and it has been preserved
4089 here. */
4090 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4091 return false;
4092
4093 return wi::mask (size, false, precision) == mask;
4094 }
4095
4096 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4097 represents the sign bit of EXP's type. If EXP represents a sign
4098 or zero extension, also test VAL against the unextended type.
4099 The return value is the (sub)expression whose sign bit is VAL,
4100 or NULL_TREE otherwise. */
4101
4102 tree
4103 sign_bit_p (tree exp, const_tree val)
4104 {
4105 int width;
4106 tree t;
4107
4108 /* Tree EXP must have an integral type. */
4109 t = TREE_TYPE (exp);
4110 if (! INTEGRAL_TYPE_P (t))
4111 return NULL_TREE;
4112
4113 /* Tree VAL must be an integer constant. */
4114 if (TREE_CODE (val) != INTEGER_CST
4115 || TREE_OVERFLOW (val))
4116 return NULL_TREE;
4117
4118 width = TYPE_PRECISION (t);
4119 if (wi::only_sign_bit_p (val, width))
4120 return exp;
4121
4122 /* Handle extension from a narrower type. */
4123 if (TREE_CODE (exp) == NOP_EXPR
4124 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4125 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4126
4127 return NULL_TREE;
4128 }
4129
4130 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4131 to be evaluated unconditionally. */
4132
4133 static int
4134 simple_operand_p (const_tree exp)
4135 {
4136 /* Strip any conversions that don't change the machine mode. */
4137 STRIP_NOPS (exp);
4138
4139 return (CONSTANT_CLASS_P (exp)
4140 || TREE_CODE (exp) == SSA_NAME
4141 || (DECL_P (exp)
4142 && ! TREE_ADDRESSABLE (exp)
4143 && ! TREE_THIS_VOLATILE (exp)
4144 && ! DECL_NONLOCAL (exp)
4145 /* Don't regard global variables as simple. They may be
4146 allocated in ways unknown to the compiler (shared memory,
4147 #pragma weak, etc). */
4148 && ! TREE_PUBLIC (exp)
4149 && ! DECL_EXTERNAL (exp)
4150 /* Weakrefs are not safe to be read, since they can be NULL.
4151 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4152 have DECL_WEAK flag set. */
4153 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4154 /* Loading a static variable is unduly expensive, but global
4155 registers aren't expensive. */
4156 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4157 }
4158
4159 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4160 to be evaluated unconditionally.
4161 I addition to simple_operand_p, we assume that comparisons, conversions,
4162 and logic-not operations are simple, if their operands are simple, too. */
4163
4164 static bool
4165 simple_operand_p_2 (tree exp)
4166 {
4167 enum tree_code code;
4168
4169 if (TREE_SIDE_EFFECTS (exp)
4170 || tree_could_trap_p (exp))
4171 return false;
4172
4173 while (CONVERT_EXPR_P (exp))
4174 exp = TREE_OPERAND (exp, 0);
4175
4176 code = TREE_CODE (exp);
4177
4178 if (TREE_CODE_CLASS (code) == tcc_comparison)
4179 return (simple_operand_p (TREE_OPERAND (exp, 0))
4180 && simple_operand_p (TREE_OPERAND (exp, 1)));
4181
4182 if (code == TRUTH_NOT_EXPR)
4183 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4184
4185 return simple_operand_p (exp);
4186 }
4187
4188 \f
4189 /* The following functions are subroutines to fold_range_test and allow it to
4190 try to change a logical combination of comparisons into a range test.
4191
4192 For example, both
4193 X == 2 || X == 3 || X == 4 || X == 5
4194 and
4195 X >= 2 && X <= 5
4196 are converted to
4197 (unsigned) (X - 2) <= 3
4198
4199 We describe each set of comparisons as being either inside or outside
4200 a range, using a variable named like IN_P, and then describe the
4201 range with a lower and upper bound. If one of the bounds is omitted,
4202 it represents either the highest or lowest value of the type.
4203
4204 In the comments below, we represent a range by two numbers in brackets
4205 preceded by a "+" to designate being inside that range, or a "-" to
4206 designate being outside that range, so the condition can be inverted by
4207 flipping the prefix. An omitted bound is represented by a "-". For
4208 example, "- [-, 10]" means being outside the range starting at the lowest
4209 possible value and ending at 10, in other words, being greater than 10.
4210 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4211 always false.
4212
4213 We set up things so that the missing bounds are handled in a consistent
4214 manner so neither a missing bound nor "true" and "false" need to be
4215 handled using a special case. */
4216
4217 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4218 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4219 and UPPER1_P are nonzero if the respective argument is an upper bound
4220 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4221 must be specified for a comparison. ARG1 will be converted to ARG0's
4222 type if both are specified. */
4223
4224 static tree
4225 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4226 tree arg1, int upper1_p)
4227 {
4228 tree tem;
4229 int result;
4230 int sgn0, sgn1;
4231
4232 /* If neither arg represents infinity, do the normal operation.
4233 Else, if not a comparison, return infinity. Else handle the special
4234 comparison rules. Note that most of the cases below won't occur, but
4235 are handled for consistency. */
4236
4237 if (arg0 != 0 && arg1 != 0)
4238 {
4239 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4240 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4241 STRIP_NOPS (tem);
4242 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4243 }
4244
4245 if (TREE_CODE_CLASS (code) != tcc_comparison)
4246 return 0;
4247
4248 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4249 for neither. In real maths, we cannot assume open ended ranges are
4250 the same. But, this is computer arithmetic, where numbers are finite.
4251 We can therefore make the transformation of any unbounded range with
4252 the value Z, Z being greater than any representable number. This permits
4253 us to treat unbounded ranges as equal. */
4254 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4255 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4256 switch (code)
4257 {
4258 case EQ_EXPR:
4259 result = sgn0 == sgn1;
4260 break;
4261 case NE_EXPR:
4262 result = sgn0 != sgn1;
4263 break;
4264 case LT_EXPR:
4265 result = sgn0 < sgn1;
4266 break;
4267 case LE_EXPR:
4268 result = sgn0 <= sgn1;
4269 break;
4270 case GT_EXPR:
4271 result = sgn0 > sgn1;
4272 break;
4273 case GE_EXPR:
4274 result = sgn0 >= sgn1;
4275 break;
4276 default:
4277 gcc_unreachable ();
4278 }
4279
4280 return constant_boolean_node (result, type);
4281 }
4282 \f
/* Helper routine for make_range.  Perform one step for it: given the
   operation CODE applied to ARG0 (and ARG1 where the operation is
   binary), refine the range currently described by *P_LOW, *P_HIGH and
   *P_IN_P.  EXP_TYPE is the type of the expression being decomposed.
   Return the new expression on which the caller's loop should continue
   (normally ARG0, or a rewritten expression for BIT_NOT_EXPR), or
   NULL_TREE if the loop should stop.  Set *STRICT_OVERFLOW_P to true
   when the refined range is only valid because signed overflow is
   undefined; otherwise leave it unchanged.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high))
	return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P since it will set in the initial case of
	 being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high)
	  || TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      switch (code)
	{
	case NE_EXPR:  /* - [c, c]  */
	  low = high = arg1;
	  break;
	case EQ_EXPR:  /* + [c, c]  */
	  in_p = ! in_p, low = high = arg1;
	  break;
	case GT_EXPR:  /* - [-, c] */
	  low = 0, high = arg1;
	  break;
	case GE_EXPR:  /* + [c, -] */
	  in_p = ! in_p, low = arg1, high = 0;
	  break;
	case LT_EXPR:  /* - [c, -] */
	  low = arg1, high = 0;
	  break;
	case LE_EXPR:  /* + [-, c] */
	  in_p = ! in_p, low = 0, high = arg1;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* If this is an unsigned comparison, we also know that EXP is
	 greater than or equal to zero.  We base the range tests we make
	 on that fact, so we record it here so we can parse existing
	 range tests.  We test arg0_type since often the return type
	 of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	{
	  /* Intersect the range built above with [0, +inf).  */
	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
			      in_p, low, high, 1,
			      build_int_cst (arg0_type, 0),
			      NULL_TREE))
	    return NULL_TREE;

	  in_p = n_in_p, low = n_low, high = n_high;

	  /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
	  if (high == 0 && low && ! integer_zerop (low))
	    {
	      in_p = ! in_p;
	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				  build_int_cst (TREE_TYPE (low), 1), 0);
	      low = build_int_cst (arg0_type, 0);
	    }
	}

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
	 low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	{
	  if (low == NULL_TREE)
	    low = TYPE_MIN_VALUE (arg0_type);
	  if (high == NULL_TREE)
	    high = TYPE_MAX_VALUE (arg0_type);
	}

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
			   build_int_cst (exp_type, 0),
			   0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
			    build_int_cst (exp_type, 0),
			    0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
	return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1; let the caller's loop retry on the rewritten
	 expression, which the NEGATE_EXPR and MINUS_EXPR cases handle.  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
			 build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	 move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
	 so we don't worry about it so long as our computations on
	 the bounds don't overflow.  For unsigned, overflow is defined
	 and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			   arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			    arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
	return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	*strict_overflow_p = true;

      normalize:
	/* Check for an unsigned range which has wrapped around the maximum
	   value thus making n_high < n_low, and normalize it.  */
	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	  {
	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
			       build_int_cst (TREE_TYPE (n_high), 1), 0);
	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				build_int_cst (TREE_TYPE (n_low), 1), 0);

	    /* If the range is of the form +/- [ x+1, x ], we won't
	       be able to normalize it.  But then, it represents the
	       whole range or the empty set, so make it
	       +/- [ -, - ].  */
	    if (tree_int_cst_equal (n_low, low)
		&& tree_int_cst_equal (n_high, high))
	      low = high = 0;
	    else
	      in_p = ! in_p;
	  }
	else
	  low = n_low, high = n_high;

	*p_low = low;
	*p_high = high;
	*p_in_p = in_p;
	return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      /* A widening conversion cannot be looked through, since values in
	 the wider EXP_TYPE may not be representable in ARG0_TYPE.  */
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
	n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
	n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
	 a signed type,  we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	{
	  tree high_positive;
	  tree equiv_type;
	  /* For fixed-point modes, we need to pass the saturating flag
	     as the 2nd parameter.  */
	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
						TYPE_SATURATING (arg0_type));
	  else
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

	  /* A range without an upper bound is, naturally, unbounded.
	     Since convert would have cropped a very large value, use
	     the max value for the destination type.  */
	  high_positive
	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
	      : TYPE_MAX_VALUE (arg0_type);

	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));

	  /* If the low bound is specified, "and" the range with the
	     range for which the original unsigned value will be
	     positive.  */
	  if (low != 0)
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (n_in_p == in_p);
	    }
	  else
	    {
	      /* Otherwise, "or" the range with the range of the input
		 that will be interpreted as negative.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (in_p != n_in_p);
	    }
	}

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
4546
4547 /* Given EXP, a logical expression, set the range it is testing into
4548 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4549 actually being tested. *PLOW and *PHIGH will be made of the same
4550 type as the returned expression. If EXP is not a comparison, we
4551 will most likely not be returning a useful value and range. Set
4552 *STRICT_OVERFLOW_P to true if the return value is only valid
4553 because signed overflow is undefined; otherwise, do not change
4554 *STRICT_OVERFLOW_P. */
4555
4556 tree
4557 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4558 bool *strict_overflow_p)
4559 {
4560 enum tree_code code;
4561 tree arg0, arg1 = NULL_TREE;
4562 tree exp_type, nexp;
4563 int in_p;
4564 tree low, high;
4565 location_t loc = EXPR_LOCATION (exp);
4566
4567 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4568 and see if we can refine the range. Some of the cases below may not
4569 happen, but it doesn't seem worth worrying about this. We "continue"
4570 the outer loop when we've changed something; otherwise we "break"
4571 the switch, which will "break" the while. */
4572
4573 in_p = 0;
4574 low = high = build_int_cst (TREE_TYPE (exp), 0);
4575
4576 while (1)
4577 {
4578 code = TREE_CODE (exp);
4579 exp_type = TREE_TYPE (exp);
4580 arg0 = NULL_TREE;
4581
4582 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4583 {
4584 if (TREE_OPERAND_LENGTH (exp) > 0)
4585 arg0 = TREE_OPERAND (exp, 0);
4586 if (TREE_CODE_CLASS (code) == tcc_binary
4587 || TREE_CODE_CLASS (code) == tcc_comparison
4588 || (TREE_CODE_CLASS (code) == tcc_expression
4589 && TREE_OPERAND_LENGTH (exp) > 1))
4590 arg1 = TREE_OPERAND (exp, 1);
4591 }
4592 if (arg0 == NULL_TREE)
4593 break;
4594
4595 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4596 &high, &in_p, strict_overflow_p);
4597 if (nexp == NULL_TREE)
4598 break;
4599 exp = nexp;
4600 }
4601
4602 /* If EXP is a constant, we can evaluate whether this is true or false. */
4603 if (TREE_CODE (exp) == INTEGER_CST)
4604 {
4605 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4606 exp, 0, low, 0))
4607 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4608 exp, 1, high, 1)));
4609 low = high = 0;
4610 exp = 0;
4611 }
4612
4613 *pin_p = in_p, *plow = low, *phigh = high;
4614 return exp;
4615 }
4616 \f
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of,
   depending on IN_P) the range.  Return 0 if the test couldn't be
   created.  LOW and/or HIGH may be null, meaning the bound is the
   lowest/highest value of EXP's type.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;

  /* An "out of range" test is the negation of the "in range" test.  */
  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  /* No bounds at all: the test is always true, but EXP must still be
     evaluated for its side effects.  */
  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  /* A single-value range is an equality test.  */
  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  /* [0, high] in a signed type can be tested as a single unsigned
     comparison against HIGH.  */
  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      int prec = TYPE_PRECISION (etype);

      /* HIGH must be the largest value of the signed variant of ETYPE,
	 i.e. a mask of all bits but the sign bit.  */
      if (wi::mask (prec - 1, false, prec) == high)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.
     First make sure that arithmetics in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  build_int_cst (TREE_TYPE (maxv), 1), 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  /* For pointers, the subtraction must be expressed with
     POINTER_PLUS_EXPR of the negated offset.  */
  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
          return build_range_check (loc, type,
			     	    fold_build_pointer_plus_loc (loc, exp, low),
			            1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
4744 \f
4745 /* Return the predecessor of VAL in its type, handling the infinite case. */
4746
4747 static tree
4748 range_predecessor (tree val)
4749 {
4750 tree type = TREE_TYPE (val);
4751
4752 if (INTEGRAL_TYPE_P (type)
4753 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4754 return 0;
4755 else
4756 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4757 build_int_cst (TREE_TYPE (val), 1), 0);
4758 }
4759
4760 /* Return the successor of VAL in its type, handling the infinite case. */
4761
4762 static tree
4763 range_successor (tree val)
4764 {
4765 tree type = TREE_TYPE (val);
4766
4767 if (INTEGRAL_TYPE_P (type)
4768 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4769 return 0;
4770 else
4771 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4772 build_int_cst (TREE_TYPE (val), 1), 0);
4773 }
4774
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the variables pointed
   to by PIN_P, PLOW and PHIGH.  Range 0 is (IN0_P, LOW0, HIGH0) and
   range 1 is (IN1_P, LOW1, HIGH1); a null bound stands for the
   lowest/highest value of the type, as documented above
   make_range_step.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  A null bound
		 compares equal to an omitted one, which is what lets the
		 adjacency check below fire.  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       build_int_cst (TREE_TYPE (high1), 1),
						       1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
	         minimum values of the given type.  For
	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
	         return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
4986 \f
4987
4988 /* Subroutine of fold, looking inside expressions of the form
4989 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4990 of the COND_EXPR. This function is being used also to optimize
4991 A op B ? C : A, by reversing the comparison first.
4992
4993 Return a folded expression whose code is not a COND_EXPR
4994 anymore, or NULL_TREE if no folding opportunity is found. */
4995
static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  /* ARG0 is the comparison, ARG1 and ARG2 are the two arms of the
     COND_EXPR being folded.  */
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
				    fold_convert_loc (loc, type,
						  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	/* The unordered forms may raise an exception on a NaN operand;
	   only safe to turn into abs when trapping math is off.  */
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  break;
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  break;
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (! lang_GNU_CXX ()
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (arg1))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				   comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					  fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (arg1))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				   comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					  fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (arg1))
	    return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (arg1))
	    return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;
      case NE_EXPR:
	break;
      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
5294
5295
5296 \f
/* Nonzero when it is preferable to evaluate both arms of a
   short-circuit logical operation unconditionally, i.e. when the
   target's branch cost (for the speed/size tradeoff of the current
   function) is at least 2.  Targets may override this macro.  */
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
5302
5303 /* EXP is some logical combination of boolean tests. See if we can
5304 merge it into some range test. Return the new tree if so. */
5305
5306 static tree
5307 fold_range_test (location_t loc, enum tree_code code, tree type,
5308 tree op0, tree op1)
5309 {
5310 int or_op = (code == TRUTH_ORIF_EXPR
5311 || code == TRUTH_OR_EXPR);
5312 int in0_p, in1_p, in_p;
5313 tree low0, low1, low, high0, high1, high;
5314 bool strict_overflow_p = false;
5315 tree tem, lhs, rhs;
5316 const char * const warnmsg = G_("assuming signed overflow does not occur "
5317 "when simplifying range test");
5318
5319 if (!INTEGRAL_TYPE_P (type))
5320 return 0;
5321
5322 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5323 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5324
5325 /* If this is an OR operation, invert both sides; we will invert
5326 again at the end. */
5327 if (or_op)
5328 in0_p = ! in0_p, in1_p = ! in1_p;
5329
5330 /* If both expressions are the same, if we can merge the ranges, and we
5331 can build the range test, return it or it inverted. If one of the
5332 ranges is always true or always false, consider it to be the same
5333 expression as the other. */
5334 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5335 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5336 in1_p, low1, high1)
5337 && 0 != (tem = (build_range_check (loc, type,
5338 lhs != 0 ? lhs
5339 : rhs != 0 ? rhs : integer_zero_node,
5340 in_p, low, high))))
5341 {
5342 if (strict_overflow_p)
5343 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5344 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5345 }
5346
5347 /* On machines where the branch cost is expensive, if this is a
5348 short-circuited branch and the underlying object on both sides
5349 is the same, make a non-short-circuit operation. */
5350 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5351 && lhs != 0 && rhs != 0
5352 && (code == TRUTH_ANDIF_EXPR
5353 || code == TRUTH_ORIF_EXPR)
5354 && operand_equal_p (lhs, rhs, 0))
5355 {
5356 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5357 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5358 which cases we can't do this. */
5359 if (simple_operand_p (lhs))
5360 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5361 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5362 type, op0, op1);
5363
5364 else if (!lang_hooks.decls.global_bindings_p ()
5365 && !CONTAINS_PLACEHOLDER_P (lhs))
5366 {
5367 tree common = save_expr (lhs);
5368
5369 if (0 != (lhs = build_range_check (loc, type, common,
5370 or_op ? ! in0_p : in0_p,
5371 low0, high0))
5372 && (0 != (rhs = build_range_check (loc, type, common,
5373 or_op ? ! in1_p : in1_p,
5374 low1, high1))))
5375 {
5376 if (strict_overflow_p)
5377 fold_overflow_warning (warnmsg,
5378 WARN_STRICT_OVERFLOW_COMPARISON);
5379 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5380 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5381 type, lhs, rhs);
5382 }
5383 }
5384 }
5385
5386 return 0;
5387 }
5388 \f
5389 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5390 bit value. Arrange things so the extra bits will be set to zero if and
5391 only if C is signed-extended to its full width. If MASK is nonzero,
5392 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5393
5394 static tree
5395 unextend (tree c, int p, int unsignedp, tree mask)
5396 {
5397 tree type = TREE_TYPE (c);
5398 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5399 tree temp;
5400
5401 if (p == modesize || unsignedp)
5402 return c;
5403
5404 /* We work by getting just the sign bit into the low-order bit, then
5405 into the high-order bit, then sign-extend. We then XOR that value
5406 with C. */
5407 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5408
5409 /* We must use a signed type in order to get an arithmetic right shift.
5410 However, we must also avoid introducing accidental overflows, so that
5411 a subsequent call to integer_zerop will work. Hence we must
5412 do the type conversion here. At this point, the constant is either
5413 zero or one, and the conversion to a signed type can never overflow.
5414 We could get an overflow if this conversion is done anywhere else. */
5415 if (TYPE_UNSIGNED (type))
5416 temp = fold_convert (signed_type_for (type), temp);
5417
5418 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5419 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5420 if (mask != 0)
5421 temp = const_binop (BIT_AND_EXPR, temp,
5422 fold_convert (TREE_TYPE (c), mask));
5423 /* If necessary, convert the type back to match the type of C. */
5424 if (TYPE_UNSIGNED (type))
5425 temp = fold_convert (type, temp);
5426
5427 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5428 }
5429 \f
5430 /* For an expression that has the form
5431 (A && B) || ~B
5432 or
5433 (A || B) && ~B,
5434 we can drop one of the inner expressions and simplify to
5435 A || ~B
5436 or
5437 A && ~B
5438 LOC is the location of the resulting expression. OP is the inner
5439 logical operation; the left-hand side in the examples above, while CMPOP
5440 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5441 removing a condition that guards another, as in
5442 (A != NULL && A->...) || A == NULL
5443 which we must not transform. If RHS_ONLY is true, only eliminate the
5444 right-most operand of the inner logical operation. */
5445
5446 static tree
5447 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5448 bool rhs_only)
5449 {
5450 tree type = TREE_TYPE (cmpop);
5451 enum tree_code code = TREE_CODE (cmpop);
5452 enum tree_code truthop_code = TREE_CODE (op);
5453 tree lhs = TREE_OPERAND (op, 0);
5454 tree rhs = TREE_OPERAND (op, 1);
5455 tree orig_lhs = lhs, orig_rhs = rhs;
5456 enum tree_code rhs_code = TREE_CODE (rhs);
5457 enum tree_code lhs_code = TREE_CODE (lhs);
5458 enum tree_code inv_code;
5459
5460 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5461 return NULL_TREE;
5462
5463 if (TREE_CODE_CLASS (code) != tcc_comparison)
5464 return NULL_TREE;
5465
5466 if (rhs_code == truthop_code)
5467 {
5468 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5469 if (newrhs != NULL_TREE)
5470 {
5471 rhs = newrhs;
5472 rhs_code = TREE_CODE (rhs);
5473 }
5474 }
5475 if (lhs_code == truthop_code && !rhs_only)
5476 {
5477 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5478 if (newlhs != NULL_TREE)
5479 {
5480 lhs = newlhs;
5481 lhs_code = TREE_CODE (lhs);
5482 }
5483 }
5484
5485 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5486 if (inv_code == rhs_code
5487 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5488 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5489 return lhs;
5490 if (!rhs_only && inv_code == lhs_code
5491 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5492 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5493 return rhs;
5494 if (rhs != orig_rhs || lhs != orig_lhs)
5495 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5496 lhs, rhs);
5497 return NULL_TREE;
5498 }
5499
5500 /* Find ways of folding logical expressions of LHS and RHS:
5501 Try to merge two comparisons to the same innermost item.
5502 Look for range tests like "ch >= '0' && ch <= '9'".
5503 Look for combinations of simple terms on machines with expensive branches
5504 and evaluate the RHS unconditionally.
5505
5506 For example, if we have p->a == 2 && p->b == 4 and we can make an
5507 object large enough to span both A and B, we can do this with a comparison
5508 against the object ANDed with the a mask.
5509
5510 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5511 operations to do this with one comparison.
5512
5513 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5514 function and the one above.
5515
5516 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5517 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5518
5519 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5520 two operands.
5521
5522 We return the simplified tree or 0 if no optimization is possible. */
5523
static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  /* Naming scheme for the per-operand variables below: the first
     letter selects the outer comparison (l = LHS, r = RHS of the
     logical operation), the second letter selects that comparison's
     own operand (l = left, r = right).  So lr_* describes the right
     operand of the left comparison, and so on.  */
  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
  machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
	{
          result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  /* Operands appear swapped between the two comparisons;
	     swap the right comparison's code to compensate.  */
          result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &ll_reversep, &volatilep,
				     &ll_mask, &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &lr_reversep, &volatilep,
				     &lr_mask, &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &rl_reversep, &volatilep,
				     &rl_mask, &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &rr_reversep, &volatilep,
				     &rr_mask, &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep
      || ll_reversep != rl_reversep
      || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    {
      /* Both right-hand sides are constants; lr_reversep is unused for
	 field access in this case, so mirror ll_reversep.  */
      l_const = lr_arg, r_const = rr_arg;
      lr_reversep = ll_reversep;
    }
  else if (lr_reversep != rr_reversep
	   || lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  /* Bit positions are counted from the opposite end on big-endian
     targets (or little-endian when the storage order is reversed).  */
  if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      /* If the constant has bits set outside the field's mask, the
	 comparison has a known, constant result.  */
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
						     lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
						     lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp, ll_reversep);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp, lr_reversep);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos),
				    ll_unsignedp, ll_reversep);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos),
				    lr_unsignedp, lr_reversep);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp, ll_reversep);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
5938 \f
5939 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5940 constant. */
5941
5942 static tree
5943 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5944 tree op0, tree op1)
5945 {
5946 tree arg0 = op0;
5947 enum tree_code op_code;
5948 tree comp_const;
5949 tree minmax_const;
5950 int consts_equal, consts_lt;
5951 tree inner;
5952
5953 STRIP_SIGN_NOPS (arg0);
5954
5955 op_code = TREE_CODE (arg0);
5956 minmax_const = TREE_OPERAND (arg0, 1);
5957 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5958 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5959 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5960 inner = TREE_OPERAND (arg0, 0);
5961
5962 /* If something does not permit us to optimize, return the original tree. */
5963 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5964 || TREE_CODE (comp_const) != INTEGER_CST
5965 || TREE_OVERFLOW (comp_const)
5966 || TREE_CODE (minmax_const) != INTEGER_CST
5967 || TREE_OVERFLOW (minmax_const))
5968 return NULL_TREE;
5969
5970 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5971 and GT_EXPR, doing the rest with recursive calls using logical
5972 simplifications. */
5973 switch (code)
5974 {
5975 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5976 {
5977 tree tem
5978 = optimize_minmax_comparison (loc,
5979 invert_tree_comparison (code, false),
5980 type, op0, op1);
5981 if (tem)
5982 return invert_truthvalue_loc (loc, tem);
5983 return NULL_TREE;
5984 }
5985
5986 case GE_EXPR:
5987 return
5988 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5989 optimize_minmax_comparison
5990 (loc, EQ_EXPR, type, arg0, comp_const),
5991 optimize_minmax_comparison
5992 (loc, GT_EXPR, type, arg0, comp_const));
5993
5994 case EQ_EXPR:
5995 if (op_code == MAX_EXPR && consts_equal)
5996 /* MAX (X, 0) == 0 -> X <= 0 */
5997 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5998
5999 else if (op_code == MAX_EXPR && consts_lt)
6000 /* MAX (X, 0) == 5 -> X == 5 */
6001 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6002
6003 else if (op_code == MAX_EXPR)
6004 /* MAX (X, 0) == -1 -> false */
6005 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6006
6007 else if (consts_equal)
6008 /* MIN (X, 0) == 0 -> X >= 0 */
6009 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6010
6011 else if (consts_lt)
6012 /* MIN (X, 0) == 5 -> false */
6013 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6014
6015 else
6016 /* MIN (X, 0) == -1 -> X == -1 */
6017 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6018
6019 case GT_EXPR:
6020 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6021 /* MAX (X, 0) > 0 -> X > 0
6022 MAX (X, 0) > 5 -> X > 5 */
6023 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6024
6025 else if (op_code == MAX_EXPR)
6026 /* MAX (X, 0) > -1 -> true */
6027 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6028
6029 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6030 /* MIN (X, 0) > 0 -> false
6031 MIN (X, 0) > 5 -> false */
6032 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6033
6034 else
6035 /* MIN (X, 0) > -1 -> X > -1 */
6036 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6037
6038 default:
6039 return NULL_TREE;
6040 }
6041 }
6042 \f
6043 /* T is an integer expression that is being multiplied, divided, or taken a
6044 modulus (CODE says which and what kind of divide or modulus) by a
6045 constant C. See if we can eliminate that operation by folding it with
6046 other operations already in T. WIDE_TYPE, if non-null, is a type that
6047 should be used for the computation if wider than our type.
6048
6049 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6050 (X * 2) + (Y * 4). We must, however, be assured that either the original
6051 expression would not overflow or that overflow is undefined for the type
6052 in the language in question.
6053
6054 If we return a non-null expression, it is an equivalent form of the
6055 original computation, but need not be in the original type.
6056
6057 We set *STRICT_OVERFLOW_P to true if the return values depends on
6058 signed overflow being undefined. Otherwise we do not change
6059 *STRICT_OVERFLOW_P. */
6060
6061 static tree
6062 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6063 bool *strict_overflow_p)
6064 {
6065 /* To avoid exponential search depth, refuse to allow recursion past
6066 three levels. Beyond that (1) it's highly unlikely that we'll find
6067 something interesting and (2) we've probably processed it before
6068 when we built the inner expression. */
6069
6070 static int depth;
6071 tree ret;
6072
6073 if (depth > 3)
6074 return NULL;
6075
6076 depth++;
6077 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6078 depth--;
6079
6080 return ret;
6081 }
6082
/* Worker for extract_muldiv; see the comment before that function for
   the full contract.  Dispatches on the tree code of T and attempts to
   distribute the operation CODE by constant C into T's operands.
   Returns the rewritten tree (possibly in a wider type), or 0.  */

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  /* Compute in WIDE_TYPE when it is strictly wider than T's type.  */
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
	{
	  tree tem = const_binop (code, fold_convert (ctype, t),
				  fold_convert (ctype, c));
	  /* If the multiplication overflowed to INT_MIN then we lost sign
	     information on it and a subsequent multiplication might
	     spuriously overflow.  See PR68142.  */
	  if (TREE_OVERFLOW (tem)
	      && wi::eq_p (tem, wi::min_value (TYPE_PRECISION (ctype), SIGNED)))
	    return NULL_TREE;
	  return tem;
	}
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
		&& TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
		   && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
	 For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
	break;
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  /* Dividing by a negative constant reverses the ordering, so
	     MIN and MAX swap roles.  */
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      std::swap (op0, op1);
	      std::swap (t1, t2);
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  bool overflow_p = false;
	  bool overflow_mul_p;
	  signop sign = TYPE_SIGN (ctype);
	  unsigned prec = TYPE_PRECISION (ctype);
	  wide_int mul = wi::mul (wide_int::from (op1, prec, sign),
				  wide_int::from (c, prec, sign),
				  sign, &overflow_mul_p);
	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
	  if (overflow_mul_p
	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
	    overflow_p = true;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				wide_int_to_tree (ctype, mul));
	}

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation or CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
6433 \f
6434 /* Return a node which has the indicated constant VALUE (either 0 or
6435 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6436 and is of the indicated TYPE. */
6437
6438 tree
6439 constant_boolean_node (bool value, tree type)
6440 {
6441 if (type == integer_type_node)
6442 return value ? integer_one_node : integer_zero_node;
6443 else if (type == boolean_type_node)
6444 return value ? boolean_true_node : boolean_false_node;
6445 else if (TREE_CODE (type) == VECTOR_TYPE)
6446 return build_vector_from_val (type,
6447 build_int_cst (TREE_TYPE (type),
6448 value ? -1 : 0));
6449 else
6450 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6451 }
6452
6453
6454 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6455 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6456 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6457 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6458 COND is the first argument to CODE; otherwise (as in the example
6459 given here), it is the second argument. TYPE is the type of the
6460 original expression. Return NULL_TREE if no simplification is
6461 possible. */
6462
6463 static tree
6464 fold_binary_op_with_conditional_arg (location_t loc,
6465 enum tree_code code,
6466 tree type, tree op0, tree op1,
6467 tree cond, tree arg, int cond_first_p)
6468 {
6469 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6470 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6471 tree test, true_value, false_value;
6472 tree lhs = NULL_TREE;
6473 tree rhs = NULL_TREE;
6474 enum tree_code cond_code = COND_EXPR;
6475
6476 if (TREE_CODE (cond) == COND_EXPR
6477 || TREE_CODE (cond) == VEC_COND_EXPR)
6478 {
6479 test = TREE_OPERAND (cond, 0);
6480 true_value = TREE_OPERAND (cond, 1);
6481 false_value = TREE_OPERAND (cond, 2);
6482 /* If this operand throws an expression, then it does not make
6483 sense to try to perform a logical or arithmetic operation
6484 involving it. */
6485 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6486 lhs = true_value;
6487 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6488 rhs = false_value;
6489 }
6490 else if (!(TREE_CODE (type) != VECTOR_TYPE
6491 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6492 {
6493 tree testtype = TREE_TYPE (cond);
6494 test = cond;
6495 true_value = constant_boolean_node (true, testtype);
6496 false_value = constant_boolean_node (false, testtype);
6497 }
6498 else
6499 /* Detect the case of mixing vector and scalar types - bail out. */
6500 return NULL_TREE;
6501
6502 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6503 cond_code = VEC_COND_EXPR;
6504
6505 /* This transformation is only worthwhile if we don't have to wrap ARG
6506 in a SAVE_EXPR and the operation can be simplified without recursing
6507 on at least one of the branches once its pushed inside the COND_EXPR. */
6508 if (!TREE_CONSTANT (arg)
6509 && (TREE_SIDE_EFFECTS (arg)
6510 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6511 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6512 return NULL_TREE;
6513
6514 arg = fold_convert_loc (loc, arg_type, arg);
6515 if (lhs == 0)
6516 {
6517 true_value = fold_convert_loc (loc, cond_type, true_value);
6518 if (cond_first_p)
6519 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6520 else
6521 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6522 }
6523 if (rhs == 0)
6524 {
6525 false_value = fold_convert_loc (loc, cond_type, false_value);
6526 if (cond_first_p)
6527 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6528 else
6529 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6530 }
6531
6532 /* Check that we have simplified at least one of the branches. */
6533 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6534 return NULL_TREE;
6535
6536 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6537 }
6538
6539 \f
6540 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6541
6542 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6543 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6544 ADDEND is the same as X.
6545
6546 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6547 and finite. The problematic cases are when X is zero, and its mode
6548 has signed zeros. In the case of rounding towards -infinity,
6549 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6550 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6551
6552 bool
6553 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6554 {
6555 if (!real_zerop (addend))
6556 return false;
6557
6558 /* Don't allow the fold with -fsignaling-nans. */
6559 if (HONOR_SNANS (element_mode (type)))
6560 return false;
6561
6562 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6563 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6564 return true;
6565
6566 /* In a vector or complex, we would need to check the sign of all zeros. */
6567 if (TREE_CODE (addend) != REAL_CST)
6568 return false;
6569
6570 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6571 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6572 negate = !negate;
6573
6574 /* The mode has signed zeros, and we have to honor their sign.
6575 In this situation, there is only one case we can return true for.
6576 X - 0 is the same as X unless rounding towards -infinity is
6577 supported. */
6578 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6579 }
6580
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  /* [LO, HI] is built up to be the range of ARG00 values for which
     ARG00 / ARG01 equals ARG1; the final comparison is rewritten as a
     range check against it.  */
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  signop sign = TYPE_SIGN (TREE_TYPE (arg0));
  bool neg_overflow = false;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  wide_int val = wi::mul (arg01, arg1, sign, &overflow);
  prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (sign == UNSIGNED)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = wi::add (prod, tmp, sign, &overflow);
      hi = force_fit_type (TREE_TYPE (arg00), val,
			   -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  /* Overflow while computing LO means the true bound is below
	     the type's minimum.  */
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case  0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case  0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* An overflowed bound means the range is open on that side; fall
     back to a one-sided comparison or a constant result.  */
  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
6734
6735
6736 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6737 equality/inequality test, then return a simplified form of the test
6738 using a sign testing. Otherwise return NULL. TYPE is the desired
6739 result type. */
6740
6741 static tree
6742 fold_single_bit_test_into_sign_test (location_t loc,
6743 enum tree_code code, tree arg0, tree arg1,
6744 tree result_type)
6745 {
6746 /* If this is testing a single bit, we can optimize the test. */
6747 if ((code == NE_EXPR || code == EQ_EXPR)
6748 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6749 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6750 {
6751 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6752 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6753 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6754
6755 if (arg00 != NULL_TREE
6756 /* This is only a win if casting to a signed type is cheap,
6757 i.e. when arg00's type is not a partial mode. */
6758 && TYPE_PRECISION (TREE_TYPE (arg00))
6759 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6760 {
6761 tree stype = signed_type_for (TREE_TYPE (arg00));
6762 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6763 result_type,
6764 fold_convert_loc (loc, stype, arg00),
6765 build_int_cst (stype, 0));
6766 }
6767 }
6768
6769 return NULL_TREE;
6770 }
6771
6772 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6773 equality/inequality test, then return a simplified form of
6774 the test using shifts and logical operations. Otherwise return
6775 NULL. TYPE is the desired result type. */
6776
tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      /* Index of the single set bit in the power-of-two mask.  */
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && bitnum < TYPE_PRECISION (type)
	  && wi::ltu_p (TREE_OPERAND (inner, 1),
			TYPE_PRECISION (type) - bitnum))
	{
	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      /* Move the tested bit down to bit position zero.  */
      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      /* For EQ_EXPR the sense of the test is inverted, so flip the
	 low-order bit with an XOR before masking.  */
      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
6847
6848 /* Check whether we are allowed to reorder operands arg0 and arg1,
6849 such that the evaluation of arg1 occurs before arg0. */
6850
6851 static bool
6852 reorder_operands_p (const_tree arg0, const_tree arg1)
6853 {
6854 if (! flag_evaluation_order)
6855 return true;
6856 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6857 return true;
6858 return ! TREE_SIDE_EFFECTS (arg0)
6859 && ! TREE_SIDE_EFFECTS (arg1);
6860 }
6861
6862 /* Test whether it is preferable two swap two operands, ARG0 and
6863 ARG1, for example because ARG0 is an integer constant and ARG1
6864 isn't. If REORDER is true, only recommend swapping if we can
6865 evaluate the operands in reverse order. */
6866
6867 bool
6868 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6869 {
6870 if (CONSTANT_CLASS_P (arg1))
6871 return 0;
6872 if (CONSTANT_CLASS_P (arg0))
6873 return 1;
6874
6875 STRIP_NOPS (arg0);
6876 STRIP_NOPS (arg1);
6877
6878 if (TREE_CONSTANT (arg1))
6879 return 0;
6880 if (TREE_CONSTANT (arg0))
6881 return 1;
6882
6883 if (reorder && flag_evaluation_order
6884 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6885 return 0;
6886
6887 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6888 for commutative and comparison operators. Ensuring a canonical
6889 form allows the optimizers to find additional redundancies without
6890 having to explicitly check for both orderings. */
6891 if (TREE_CODE (arg0) == SSA_NAME
6892 && TREE_CODE (arg1) == SSA_NAME
6893 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6894 return 1;
6895
6896 /* Put SSA_NAMEs last. */
6897 if (TREE_CODE (arg1) == SSA_NAME)
6898 return 0;
6899 if (TREE_CODE (arg0) == SSA_NAME)
6900 return 1;
6901
6902 /* Put variables last. */
6903 if (DECL_P (arg1))
6904 return 0;
6905 if (DECL_P (arg0))
6906 return 1;
6907
6908 return 0;
6909 }
6910
6911
6912 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6913 means A >= Y && A != MAX, but in this case we know that
6914 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6915
6916 static tree
6917 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6918 {
6919 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6920
6921 if (TREE_CODE (bound) == LT_EXPR)
6922 a = TREE_OPERAND (bound, 0);
6923 else if (TREE_CODE (bound) == GT_EXPR)
6924 a = TREE_OPERAND (bound, 1);
6925 else
6926 return NULL_TREE;
6927
6928 typea = TREE_TYPE (a);
6929 if (!INTEGRAL_TYPE_P (typea)
6930 && !POINTER_TYPE_P (typea))
6931 return NULL_TREE;
6932
6933 if (TREE_CODE (ineq) == LT_EXPR)
6934 {
6935 a1 = TREE_OPERAND (ineq, 1);
6936 y = TREE_OPERAND (ineq, 0);
6937 }
6938 else if (TREE_CODE (ineq) == GT_EXPR)
6939 {
6940 a1 = TREE_OPERAND (ineq, 0);
6941 y = TREE_OPERAND (ineq, 1);
6942 }
6943 else
6944 return NULL_TREE;
6945
6946 if (TREE_TYPE (a1) != typea)
6947 return NULL_TREE;
6948
6949 if (POINTER_TYPE_P (typea))
6950 {
6951 /* Convert the pointer types into integer before taking the difference. */
6952 tree ta = fold_convert_loc (loc, ssizetype, a);
6953 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6954 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6955 }
6956 else
6957 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6958
6959 if (!diff || !integer_onep (diff))
6960 return NULL_TREE;
6961
6962 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6963 }
6964
6965 /* Fold a sum or difference of at least one multiplication.
6966 Returns the folded tree or NULL if no simplification could be made. */
6967
static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  /* Factors of the two (possibly synthesized) multiplications:
     arg0 == arg00 * arg01 and arg1 == arg10 * arg11.  */
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      /* Treat a plain constant C as 1 * C.  */
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      /* Treat any other operand A as A * 1.  */
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  /* Look for an identical factor on both sides; SAME is that factor,
     ALT0/ALT1 the remaining ones to be combined with CODE.  */
  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (tree_fits_shwi_p (arg01)
	   && tree_fits_shwi_p (arg11))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = tree_to_shwi (arg01);
      int11 = tree_to_shwi (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  /* Rewrite arg00 * int01 as (arg00 * (int01 / int11)) * int11.  */
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  /* Undo the operand swap performed above.  */
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  /* (alt0 CODE alt1) * same, with everything converted to TYPE.  */
  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
7086
7087 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7088 specified by EXPR into the buffer PTR of length LEN bytes.
7089 Return the number of bytes placed in the buffer, or zero
7090 upon failure. */
7091
static int
native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  /* OFF == -1 means "encode the whole constant"; otherwise OFF is the
     starting byte offset within the constant's representation.  */
  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
	 number of bytes.  */
      value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);

      /* Map BYTE (little-endian byte index into the value) to OFFSET,
	 the target-memory position, honoring word and byte order.  */
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      /* Only store bytes that land inside the requested window.  */
      if (offset >= off
	  && offset - off < len)
	ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
7133
7134
7135 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7136 specified by EXPR into the buffer PTR of length LEN bytes.
7137 Return the number of bytes placed in the buffer, or zero
7138 upon failure. */
7139
7140 static int
7141 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7142 {
7143 tree type = TREE_TYPE (expr);
7144 machine_mode mode = TYPE_MODE (type);
7145 int total_bytes = GET_MODE_SIZE (mode);
7146 FIXED_VALUE_TYPE value;
7147 tree i_value, i_type;
7148
7149 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7150 return 0;
7151
7152 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7153
7154 if (NULL_TREE == i_type
7155 || TYPE_PRECISION (i_type) != total_bytes)
7156 return 0;
7157
7158 value = TREE_FIXED_CST (expr);
7159 i_value = double_int_to_tree (i_type, value.data);
7160
7161 return native_encode_int (i_value, ptr, len, off);
7162 }
7163
7164
7165 /* Subroutine of native_encode_expr. Encode the REAL_CST
7166 specified by EXPR into the buffer PTR of length LEN bytes.
7167 Return the number of bytes placed in the buffer, or zero
7168 upon failure. */
7169
static int
native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  /* OFF == -1 means "encode the whole constant"; otherwise OFF is the
     starting byte offset within the representation.  */
  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  /* Number of target words per 32-bit host long.  */
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      /* BYTE indexes within the current 32-bit long of TMP.  */
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      /* Compute the in-memory position of this byte within its long,
	 honoring word and byte order.  */
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      /* Add back the base offset of the containing 32-bit long.  */
      offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
      /* Only store bytes that land inside the requested window.  */
      if (offset >= off
	  && offset - off < len)
	ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
7218
7219 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7220 specified by EXPR into the buffer PTR of length LEN bytes.
7221 Return the number of bytes placed in the buffer, or zero
7222 upon failure. */
7223
static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
{
  int rsize, isize;
  tree part;

  /* Encode the real part first; with OFF == -1 ("whole value")
     a zero result is a hard failure.  */
  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len, off);
  if (off == -1
      && rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  /* For a partial encoding, shift OFF past the real part so the
     imaginary part starts at the right offset (clamped at 0).  */
  if (off != -1)
    off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
  isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
  /* When encoding the whole value both halves must be the same size.  */
  if (off == -1
      && isize != rsize)
    return 0;
  return rsize + isize;
}
7244
7245
7246 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7247 specified by EXPR into the buffer PTR of length LEN bytes.
7248 Return the number of bytes placed in the buffer, or zero
7249 upon failure. */
7250
static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  /* Size in bytes of one vector element.  */
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      /* Skip whole elements that lie before the requested offset.
	 (When OFF is -1 this test is never true.)  */
      if (off >= size)
	{
	  off -= size;
	  continue;
	}
      elem = VECTOR_CST_ELT (expr, i);
      int res = native_encode_expr (elem, ptr+offset, len-offset, off);
      /* A full encoding (OFF == -1) must produce exactly SIZE bytes
	 per element; any zero result is a failure.  */
      if ((off == -1 && res != size)
	  || res == 0)
	return 0;
      offset += res;
      /* Stop once the output buffer is full.  */
      if (offset >= len)
	return offset;
      /* After the first (possibly partial) element, continue from the
	 start of each subsequent element.  */
      if (off != -1)
	off = 0;
    }
  return offset;
}
7282
7283
7284 /* Subroutine of native_encode_expr. Encode the STRING_CST
7285 specified by EXPR into the buffer PTR of length LEN bytes.
7286 Return the number of bytes placed in the buffer, or zero
7287 upon failure. */
7288
static int
native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  /* Only handle arrays of single-byte integer elements with a
     known constant size.  */
  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
    return 0;
  total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
  /* OFF == -1 means "encode the whole object"; otherwise OFF is the
     starting byte offset.  */
  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  /* The array type may be larger than the string literal; bytes past
     TREE_STRING_LENGTH are implicitly zero and must be filled in.  */
  if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
    {
      int written = 0;
      if (off < TREE_STRING_LENGTH (expr))
	{
	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
	}
      /* Zero-pad the remainder of the requested range.  */
      memset (ptr + written, 0,
	      MIN (total_bytes - written, len - written));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
  return MIN (total_bytes - off, len);
}
7321
7322
7323 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7324 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7325 buffer PTR of length LEN bytes. If OFF is not -1 then start
7326 the encoding at byte offset OFF and encode at most LEN bytes.
7327 Return the number of bytes placed in the buffer, or zero upon failure. */
7328
7329 int
7330 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7331 {
7332 /* We don't support starting at negative offset and -1 is special. */
7333 if (off < -1)
7334 return 0;
7335
7336 switch (TREE_CODE (expr))
7337 {
7338 case INTEGER_CST:
7339 return native_encode_int (expr, ptr, len, off);
7340
7341 case REAL_CST:
7342 return native_encode_real (expr, ptr, len, off);
7343
7344 case FIXED_CST:
7345 return native_encode_fixed (expr, ptr, len, off);
7346
7347 case COMPLEX_CST:
7348 return native_encode_complex (expr, ptr, len, off);
7349
7350 case VECTOR_CST:
7351 return native_encode_vector (expr, ptr, len, off);
7352
7353 case STRING_CST:
7354 return native_encode_string (expr, ptr, len, off);
7355
7356 default:
7357 return 0;
7358 }
7359 }
7360
7361
7362 /* Subroutine of native_interpret_expr. Interpret the contents of
7363 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7364 If the buffer cannot be interpreted, return NULL_TREE. */
7365
7366 static tree
7367 native_interpret_int (tree type, const unsigned char *ptr, int len)
7368 {
7369 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7370
7371 if (total_bytes > len
7372 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7373 return NULL_TREE;
7374
7375 wide_int result = wi::from_buffer (ptr, total_bytes);
7376
7377 return wide_int_to_tree (type, result);
7378 }
7379
7380
7381 /* Subroutine of native_interpret_expr. Interpret the contents of
7382 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7383 If the buffer cannot be interpreted, return NULL_TREE. */
7384
7385 static tree
7386 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7387 {
7388 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7389 double_int result;
7390 FIXED_VALUE_TYPE fixed_value;
7391
7392 if (total_bytes > len
7393 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7394 return NULL_TREE;
7395
7396 result = double_int::from_buffer (ptr, total_bytes);
7397 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7398
7399 return build_fixed (type, fixed_value);
7400 }
7401
7402
7403 /* Subroutine of native_interpret_expr. Interpret the contents of
7404 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7405 If the buffer cannot be interpreted, return NULL_TREE. */
7406
static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  /* 24 bytes == 192 bits is the largest representation handled.  */
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  /* Number of target words per 32-bit host long.  */
  int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      /* Both OFFSET and BYTE index within a long;
	 bitpos indexes the whole float.  */
      int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  int word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	{
	  offset = byte;
	  if (BYTES_BIG_ENDIAN)
	    {
	      /* Reverse bytes within each long, or within the entire float
		 if it's smaller than a long (for HFmode).  */
	      offset = MIN (3, total_bytes - 1) - offset;
	      gcc_assert (offset >= 0);
	    }
	}
      /* Add back the base offset of the containing 32-bit long and
	 merge the byte into TMP.  */
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  /* Let the real-number machinery decode the target representation.  */
  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
7461
7462
7463 /* Subroutine of native_interpret_expr. Interpret the contents of
7464 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7465 If the buffer cannot be interpreted, return NULL_TREE. */
7466
7467 static tree
7468 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7469 {
7470 tree etype, rpart, ipart;
7471 int size;
7472
7473 etype = TREE_TYPE (type);
7474 size = GET_MODE_SIZE (TYPE_MODE (etype));
7475 if (size * 2 > len)
7476 return NULL_TREE;
7477 rpart = native_interpret_expr (etype, ptr, size);
7478 if (!rpart)
7479 return NULL_TREE;
7480 ipart = native_interpret_expr (etype, ptr+size, size);
7481 if (!ipart)
7482 return NULL_TREE;
7483 return build_complex (type, rpart, ipart);
7484 }
7485
7486
7487 /* Subroutine of native_interpret_expr. Interpret the contents of
7488 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7489 If the buffer cannot be interpreted, return NULL_TREE. */
7490
7491 static tree
7492 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7493 {
7494 tree etype, elem;
7495 int i, size, count;
7496 tree *elements;
7497
7498 etype = TREE_TYPE (type);
7499 size = GET_MODE_SIZE (TYPE_MODE (etype));
7500 count = TYPE_VECTOR_SUBPARTS (type);
7501 if (size * count > len)
7502 return NULL_TREE;
7503
7504 elements = XALLOCAVEC (tree, count);
7505 for (i = count - 1; i >= 0; i--)
7506 {
7507 elem = native_interpret_expr (etype, ptr+(i*size), size);
7508 if (!elem)
7509 return NULL_TREE;
7510 elements[i] = elem;
7511 }
7512 return build_vector (type, elements);
7513 }
7514
7515
7516 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7517 the buffer PTR of length LEN as a constant of type TYPE. For
7518 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7519 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7520 return NULL_TREE. */
7521
7522 tree
7523 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7524 {
7525 switch (TREE_CODE (type))
7526 {
7527 case INTEGER_TYPE:
7528 case ENUMERAL_TYPE:
7529 case BOOLEAN_TYPE:
7530 case POINTER_TYPE:
7531 case REFERENCE_TYPE:
7532 return native_interpret_int (type, ptr, len);
7533
7534 case REAL_TYPE:
7535 return native_interpret_real (type, ptr, len);
7536
7537 case FIXED_POINT_TYPE:
7538 return native_interpret_fixed (type, ptr, len);
7539
7540 case COMPLEX_TYPE:
7541 return native_interpret_complex (type, ptr, len);
7542
7543 case VECTOR_TYPE:
7544 return native_interpret_vector (type, ptr, len);
7545
7546 default:
7547 return NULL_TREE;
7548 }
7549 }
7550
7551 /* Returns true if we can interpret the contents of a native encoding
7552 as TYPE. */
7553
7554 static bool
7555 can_native_interpret_type_p (tree type)
7556 {
7557 switch (TREE_CODE (type))
7558 {
7559 case INTEGER_TYPE:
7560 case ENUMERAL_TYPE:
7561 case BOOLEAN_TYPE:
7562 case POINTER_TYPE:
7563 case REFERENCE_TYPE:
7564 case FIXED_POINT_TYPE:
7565 case REAL_TYPE:
7566 case COMPLEX_TYPE:
7567 case VECTOR_TYPE:
7568 return true;
7569 default:
7570 return false;
7571 }
7572 }
7573
7574 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7575 TYPE at compile-time. If we're unable to perform the conversion
7576 return NULL_TREE. */
7577
7578 static tree
7579 fold_view_convert_expr (tree type, tree expr)
7580 {
7581 /* We support up to 512-bit values (for V8DFmode). */
7582 unsigned char buffer[64];
7583 int len;
7584
7585 /* Check that the host and target are sane. */
7586 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7587 return NULL_TREE;
7588
7589 len = native_encode_expr (expr, buffer, sizeof (buffer));
7590 if (len == 0)
7591 return NULL_TREE;
7592
7593 return native_interpret_expr (type, buffer, len);
7594 }
7595
7596 /* Build an expression for the address of T. Folds away INDIRECT_REF
7597 to avoid confusing the gimplify process. */
7598
tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* &*p is just p, possibly converted to PTRTYPE.  */
  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  /* &MEM[p, 0] is just p.  */
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  /* &MEM[cst, ofs] folds to the constant pointer cst + ofs.  */
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  /* The address of a view-conversion is the (converted) address of
     the underlying object.  */
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  /* Otherwise build a plain ADDR_EXPR.  */
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
7633
7634 /* Build an expression for the address of T. */
7635
7636 tree
7637 build_fold_addr_expr_loc (location_t loc, tree t)
7638 {
7639 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7640
7641 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7642 }
7643
7644 /* Fold a unary expression of code CODE and type TYPE with operand
7645 OP0. Return the folded expression if folding is successful.
7646 Otherwise, return NULL_TREE. */
7647
7648 tree
7649 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7650 {
7651 tree tem;
7652 tree arg0;
7653 enum tree_code_class kind = TREE_CODE_CLASS (code);
7654
7655 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7656 && TREE_CODE_LENGTH (code) == 1);
7657
7658 arg0 = op0;
7659 if (arg0)
7660 {
7661 if (CONVERT_EXPR_CODE_P (code)
7662 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7663 {
7664 /* Don't use STRIP_NOPS, because signedness of argument type
7665 matters. */
7666 STRIP_SIGN_NOPS (arg0);
7667 }
7668 else
7669 {
7670 /* Strip any conversions that don't change the mode. This
7671 is safe for every expression, except for a comparison
7672 expression because its signedness is derived from its
7673 operands.
7674
7675 Note that this is done as an internal manipulation within
7676 the constant folder, in order to find the simplest
7677 representation of the arguments so that their form can be
7678 studied. In any cases, the appropriate type conversions
7679 should be put back in the tree that will get out of the
7680 constant folder. */
7681 STRIP_NOPS (arg0);
7682 }
7683
7684 if (CONSTANT_CLASS_P (arg0))
7685 {
7686 tree tem = const_unop (code, type, arg0);
7687 if (tem)
7688 {
7689 if (TREE_TYPE (tem) != type)
7690 tem = fold_convert_loc (loc, type, tem);
7691 return tem;
7692 }
7693 }
7694 }
7695
7696 tem = generic_simplify (loc, code, type, op0);
7697 if (tem)
7698 return tem;
7699
7700 if (TREE_CODE_CLASS (code) == tcc_unary)
7701 {
7702 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7703 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7704 fold_build1_loc (loc, code, type,
7705 fold_convert_loc (loc, TREE_TYPE (op0),
7706 TREE_OPERAND (arg0, 1))));
7707 else if (TREE_CODE (arg0) == COND_EXPR)
7708 {
7709 tree arg01 = TREE_OPERAND (arg0, 1);
7710 tree arg02 = TREE_OPERAND (arg0, 2);
7711 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7712 arg01 = fold_build1_loc (loc, code, type,
7713 fold_convert_loc (loc,
7714 TREE_TYPE (op0), arg01));
7715 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7716 arg02 = fold_build1_loc (loc, code, type,
7717 fold_convert_loc (loc,
7718 TREE_TYPE (op0), arg02));
7719 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7720 arg01, arg02);
7721
7722 /* If this was a conversion, and all we did was to move into
7723 inside the COND_EXPR, bring it back out. But leave it if
7724 it is a conversion from integer to integer and the
7725 result precision is no wider than a word since such a
7726 conversion is cheap and may be optimized away by combine,
7727 while it couldn't if it were outside the COND_EXPR. Then return
7728 so we don't get into an infinite recursion loop taking the
7729 conversion out and then back in. */
7730
7731 if ((CONVERT_EXPR_CODE_P (code)
7732 || code == NON_LVALUE_EXPR)
7733 && TREE_CODE (tem) == COND_EXPR
7734 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7735 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7736 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7737 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7738 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7739 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7740 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7741 && (INTEGRAL_TYPE_P
7742 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7743 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7744 || flag_syntax_only))
7745 tem = build1_loc (loc, code, type,
7746 build3 (COND_EXPR,
7747 TREE_TYPE (TREE_OPERAND
7748 (TREE_OPERAND (tem, 1), 0)),
7749 TREE_OPERAND (tem, 0),
7750 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7751 TREE_OPERAND (TREE_OPERAND (tem, 2),
7752 0)));
7753 return tem;
7754 }
7755 }
7756
7757 switch (code)
7758 {
7759 case NON_LVALUE_EXPR:
7760 if (!maybe_lvalue_p (op0))
7761 return fold_convert_loc (loc, type, op0);
7762 return NULL_TREE;
7763
7764 CASE_CONVERT:
7765 case FLOAT_EXPR:
7766 case FIX_TRUNC_EXPR:
7767 if (COMPARISON_CLASS_P (op0))
7768 {
7769 /* If we have (type) (a CMP b) and type is an integral type, return
7770 new expression involving the new type. Canonicalize
7771 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7772 non-integral type.
7773 Do not fold the result as that would not simplify further, also
7774 folding again results in recursions. */
7775 if (TREE_CODE (type) == BOOLEAN_TYPE)
7776 return build2_loc (loc, TREE_CODE (op0), type,
7777 TREE_OPERAND (op0, 0),
7778 TREE_OPERAND (op0, 1));
7779 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7780 && TREE_CODE (type) != VECTOR_TYPE)
7781 return build3_loc (loc, COND_EXPR, type, op0,
7782 constant_boolean_node (true, type),
7783 constant_boolean_node (false, type));
7784 }
7785
7786 /* Handle (T *)&A.B.C for A being of type T and B and C
7787 living at offset zero. This occurs frequently in
7788 C++ upcasting and then accessing the base. */
7789 if (TREE_CODE (op0) == ADDR_EXPR
7790 && POINTER_TYPE_P (type)
7791 && handled_component_p (TREE_OPERAND (op0, 0)))
7792 {
7793 HOST_WIDE_INT bitsize, bitpos;
7794 tree offset;
7795 machine_mode mode;
7796 int unsignedp, reversep, volatilep;
7797 tree base
7798 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7799 &offset, &mode, &unsignedp, &reversep,
7800 &volatilep, false);
7801 /* If the reference was to a (constant) zero offset, we can use
7802 the address of the base if it has the same base type
7803 as the result type and the pointer type is unqualified. */
7804 if (! offset && bitpos == 0
7805 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7806 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7807 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7808 return fold_convert_loc (loc, type,
7809 build_fold_addr_expr_loc (loc, base));
7810 }
7811
7812 if (TREE_CODE (op0) == MODIFY_EXPR
7813 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7814 /* Detect assigning a bitfield. */
7815 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7816 && DECL_BIT_FIELD
7817 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7818 {
7819 /* Don't leave an assignment inside a conversion
7820 unless assigning a bitfield. */
7821 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7822 /* First do the assignment, then return converted constant. */
7823 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7824 TREE_NO_WARNING (tem) = 1;
7825 TREE_USED (tem) = 1;
7826 return tem;
7827 }
7828
7829 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7830 constants (if x has signed type, the sign bit cannot be set
7831 in c). This folds extension into the BIT_AND_EXPR.
7832 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7833 very likely don't have maximal range for their precision and this
7834 transformation effectively doesn't preserve non-maximal ranges. */
7835 if (TREE_CODE (type) == INTEGER_TYPE
7836 && TREE_CODE (op0) == BIT_AND_EXPR
7837 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7838 {
7839 tree and_expr = op0;
7840 tree and0 = TREE_OPERAND (and_expr, 0);
7841 tree and1 = TREE_OPERAND (and_expr, 1);
7842 int change = 0;
7843
7844 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7845 || (TYPE_PRECISION (type)
7846 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7847 change = 1;
7848 else if (TYPE_PRECISION (TREE_TYPE (and1))
7849 <= HOST_BITS_PER_WIDE_INT
7850 && tree_fits_uhwi_p (and1))
7851 {
7852 unsigned HOST_WIDE_INT cst;
7853
7854 cst = tree_to_uhwi (and1);
7855 cst &= HOST_WIDE_INT_M1U
7856 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7857 change = (cst == 0);
7858 if (change
7859 && !flag_syntax_only
7860 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7861 == ZERO_EXTEND))
7862 {
7863 tree uns = unsigned_type_for (TREE_TYPE (and0));
7864 and0 = fold_convert_loc (loc, uns, and0);
7865 and1 = fold_convert_loc (loc, uns, and1);
7866 }
7867 }
7868 if (change)
7869 {
7870 tem = force_fit_type (type, wi::to_widest (and1), 0,
7871 TREE_OVERFLOW (and1));
7872 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7873 fold_convert_loc (loc, type, and0), tem);
7874 }
7875 }
7876
7877 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7878 cast (T1)X will fold away. We assume that this happens when X itself
7879 is a cast. */
7880 if (POINTER_TYPE_P (type)
7881 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7882 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7883 {
7884 tree arg00 = TREE_OPERAND (arg0, 0);
7885 tree arg01 = TREE_OPERAND (arg0, 1);
7886
7887 return fold_build_pointer_plus_loc
7888 (loc, fold_convert_loc (loc, type, arg00), arg01);
7889 }
7890
7891 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7892 of the same precision, and X is an integer type not narrower than
7893 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7894 if (INTEGRAL_TYPE_P (type)
7895 && TREE_CODE (op0) == BIT_NOT_EXPR
7896 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7897 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7898 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7899 {
7900 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7901 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7902 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7903 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7904 fold_convert_loc (loc, type, tem));
7905 }
7906
7907 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7908 type of X and Y (integer types only). */
7909 if (INTEGRAL_TYPE_P (type)
7910 && TREE_CODE (op0) == MULT_EXPR
7911 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7912 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7913 {
7914 /* Be careful not to introduce new overflows. */
7915 tree mult_type;
7916 if (TYPE_OVERFLOW_WRAPS (type))
7917 mult_type = type;
7918 else
7919 mult_type = unsigned_type_for (type);
7920
7921 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7922 {
7923 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7924 fold_convert_loc (loc, mult_type,
7925 TREE_OPERAND (op0, 0)),
7926 fold_convert_loc (loc, mult_type,
7927 TREE_OPERAND (op0, 1)));
7928 return fold_convert_loc (loc, type, tem);
7929 }
7930 }
7931
7932 return NULL_TREE;
7933
7934 case VIEW_CONVERT_EXPR:
7935 if (TREE_CODE (op0) == MEM_REF)
7936 {
7937 tem = fold_build2_loc (loc, MEM_REF, type,
7938 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7939 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7940 return tem;
7941 }
7942
7943 return NULL_TREE;
7944
7945 case NEGATE_EXPR:
7946 tem = fold_negate_expr (loc, arg0);
7947 if (tem)
7948 return fold_convert_loc (loc, type, tem);
7949 return NULL_TREE;
7950
7951 case ABS_EXPR:
7952 /* Convert fabs((double)float) into (double)fabsf(float). */
7953 if (TREE_CODE (arg0) == NOP_EXPR
7954 && TREE_CODE (type) == REAL_TYPE)
7955 {
7956 tree targ0 = strip_float_extensions (arg0);
7957 if (targ0 != arg0)
7958 return fold_convert_loc (loc, type,
7959 fold_build1_loc (loc, ABS_EXPR,
7960 TREE_TYPE (targ0),
7961 targ0));
7962 }
7963 return NULL_TREE;
7964
7965 case BIT_NOT_EXPR:
7966 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7967 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7968 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7969 fold_convert_loc (loc, type,
7970 TREE_OPERAND (arg0, 0)))))
7971 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7972 fold_convert_loc (loc, type,
7973 TREE_OPERAND (arg0, 1)));
7974 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7975 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7976 fold_convert_loc (loc, type,
7977 TREE_OPERAND (arg0, 1)))))
7978 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7979 fold_convert_loc (loc, type,
7980 TREE_OPERAND (arg0, 0)), tem);
7981
7982 return NULL_TREE;
7983
7984 case TRUTH_NOT_EXPR:
7985 /* Note that the operand of this must be an int
7986 and its values must be 0 or 1.
7987 ("true" is a fixed value perhaps depending on the language,
7988 but we don't handle values other than 1 correctly yet.) */
7989 tem = fold_truth_not_expr (loc, arg0);
7990 if (!tem)
7991 return NULL_TREE;
7992 return fold_convert_loc (loc, type, tem);
7993
7994 case INDIRECT_REF:
7995 /* Fold *&X to X if X is an lvalue. */
7996 if (TREE_CODE (op0) == ADDR_EXPR)
7997 {
7998 tree op00 = TREE_OPERAND (op0, 0);
7999 if ((TREE_CODE (op00) == VAR_DECL
8000 || TREE_CODE (op00) == PARM_DECL
8001 || TREE_CODE (op00) == RESULT_DECL)
8002 && !TREE_READONLY (op00))
8003 return op00;
8004 }
8005 return NULL_TREE;
8006
8007 default:
8008 return NULL_TREE;
8009 } /* switch (code) */
8010 }
8011
8012
8013 /* If the operation was a conversion do _not_ mark a resulting constant
8014 with TREE_OVERFLOW if the original constant was not. These conversions
8015 have implementation defined behavior and retaining the TREE_OVERFLOW
8016 flag here would confuse later passes such as VRP. */
8017 tree
8018 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8019 tree type, tree op0)
8020 {
8021 tree res = fold_unary_loc (loc, code, type, op0);
8022 if (res
8023 && TREE_CODE (res) == INTEGER_CST
8024 && TREE_CODE (op0) == INTEGER_CST
8025 && CONVERT_EXPR_CODE_P (code))
8026 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8027
8028 return res;
8029 }
8030
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */
static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.   Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      /* Operand order may only be ignored when both the inner operator
	 and the outer CODE are the non-short-circuit AND/OR variants,
	 where evaluation order cannot matter.  */
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      /* (A op B) CODE (A op C) -> A op (B CODE C), and the commutative
	 variants thereof.  */
      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  /* Try to merge ARG1 into an opposite-sense arm of ARG0, e.g. for
     (A || B) && C when an arm of the ORIF contradicts C.  */
  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  /* Likewise with the roles of ARG0 and ARG1 swapped.  */
  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  /* On targets where branches are expensive, prefer the
     non-short-circuit AND/OR forms when both operands are simple
     enough that unconditional evaluation is safe.  */
  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      /* NCODE is the non-short-circuit variant of CODE; ICODE the
	 corresponding short-circuit (IF) variant.  */
      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
	 We don't want to pack more than two leafs to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	   or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	  && simple_operand_p_2 (arg0)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
8175
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;

  /* Match A +- CST code arg1.  We can change this only if overflow
     is undefined.  */
  if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
	/* In principle pointers also have undefined overflow behavior,
	   but that causes problems elsewhere.  */
	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
	&& (code0 == MINUS_EXPR
	    || code0 == PLUS_EXPR)
	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  Each case below matches
     only when the effective addend moves away from ARG1, so stepping
     the constant one towards zero is compensated by relaxing or
     strengthening CODE by one.  */
  /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
  if (code == LT_EXPR
      && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = LE_EXPR;
  /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
  else if (code == GT_EXPR
	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = GE_EXPR;
  /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
  else if (code == LE_EXPR
	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = LT_EXPR;
  /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
  else if (code == GE_EXPR
	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = GT_EXPR;
  else
    return NULL_TREE;
  /* All of the above transformations are valid only when signed
     overflow is undefined; tell the caller so it can warn.  */
  *strict_overflow_p = true;

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    return NULL_TREE;

  /* Step the constant one unit towards zero: CST0 - 1 for positive
     CST0, CST0 + 1 for negative CST0.  */
  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  return fold_build2_loc (loc, code, type, t, arg1);
}
8254
8255 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8256 overflow further. Try to decrease the magnitude of constants involved
8257 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8258 and put sole constants at the second argument position.
8259 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8260
8261 static tree
8262 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8263 tree arg0, tree arg1)
8264 {
8265 tree t;
8266 bool strict_overflow_p;
8267 const char * const warnmsg = G_("assuming signed overflow does not occur "
8268 "when reducing constant in comparison");
8269
8270 /* Try canonicalization by simplifying arg0. */
8271 strict_overflow_p = false;
8272 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8273 &strict_overflow_p);
8274 if (t)
8275 {
8276 if (strict_overflow_p)
8277 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8278 return t;
8279 }
8280
8281 /* Try canonicalization by simplifying arg1 using the swapped
8282 comparison. */
8283 code = swap_tree_comparison (code);
8284 strict_overflow_p = false;
8285 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8286 &strict_overflow_p);
8287 if (t && strict_overflow_p)
8288 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8289 return t;
8290 }
8291
8292 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8293 space. This is used to avoid issuing overflow warnings for
8294 expressions like &p->x which can not wrap. */
8295
8296 static bool
8297 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8298 {
8299 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8300 return true;
8301
8302 if (bitpos < 0)
8303 return true;
8304
8305 wide_int wi_offset;
8306 int precision = TYPE_PRECISION (TREE_TYPE (base));
8307 if (offset == NULL_TREE)
8308 wi_offset = wi::zero (precision);
8309 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8310 return true;
8311 else
8312 wi_offset = offset;
8313
8314 bool overflow;
8315 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8316 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8317 if (overflow)
8318 return true;
8319
8320 if (!wi::fits_uhwi_p (total))
8321 return true;
8322
8323 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8324 if (size <= 0)
8325 return true;
8326
8327 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8328 array. */
8329 if (TREE_CODE (base) == ADDR_EXPR)
8330 {
8331 HOST_WIDE_INT base_size;
8332
8333 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8334 if (base_size > 0 && size < base_size)
8335 size = base_size;
8336 }
8337
8338 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8339 }
8340
8341 /* Subroutine of fold_binary. This routine performs all of the
8342 transformations that are common to the equality/inequality
8343 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8344 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8345 fold_binary should call fold_binary. Fold a comparison with
8346 tree code CODE and type TYPE with operands OP0 and OP1. Return
8347 the folded comparison or NULL_TREE. */
8348
8349 static tree
8350 fold_comparison (location_t loc, enum tree_code code, tree type,
8351 tree op0, tree op1)
8352 {
8353 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8354 tree arg0, arg1, tem;
8355
8356 arg0 = op0;
8357 arg1 = op1;
8358
8359 STRIP_SIGN_NOPS (arg0);
8360 STRIP_SIGN_NOPS (arg1);
8361
8362 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8363 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8364 && (equality_code
8365 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8366 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8367 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8368 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8369 && TREE_CODE (arg1) == INTEGER_CST
8370 && !TREE_OVERFLOW (arg1))
8371 {
8372 const enum tree_code
8373 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8374 tree const1 = TREE_OPERAND (arg0, 1);
8375 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8376 tree variable = TREE_OPERAND (arg0, 0);
8377 tree new_const = int_const_binop (reverse_op, const2, const1);
8378
8379 /* If the constant operation overflowed this can be
8380 simplified as a comparison against INT_MAX/INT_MIN. */
8381 if (TREE_OVERFLOW (new_const)
8382 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8383 {
8384 int const1_sgn = tree_int_cst_sgn (const1);
8385 enum tree_code code2 = code;
8386
8387 /* Get the sign of the constant on the lhs if the
8388 operation were VARIABLE + CONST1. */
8389 if (TREE_CODE (arg0) == MINUS_EXPR)
8390 const1_sgn = -const1_sgn;
8391
8392 /* The sign of the constant determines if we overflowed
8393 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8394 Canonicalize to the INT_MIN overflow by swapping the comparison
8395 if necessary. */
8396 if (const1_sgn == -1)
8397 code2 = swap_tree_comparison (code);
8398
8399 /* We now can look at the canonicalized case
8400 VARIABLE + 1 CODE2 INT_MIN
8401 and decide on the result. */
8402 switch (code2)
8403 {
8404 case EQ_EXPR:
8405 case LT_EXPR:
8406 case LE_EXPR:
8407 return
8408 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8409
8410 case NE_EXPR:
8411 case GE_EXPR:
8412 case GT_EXPR:
8413 return
8414 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8415
8416 default:
8417 gcc_unreachable ();
8418 }
8419 }
8420 else
8421 {
8422 if (!equality_code)
8423 fold_overflow_warning ("assuming signed overflow does not occur "
8424 "when changing X +- C1 cmp C2 to "
8425 "X cmp C2 -+ C1",
8426 WARN_STRICT_OVERFLOW_COMPARISON);
8427 return fold_build2_loc (loc, code, type, variable, new_const);
8428 }
8429 }
8430
8431 /* For comparisons of pointers we can decompose it to a compile time
8432 comparison of the base objects and the offsets into the object.
8433 This requires at least one operand being an ADDR_EXPR or a
8434 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8435 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8436 && (TREE_CODE (arg0) == ADDR_EXPR
8437 || TREE_CODE (arg1) == ADDR_EXPR
8438 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8439 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8440 {
8441 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8442 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8443 machine_mode mode;
8444 int volatilep, reversep, unsignedp;
8445 bool indirect_base0 = false, indirect_base1 = false;
8446
8447 /* Get base and offset for the access. Strip ADDR_EXPR for
8448 get_inner_reference, but put it back by stripping INDIRECT_REF
8449 off the base object if possible. indirect_baseN will be true
8450 if baseN is not an address but refers to the object itself. */
8451 base0 = arg0;
8452 if (TREE_CODE (arg0) == ADDR_EXPR)
8453 {
8454 base0
8455 = get_inner_reference (TREE_OPERAND (arg0, 0),
8456 &bitsize, &bitpos0, &offset0, &mode,
8457 &unsignedp, &reversep, &volatilep, false);
8458 if (TREE_CODE (base0) == INDIRECT_REF)
8459 base0 = TREE_OPERAND (base0, 0);
8460 else
8461 indirect_base0 = true;
8462 }
8463 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8464 {
8465 base0 = TREE_OPERAND (arg0, 0);
8466 STRIP_SIGN_NOPS (base0);
8467 if (TREE_CODE (base0) == ADDR_EXPR)
8468 {
8469 base0
8470 = get_inner_reference (TREE_OPERAND (base0, 0),
8471 &bitsize, &bitpos0, &offset0, &mode,
8472 &unsignedp, &reversep, &volatilep,
8473 false);
8474 if (TREE_CODE (base0) == INDIRECT_REF)
8475 base0 = TREE_OPERAND (base0, 0);
8476 else
8477 indirect_base0 = true;
8478 }
8479 if (offset0 == NULL_TREE || integer_zerop (offset0))
8480 offset0 = TREE_OPERAND (arg0, 1);
8481 else
8482 offset0 = size_binop (PLUS_EXPR, offset0,
8483 TREE_OPERAND (arg0, 1));
8484 if (TREE_CODE (offset0) == INTEGER_CST)
8485 {
8486 offset_int tem = wi::sext (wi::to_offset (offset0),
8487 TYPE_PRECISION (sizetype));
8488 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8489 tem += bitpos0;
8490 if (wi::fits_shwi_p (tem))
8491 {
8492 bitpos0 = tem.to_shwi ();
8493 offset0 = NULL_TREE;
8494 }
8495 }
8496 }
8497
8498 base1 = arg1;
8499 if (TREE_CODE (arg1) == ADDR_EXPR)
8500 {
8501 base1
8502 = get_inner_reference (TREE_OPERAND (arg1, 0),
8503 &bitsize, &bitpos1, &offset1, &mode,
8504 &unsignedp, &reversep, &volatilep, false);
8505 if (TREE_CODE (base1) == INDIRECT_REF)
8506 base1 = TREE_OPERAND (base1, 0);
8507 else
8508 indirect_base1 = true;
8509 }
8510 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8511 {
8512 base1 = TREE_OPERAND (arg1, 0);
8513 STRIP_SIGN_NOPS (base1);
8514 if (TREE_CODE (base1) == ADDR_EXPR)
8515 {
8516 base1
8517 = get_inner_reference (TREE_OPERAND (base1, 0),
8518 &bitsize, &bitpos1, &offset1, &mode,
8519 &unsignedp, &reversep, &volatilep,
8520 false);
8521 if (TREE_CODE (base1) == INDIRECT_REF)
8522 base1 = TREE_OPERAND (base1, 0);
8523 else
8524 indirect_base1 = true;
8525 }
8526 if (offset1 == NULL_TREE || integer_zerop (offset1))
8527 offset1 = TREE_OPERAND (arg1, 1);
8528 else
8529 offset1 = size_binop (PLUS_EXPR, offset1,
8530 TREE_OPERAND (arg1, 1));
8531 if (TREE_CODE (offset1) == INTEGER_CST)
8532 {
8533 offset_int tem = wi::sext (wi::to_offset (offset1),
8534 TYPE_PRECISION (sizetype));
8535 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8536 tem += bitpos1;
8537 if (wi::fits_shwi_p (tem))
8538 {
8539 bitpos1 = tem.to_shwi ();
8540 offset1 = NULL_TREE;
8541 }
8542 }
8543 }
8544
8545 /* If we have equivalent bases we might be able to simplify. */
8546 if (indirect_base0 == indirect_base1
8547 && operand_equal_p (base0, base1,
8548 indirect_base0 ? OEP_ADDRESS_OF : 0))
8549 {
8550 /* We can fold this expression to a constant if the non-constant
8551 offset parts are equal. */
8552 if ((offset0 == offset1
8553 || (offset0 && offset1
8554 && operand_equal_p (offset0, offset1, 0)))
8555 && (code == EQ_EXPR
8556 || code == NE_EXPR
8557 || (indirect_base0 && DECL_P (base0))
8558 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8559
8560 {
8561 if (!equality_code
8562 && bitpos0 != bitpos1
8563 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8564 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8565 fold_overflow_warning (("assuming pointer wraparound does not "
8566 "occur when comparing P +- C1 with "
8567 "P +- C2"),
8568 WARN_STRICT_OVERFLOW_CONDITIONAL);
8569
8570 switch (code)
8571 {
8572 case EQ_EXPR:
8573 return constant_boolean_node (bitpos0 == bitpos1, type);
8574 case NE_EXPR:
8575 return constant_boolean_node (bitpos0 != bitpos1, type);
8576 case LT_EXPR:
8577 return constant_boolean_node (bitpos0 < bitpos1, type);
8578 case LE_EXPR:
8579 return constant_boolean_node (bitpos0 <= bitpos1, type);
8580 case GE_EXPR:
8581 return constant_boolean_node (bitpos0 >= bitpos1, type);
8582 case GT_EXPR:
8583 return constant_boolean_node (bitpos0 > bitpos1, type);
8584 default:;
8585 }
8586 }
8587 /* We can simplify the comparison to a comparison of the variable
8588 offset parts if the constant offset parts are equal.
8589 Be careful to use signed sizetype here because otherwise we
8590 mess with array offsets in the wrong way. This is possible
8591 because pointer arithmetic is restricted to retain within an
8592 object and overflow on pointer differences is undefined as of
8593 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8594 else if (bitpos0 == bitpos1
8595 && (equality_code
8596 || (indirect_base0 && DECL_P (base0))
8597 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8598 {
8599 /* By converting to signed sizetype we cover middle-end pointer
8600 arithmetic which operates on unsigned pointer types of size
8601 type size and ARRAY_REF offsets which are properly sign or
8602 zero extended from their type in case it is narrower than
8603 sizetype. */
8604 if (offset0 == NULL_TREE)
8605 offset0 = build_int_cst (ssizetype, 0);
8606 else
8607 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8608 if (offset1 == NULL_TREE)
8609 offset1 = build_int_cst (ssizetype, 0);
8610 else
8611 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8612
8613 if (!equality_code
8614 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8615 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8616 fold_overflow_warning (("assuming pointer wraparound does not "
8617 "occur when comparing P +- C1 with "
8618 "P +- C2"),
8619 WARN_STRICT_OVERFLOW_COMPARISON);
8620
8621 return fold_build2_loc (loc, code, type, offset0, offset1);
8622 }
8623 }
8624 /* For equal offsets we can simplify to a comparison of the
8625 base addresses. */
8626 else if (bitpos0 == bitpos1
8627 && (indirect_base0
8628 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8629 && (indirect_base1
8630 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8631 && ((offset0 == offset1)
8632 || (offset0 && offset1
8633 && operand_equal_p (offset0, offset1, 0))))
8634 {
8635 if (indirect_base0)
8636 base0 = build_fold_addr_expr_loc (loc, base0);
8637 if (indirect_base1)
8638 base1 = build_fold_addr_expr_loc (loc, base1);
8639 return fold_build2_loc (loc, code, type, base0, base1);
8640 }
8641 }
8642
8643 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8644 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8645 the resulting offset is smaller in absolute value than the
8646 original one and has the same sign. */
8647 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8648 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8649 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8650 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8651 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8652 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8653 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8654 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8655 {
8656 tree const1 = TREE_OPERAND (arg0, 1);
8657 tree const2 = TREE_OPERAND (arg1, 1);
8658 tree variable1 = TREE_OPERAND (arg0, 0);
8659 tree variable2 = TREE_OPERAND (arg1, 0);
8660 tree cst;
8661 const char * const warnmsg = G_("assuming signed overflow does not "
8662 "occur when combining constants around "
8663 "a comparison");
8664
8665 /* Put the constant on the side where it doesn't overflow and is
8666 of lower absolute value and of same sign than before. */
8667 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8668 ? MINUS_EXPR : PLUS_EXPR,
8669 const2, const1);
8670 if (!TREE_OVERFLOW (cst)
8671 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8672 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8673 {
8674 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8675 return fold_build2_loc (loc, code, type,
8676 variable1,
8677 fold_build2_loc (loc, TREE_CODE (arg1),
8678 TREE_TYPE (arg1),
8679 variable2, cst));
8680 }
8681
8682 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8683 ? MINUS_EXPR : PLUS_EXPR,
8684 const1, const2);
8685 if (!TREE_OVERFLOW (cst)
8686 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8687 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8688 {
8689 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8690 return fold_build2_loc (loc, code, type,
8691 fold_build2_loc (loc, TREE_CODE (arg0),
8692 TREE_TYPE (arg0),
8693 variable1, cst),
8694 variable2);
8695 }
8696 }
8697
8698 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8699 if (tem)
8700 return tem;
8701
8702 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8703 constant, we can simplify it. */
8704 if (TREE_CODE (arg1) == INTEGER_CST
8705 && (TREE_CODE (arg0) == MIN_EXPR
8706 || TREE_CODE (arg0) == MAX_EXPR)
8707 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8708 {
8709 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8710 if (tem)
8711 return tem;
8712 }
8713
8714 /* If we are comparing an expression that just has comparisons
8715 of two integer values, arithmetic expressions of those comparisons,
8716 and constants, we can simplify it. There are only three cases
8717 to check: the two values can either be equal, the first can be
8718 greater, or the second can be greater. Fold the expression for
8719 those three values. Since each value must be 0 or 1, we have
8720 eight possibilities, each of which corresponds to the constant 0
8721 or 1 or one of the six possible comparisons.
8722
8723 This handles common cases like (a > b) == 0 but also handles
8724 expressions like ((x > y) - (y > x)) > 0, which supposedly
8725 occur in macroized code. */
8726
8727 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8728 {
8729 tree cval1 = 0, cval2 = 0;
8730 int save_p = 0;
8731
8732 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8733 /* Don't handle degenerate cases here; they should already
8734 have been handled anyway. */
8735 && cval1 != 0 && cval2 != 0
8736 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8737 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8738 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8739 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8740 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8741 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8742 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8743 {
8744 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8745 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8746
8747 /* We can't just pass T to eval_subst in case cval1 or cval2
8748 was the same as ARG1. */
8749
8750 tree high_result
8751 = fold_build2_loc (loc, code, type,
8752 eval_subst (loc, arg0, cval1, maxval,
8753 cval2, minval),
8754 arg1);
8755 tree equal_result
8756 = fold_build2_loc (loc, code, type,
8757 eval_subst (loc, arg0, cval1, maxval,
8758 cval2, maxval),
8759 arg1);
8760 tree low_result
8761 = fold_build2_loc (loc, code, type,
8762 eval_subst (loc, arg0, cval1, minval,
8763 cval2, maxval),
8764 arg1);
8765
8766 /* All three of these results should be 0 or 1. Confirm they are.
8767 Then use those values to select the proper code to use. */
8768
8769 if (TREE_CODE (high_result) == INTEGER_CST
8770 && TREE_CODE (equal_result) == INTEGER_CST
8771 && TREE_CODE (low_result) == INTEGER_CST)
8772 {
8773 /* Make a 3-bit mask with the high-order bit being the
8774 value for `>', the next for '=', and the low for '<'. */
8775 switch ((integer_onep (high_result) * 4)
8776 + (integer_onep (equal_result) * 2)
8777 + integer_onep (low_result))
8778 {
8779 case 0:
8780 /* Always false. */
8781 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8782 case 1:
8783 code = LT_EXPR;
8784 break;
8785 case 2:
8786 code = EQ_EXPR;
8787 break;
8788 case 3:
8789 code = LE_EXPR;
8790 break;
8791 case 4:
8792 code = GT_EXPR;
8793 break;
8794 case 5:
8795 code = NE_EXPR;
8796 break;
8797 case 6:
8798 code = GE_EXPR;
8799 break;
8800 case 7:
8801 /* Always true. */
8802 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8803 }
8804
8805 if (save_p)
8806 {
8807 tem = save_expr (build2 (code, type, cval1, cval2));
8808 SET_EXPR_LOCATION (tem, loc);
8809 return tem;
8810 }
8811 return fold_build2_loc (loc, code, type, cval1, cval2);
8812 }
8813 }
8814 }
8815
8816 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8817 into a single range test. */
8818 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8819 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8820 && TREE_CODE (arg1) == INTEGER_CST
8821 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8822 && !integer_zerop (TREE_OPERAND (arg0, 1))
8823 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8824 && !TREE_OVERFLOW (arg1))
8825 {
8826 tem = fold_div_compare (loc, code, type, arg0, arg1);
8827 if (tem != NULL_TREE)
8828 return tem;
8829 }
8830
8831 return NULL_TREE;
8832 }
8833
8834
8835 /* Subroutine of fold_binary. Optimize complex multiplications of the
8836 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8837 argument EXPR represents the expression "z" of type TYPE. */
8838
8839 static tree
8840 fold_mult_zconjz (location_t loc, tree type, tree expr)
8841 {
8842 tree itype = TREE_TYPE (type);
8843 tree rpart, ipart, tem;
8844
8845 if (TREE_CODE (expr) == COMPLEX_EXPR)
8846 {
8847 rpart = TREE_OPERAND (expr, 0);
8848 ipart = TREE_OPERAND (expr, 1);
8849 }
8850 else if (TREE_CODE (expr) == COMPLEX_CST)
8851 {
8852 rpart = TREE_REALPART (expr);
8853 ipart = TREE_IMAGPART (expr);
8854 }
8855 else
8856 {
8857 expr = save_expr (expr);
8858 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8859 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8860 }
8861
8862 rpart = save_expr (rpart);
8863 ipart = save_expr (ipart);
8864 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8865 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8866 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8867 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8868 build_zero_cst (itype));
8869 }
8870
8871
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.
   ELTS must have room for TYPE_VECTOR_SUBPARTS (TREE_TYPE (ARG))
   entries; any slots the input does not supply are padded with zero
   constants of the element type.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      /* Reject constructors with more initializers than the vector has
	 elements, and constructors containing vector-typed values
	 (those would occupy more than one slot).  */
      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  /* I carries the number of elements stored by whichever branch ran
     above (the iteration macro leaves it set); zero-fill the rest.  */
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
8902
8903 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8904 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8905 NULL_TREE otherwise. */
8906
8907 static tree
8908 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8909 {
8910 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8911 tree *elts;
8912 bool need_ctor = false;
8913
8914 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8915 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8916 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8917 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8918 return NULL_TREE;
8919
8920 elts = XALLOCAVEC (tree, nelts * 3);
8921 if (!vec_cst_ctor_to_array (arg0, elts)
8922 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8923 return NULL_TREE;
8924
8925 for (i = 0; i < nelts; i++)
8926 {
8927 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8928 need_ctor = true;
8929 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8930 }
8931
8932 if (need_ctor)
8933 {
8934 vec<constructor_elt, va_gc> *v;
8935 vec_alloc (v, nelts);
8936 for (i = 0; i < nelts; i++)
8937 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8938 return build_constructor (type, v);
8939 }
8940 else
8941 return build_vector (type, &elts[2 * nelts]);
8942 }
8943
/* Try to fold a pointer difference of type TYPE two address expressions of
   array references AREF0 and AREF1 using location LOC.  Return a
   simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  /* Offset contributed by the bases; stays zero when the bases compare
     equal directly (last arm below).  */
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.
     NOTE: the first two arms assign BASE_OFFSET as a side effect inside
     the short-circuit condition, and succeed only when the computed
     difference is non-NULL.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset
	        = fold_binary_loc (loc, MINUS_EXPR, type,
				   fold_convert (type, TREE_OPERAND (base0, 0)),
				   fold_convert (type,
						 TREE_OPERAND (base1, 0)))))
      || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
    {
      /* &base[i] - &base[j] folds to base_offset + (i - j) * elt_size.  */
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
8983
8984 /* If the real or vector real constant CST of type TYPE has an exact
8985 inverse, return it, else return NULL. */
8986
8987 tree
8988 exact_inverse (tree type, tree cst)
8989 {
8990 REAL_VALUE_TYPE r;
8991 tree unit_type, *elts;
8992 machine_mode mode;
8993 unsigned vec_nelts, i;
8994
8995 switch (TREE_CODE (cst))
8996 {
8997 case REAL_CST:
8998 r = TREE_REAL_CST (cst);
8999
9000 if (exact_real_inverse (TYPE_MODE (type), &r))
9001 return build_real (type, r);
9002
9003 return NULL_TREE;
9004
9005 case VECTOR_CST:
9006 vec_nelts = VECTOR_CST_NELTS (cst);
9007 elts = XALLOCAVEC (tree, vec_nelts);
9008 unit_type = TREE_TYPE (type);
9009 mode = TYPE_MODE (unit_type);
9010
9011 for (i = 0; i < vec_nelts; i++)
9012 {
9013 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9014 if (!exact_real_inverse (mode, &r))
9015 return NULL_TREE;
9016 elts[i] = build_real (unit_type, r);
9017 }
9018
9019 return build_vector (type, elts);
9020
9021 default:
9022 return NULL_TREE;
9023 }
9024 }
9025
9026 /* Mask out the tz least significant bits of X of type TYPE where
9027 tz is the number of trailing zeroes in Y. */
9028 static wide_int
9029 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9030 {
9031 int tz = wi::ctz (y);
9032 if (tz > 0)
9033 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9034 return x;
9035 }
9036
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  /* First dispatch on the tree code class; whole classes of codes are
     handled by the generic unary/binary/single helpers.  */
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  /* Then handle individual codes that fall outside those classes
     (mostly tcc_expression codes).  */
  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    /* For these the value is that of the second operand; recurse.  */
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl) return false;
	/* A throwing operator new cannot return NULL (unless
	   -fcheck-new or null-pointer checks are disabled).  */
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	/* Functions declared returns_nonnull are trusted to do so.  */
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;
	/* alloca never returns NULL.  */
	return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
9129
9130 /* Return true when T is an address and is known to be nonzero.
9131 Handle warnings about undefined signed overflow. */
9132
9133 static bool
9134 tree_expr_nonzero_p (tree t)
9135 {
9136 bool ret, strict_overflow_p;
9137
9138 strict_overflow_p = false;
9139 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9140 if (strict_overflow_p)
9141 fold_overflow_warning (("assuming signed overflow does not occur when "
9142 "determining that expression is always "
9143 "non-zero"),
9144 WARN_STRICT_OVERFLOW_MISC);
9145 return ret;
9146 }
9147
9148 /* Return true if T is known not to be equal to an integer W. */
9149
9150 bool
9151 expr_not_equal_to (tree t, const wide_int &w)
9152 {
9153 wide_int min, max, nz;
9154 value_range_type rtype;
9155 switch (TREE_CODE (t))
9156 {
9157 case INTEGER_CST:
9158 return wi::ne_p (t, w);
9159
9160 case SSA_NAME:
9161 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9162 return false;
9163 rtype = get_range_info (t, &min, &max);
9164 if (rtype == VR_RANGE)
9165 {
9166 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9167 return true;
9168 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9169 return true;
9170 }
9171 else if (rtype == VR_ANTI_RANGE
9172 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9173 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9174 return true;
9175 /* If T has some known zero bits and W has any of those bits set,
9176 then T is known not to be equal to W. */
9177 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9178 TYPE_PRECISION (TREE_TYPE (t))), 0))
9179 return true;
9180 return false;
9181
9182 default:
9183 return false;
9184 }
9185 }
9186
9187 /* Fold a binary expression of code CODE and type TYPE with operands
9188 OP0 and OP1. LOC is the location of the resulting expression.
9189 Return the folded expression if folding is successful. Otherwise,
9190 return NULL_TREE. */
9191
9192 tree
9193 fold_binary_loc (location_t loc,
9194 enum tree_code code, tree type, tree op0, tree op1)
9195 {
9196 enum tree_code_class kind = TREE_CODE_CLASS (code);
9197 tree arg0, arg1, tem;
9198 tree t1 = NULL_TREE;
9199 bool strict_overflow_p;
9200 unsigned int prec;
9201
9202 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9203 && TREE_CODE_LENGTH (code) == 2
9204 && op0 != NULL_TREE
9205 && op1 != NULL_TREE);
9206
9207 arg0 = op0;
9208 arg1 = op1;
9209
9210 /* Strip any conversions that don't change the mode. This is
9211 safe for every expression, except for a comparison expression
9212 because its signedness is derived from its operands. So, in
9213 the latter case, only strip conversions that don't change the
9214 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9215 preserved.
9216
9217 Note that this is done as an internal manipulation within the
9218 constant folder, in order to find the simplest representation
9219 of the arguments so that their form can be studied. In any
9220 cases, the appropriate type conversions should be put back in
9221 the tree that will get out of the constant folder. */
9222
9223 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9224 {
9225 STRIP_SIGN_NOPS (arg0);
9226 STRIP_SIGN_NOPS (arg1);
9227 }
9228 else
9229 {
9230 STRIP_NOPS (arg0);
9231 STRIP_NOPS (arg1);
9232 }
9233
9234 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9235 constant but we can't do arithmetic on them. */
9236 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9237 {
9238 tem = const_binop (code, type, arg0, arg1);
9239 if (tem != NULL_TREE)
9240 {
9241 if (TREE_TYPE (tem) != type)
9242 tem = fold_convert_loc (loc, type, tem);
9243 return tem;
9244 }
9245 }
9246
9247 /* If this is a commutative operation, and ARG0 is a constant, move it
9248 to ARG1 to reduce the number of tests below. */
9249 if (commutative_tree_code (code)
9250 && tree_swap_operands_p (arg0, arg1, true))
9251 return fold_build2_loc (loc, code, type, op1, op0);
9252
9253 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9254 to ARG1 to reduce the number of tests below. */
9255 if (kind == tcc_comparison
9256 && tree_swap_operands_p (arg0, arg1, true))
9257 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9258
9259 tem = generic_simplify (loc, code, type, op0, op1);
9260 if (tem)
9261 return tem;
9262
9263 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9264
9265 First check for cases where an arithmetic operation is applied to a
9266 compound, conditional, or comparison operation. Push the arithmetic
9267 operation inside the compound or conditional to see if any folding
9268 can then be done. Convert comparison to conditional for this purpose.
9269 The also optimizes non-constant cases that used to be done in
9270 expand_expr.
9271
9272 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9273 one of the operands is a comparison and the other is a comparison, a
9274 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9275 code below would make the expression more complex. Change it to a
9276 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9277 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9278
9279 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9280 || code == EQ_EXPR || code == NE_EXPR)
9281 && TREE_CODE (type) != VECTOR_TYPE
9282 && ((truth_value_p (TREE_CODE (arg0))
9283 && (truth_value_p (TREE_CODE (arg1))
9284 || (TREE_CODE (arg1) == BIT_AND_EXPR
9285 && integer_onep (TREE_OPERAND (arg1, 1)))))
9286 || (truth_value_p (TREE_CODE (arg1))
9287 && (truth_value_p (TREE_CODE (arg0))
9288 || (TREE_CODE (arg0) == BIT_AND_EXPR
9289 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9290 {
9291 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9292 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9293 : TRUTH_XOR_EXPR,
9294 boolean_type_node,
9295 fold_convert_loc (loc, boolean_type_node, arg0),
9296 fold_convert_loc (loc, boolean_type_node, arg1));
9297
9298 if (code == EQ_EXPR)
9299 tem = invert_truthvalue_loc (loc, tem);
9300
9301 return fold_convert_loc (loc, type, tem);
9302 }
9303
9304 if (TREE_CODE_CLASS (code) == tcc_binary
9305 || TREE_CODE_CLASS (code) == tcc_comparison)
9306 {
9307 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9308 {
9309 tem = fold_build2_loc (loc, code, type,
9310 fold_convert_loc (loc, TREE_TYPE (op0),
9311 TREE_OPERAND (arg0, 1)), op1);
9312 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9313 tem);
9314 }
9315 if (TREE_CODE (arg1) == COMPOUND_EXPR
9316 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9317 {
9318 tem = fold_build2_loc (loc, code, type, op0,
9319 fold_convert_loc (loc, TREE_TYPE (op1),
9320 TREE_OPERAND (arg1, 1)));
9321 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9322 tem);
9323 }
9324
9325 if (TREE_CODE (arg0) == COND_EXPR
9326 || TREE_CODE (arg0) == VEC_COND_EXPR
9327 || COMPARISON_CLASS_P (arg0))
9328 {
9329 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9330 arg0, arg1,
9331 /*cond_first_p=*/1);
9332 if (tem != NULL_TREE)
9333 return tem;
9334 }
9335
9336 if (TREE_CODE (arg1) == COND_EXPR
9337 || TREE_CODE (arg1) == VEC_COND_EXPR
9338 || COMPARISON_CLASS_P (arg1))
9339 {
9340 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9341 arg1, arg0,
9342 /*cond_first_p=*/0);
9343 if (tem != NULL_TREE)
9344 return tem;
9345 }
9346 }
9347
9348 switch (code)
9349 {
9350 case MEM_REF:
9351 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9352 if (TREE_CODE (arg0) == ADDR_EXPR
9353 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9354 {
9355 tree iref = TREE_OPERAND (arg0, 0);
9356 return fold_build2 (MEM_REF, type,
9357 TREE_OPERAND (iref, 0),
9358 int_const_binop (PLUS_EXPR, arg1,
9359 TREE_OPERAND (iref, 1)));
9360 }
9361
9362 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9363 if (TREE_CODE (arg0) == ADDR_EXPR
9364 && handled_component_p (TREE_OPERAND (arg0, 0)))
9365 {
9366 tree base;
9367 HOST_WIDE_INT coffset;
9368 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9369 &coffset);
9370 if (!base)
9371 return NULL_TREE;
9372 return fold_build2 (MEM_REF, type,
9373 build_fold_addr_expr (base),
9374 int_const_binop (PLUS_EXPR, arg1,
9375 size_int (coffset)));
9376 }
9377
9378 return NULL_TREE;
9379
9380 case POINTER_PLUS_EXPR:
9381 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9382 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9383 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9384 return fold_convert_loc (loc, type,
9385 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9386 fold_convert_loc (loc, sizetype,
9387 arg1),
9388 fold_convert_loc (loc, sizetype,
9389 arg0)));
9390
9391 return NULL_TREE;
9392
9393 case PLUS_EXPR:
9394 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9395 {
9396 /* X + (X / CST) * -CST is X % CST. */
9397 if (TREE_CODE (arg1) == MULT_EXPR
9398 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9399 && operand_equal_p (arg0,
9400 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9401 {
9402 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9403 tree cst1 = TREE_OPERAND (arg1, 1);
9404 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9405 cst1, cst0);
9406 if (sum && integer_zerop (sum))
9407 return fold_convert_loc (loc, type,
9408 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9409 TREE_TYPE (arg0), arg0,
9410 cst0));
9411 }
9412 }
9413
9414 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9415 one. Make sure the type is not saturating and has the signedness of
9416 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9417 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9418 if ((TREE_CODE (arg0) == MULT_EXPR
9419 || TREE_CODE (arg1) == MULT_EXPR)
9420 && !TYPE_SATURATING (type)
9421 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9422 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9423 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9424 {
9425 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9426 if (tem)
9427 return tem;
9428 }
9429
9430 if (! FLOAT_TYPE_P (type))
9431 {
9432 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9433 (plus (plus (mult) (mult)) (foo)) so that we can
9434 take advantage of the factoring cases below. */
9435 if (ANY_INTEGRAL_TYPE_P (type)
9436 && TYPE_OVERFLOW_WRAPS (type)
9437 && (((TREE_CODE (arg0) == PLUS_EXPR
9438 || TREE_CODE (arg0) == MINUS_EXPR)
9439 && TREE_CODE (arg1) == MULT_EXPR)
9440 || ((TREE_CODE (arg1) == PLUS_EXPR
9441 || TREE_CODE (arg1) == MINUS_EXPR)
9442 && TREE_CODE (arg0) == MULT_EXPR)))
9443 {
9444 tree parg0, parg1, parg, marg;
9445 enum tree_code pcode;
9446
9447 if (TREE_CODE (arg1) == MULT_EXPR)
9448 parg = arg0, marg = arg1;
9449 else
9450 parg = arg1, marg = arg0;
9451 pcode = TREE_CODE (parg);
9452 parg0 = TREE_OPERAND (parg, 0);
9453 parg1 = TREE_OPERAND (parg, 1);
9454 STRIP_NOPS (parg0);
9455 STRIP_NOPS (parg1);
9456
9457 if (TREE_CODE (parg0) == MULT_EXPR
9458 && TREE_CODE (parg1) != MULT_EXPR)
9459 return fold_build2_loc (loc, pcode, type,
9460 fold_build2_loc (loc, PLUS_EXPR, type,
9461 fold_convert_loc (loc, type,
9462 parg0),
9463 fold_convert_loc (loc, type,
9464 marg)),
9465 fold_convert_loc (loc, type, parg1));
9466 if (TREE_CODE (parg0) != MULT_EXPR
9467 && TREE_CODE (parg1) == MULT_EXPR)
9468 return
9469 fold_build2_loc (loc, PLUS_EXPR, type,
9470 fold_convert_loc (loc, type, parg0),
9471 fold_build2_loc (loc, pcode, type,
9472 fold_convert_loc (loc, type, marg),
9473 fold_convert_loc (loc, type,
9474 parg1)));
9475 }
9476 }
9477 else
9478 {
9479 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9480 to __complex__ ( x, y ). This is not the same for SNaNs or
9481 if signed zeros are involved. */
9482 if (!HONOR_SNANS (element_mode (arg0))
9483 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9484 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9485 {
9486 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9487 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9488 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9489 bool arg0rz = false, arg0iz = false;
9490 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9491 || (arg0i && (arg0iz = real_zerop (arg0i))))
9492 {
9493 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9494 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9495 if (arg0rz && arg1i && real_zerop (arg1i))
9496 {
9497 tree rp = arg1r ? arg1r
9498 : build1 (REALPART_EXPR, rtype, arg1);
9499 tree ip = arg0i ? arg0i
9500 : build1 (IMAGPART_EXPR, rtype, arg0);
9501 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9502 }
9503 else if (arg0iz && arg1r && real_zerop (arg1r))
9504 {
9505 tree rp = arg0r ? arg0r
9506 : build1 (REALPART_EXPR, rtype, arg0);
9507 tree ip = arg1i ? arg1i
9508 : build1 (IMAGPART_EXPR, rtype, arg1);
9509 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9510 }
9511 }
9512 }
9513
9514 if (flag_unsafe_math_optimizations
9515 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9516 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9517 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9518 return tem;
9519
9520 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9521 We associate floats only if the user has specified
9522 -fassociative-math. */
9523 if (flag_associative_math
9524 && TREE_CODE (arg1) == PLUS_EXPR
9525 && TREE_CODE (arg0) != MULT_EXPR)
9526 {
9527 tree tree10 = TREE_OPERAND (arg1, 0);
9528 tree tree11 = TREE_OPERAND (arg1, 1);
9529 if (TREE_CODE (tree11) == MULT_EXPR
9530 && TREE_CODE (tree10) == MULT_EXPR)
9531 {
9532 tree tree0;
9533 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9534 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9535 }
9536 }
9537 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9538 We associate floats only if the user has specified
9539 -fassociative-math. */
9540 if (flag_associative_math
9541 && TREE_CODE (arg0) == PLUS_EXPR
9542 && TREE_CODE (arg1) != MULT_EXPR)
9543 {
9544 tree tree00 = TREE_OPERAND (arg0, 0);
9545 tree tree01 = TREE_OPERAND (arg0, 1);
9546 if (TREE_CODE (tree01) == MULT_EXPR
9547 && TREE_CODE (tree00) == MULT_EXPR)
9548 {
9549 tree tree0;
9550 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9551 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9552 }
9553 }
9554 }
9555
9556 bit_rotate:
9557 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9558 is a rotate of A by C1 bits. */
9559 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9560 is a rotate of A by B bits. */
9561 {
9562 enum tree_code code0, code1;
9563 tree rtype;
9564 code0 = TREE_CODE (arg0);
9565 code1 = TREE_CODE (arg1);
9566 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9567 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9568 && operand_equal_p (TREE_OPERAND (arg0, 0),
9569 TREE_OPERAND (arg1, 0), 0)
9570 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9571 TYPE_UNSIGNED (rtype))
9572 /* Only create rotates in complete modes. Other cases are not
9573 expanded properly. */
9574 && (element_precision (rtype)
9575 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9576 {
9577 tree tree01, tree11;
9578 enum tree_code code01, code11;
9579
9580 tree01 = TREE_OPERAND (arg0, 1);
9581 tree11 = TREE_OPERAND (arg1, 1);
9582 STRIP_NOPS (tree01);
9583 STRIP_NOPS (tree11);
9584 code01 = TREE_CODE (tree01);
9585 code11 = TREE_CODE (tree11);
9586 if (code01 == INTEGER_CST
9587 && code11 == INTEGER_CST
9588 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9589 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9590 {
9591 tem = build2_loc (loc, LROTATE_EXPR,
9592 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9593 TREE_OPERAND (arg0, 0),
9594 code0 == LSHIFT_EXPR
9595 ? TREE_OPERAND (arg0, 1)
9596 : TREE_OPERAND (arg1, 1));
9597 return fold_convert_loc (loc, type, tem);
9598 }
9599 else if (code11 == MINUS_EXPR)
9600 {
9601 tree tree110, tree111;
9602 tree110 = TREE_OPERAND (tree11, 0);
9603 tree111 = TREE_OPERAND (tree11, 1);
9604 STRIP_NOPS (tree110);
9605 STRIP_NOPS (tree111);
9606 if (TREE_CODE (tree110) == INTEGER_CST
9607 && 0 == compare_tree_int (tree110,
9608 element_precision
9609 (TREE_TYPE (TREE_OPERAND
9610 (arg0, 0))))
9611 && operand_equal_p (tree01, tree111, 0))
9612 return
9613 fold_convert_loc (loc, type,
9614 build2 ((code0 == LSHIFT_EXPR
9615 ? LROTATE_EXPR
9616 : RROTATE_EXPR),
9617 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9618 TREE_OPERAND (arg0, 0),
9619 TREE_OPERAND (arg0, 1)));
9620 }
9621 else if (code01 == MINUS_EXPR)
9622 {
9623 tree tree010, tree011;
9624 tree010 = TREE_OPERAND (tree01, 0);
9625 tree011 = TREE_OPERAND (tree01, 1);
9626 STRIP_NOPS (tree010);
9627 STRIP_NOPS (tree011);
9628 if (TREE_CODE (tree010) == INTEGER_CST
9629 && 0 == compare_tree_int (tree010,
9630 element_precision
9631 (TREE_TYPE (TREE_OPERAND
9632 (arg0, 0))))
9633 && operand_equal_p (tree11, tree011, 0))
9634 return fold_convert_loc
9635 (loc, type,
9636 build2 ((code0 != LSHIFT_EXPR
9637 ? LROTATE_EXPR
9638 : RROTATE_EXPR),
9639 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9640 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9641 }
9642 }
9643 }
9644
9645 associate:
9646 /* In most languages, can't associate operations on floats through
9647 parentheses. Rather than remember where the parentheses were, we
9648 don't associate floats at all, unless the user has specified
9649 -fassociative-math.
9650 And, we need to make sure type is not saturating. */
9651
9652 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9653 && !TYPE_SATURATING (type))
9654 {
9655 tree var0, con0, lit0, minus_lit0;
9656 tree var1, con1, lit1, minus_lit1;
9657 tree atype = type;
9658 bool ok = true;
9659
9660 /* Split both trees into variables, constants, and literals. Then
9661 associate each group together, the constants with literals,
9662 then the result with variables. This increases the chances of
9663 literals being recombined later and of generating relocatable
9664 expressions for the sum of a constant and literal. */
9665 var0 = split_tree (loc, arg0, type, code,
9666 &con0, &lit0, &minus_lit0, 0);
9667 var1 = split_tree (loc, arg1, type, code,
9668 &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9669
9670 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9671 if (code == MINUS_EXPR)
9672 code = PLUS_EXPR;
9673
9674 /* With undefined overflow prefer doing association in a type
9675 which wraps on overflow, if that is one of the operand types. */
9676 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9677 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9678 {
9679 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9680 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9681 atype = TREE_TYPE (arg0);
9682 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9683 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9684 atype = TREE_TYPE (arg1);
9685 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9686 }
9687
9688 /* With undefined overflow we can only associate constants with one
9689 variable, and constants whose association doesn't overflow. */
9690 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9691 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9692 {
9693 if (var0 && var1)
9694 {
9695 tree tmp0 = var0;
9696 tree tmp1 = var1;
9697 bool one_neg = false;
9698
9699 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9700 {
9701 tmp0 = TREE_OPERAND (tmp0, 0);
9702 one_neg = !one_neg;
9703 }
9704 if (CONVERT_EXPR_P (tmp0)
9705 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9706 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9707 <= TYPE_PRECISION (atype)))
9708 tmp0 = TREE_OPERAND (tmp0, 0);
9709 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9710 {
9711 tmp1 = TREE_OPERAND (tmp1, 0);
9712 one_neg = !one_neg;
9713 }
9714 if (CONVERT_EXPR_P (tmp1)
9715 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9716 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9717 <= TYPE_PRECISION (atype)))
9718 tmp1 = TREE_OPERAND (tmp1, 0);
9719 /* The only case we can still associate with two variables
9720 is if they cancel out. */
9721 if (!one_neg
9722 || !operand_equal_p (tmp0, tmp1, 0))
9723 ok = false;
9724 }
9725 }
9726
9727 /* Only do something if we found more than two objects. Otherwise,
9728 nothing has changed and we risk infinite recursion. */
9729 if (ok
9730 && (2 < ((var0 != 0) + (var1 != 0)
9731 + (con0 != 0) + (con1 != 0)
9732 + (lit0 != 0) + (lit1 != 0)
9733 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9734 {
9735 bool any_overflows = false;
9736 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9737 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9738 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9739 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9740 var0 = associate_trees (loc, var0, var1, code, atype);
9741 con0 = associate_trees (loc, con0, con1, code, atype);
9742 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9743 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9744 code, atype);
9745
9746 /* Preserve the MINUS_EXPR if the negative part of the literal is
9747 greater than the positive part. Otherwise, the multiplicative
9748 folding code (i.e extract_muldiv) may be fooled in case
9749 unsigned constants are subtracted, like in the following
9750 example: ((X*2 + 4) - 8U)/2. */
9751 if (minus_lit0 && lit0)
9752 {
9753 if (TREE_CODE (lit0) == INTEGER_CST
9754 && TREE_CODE (minus_lit0) == INTEGER_CST
9755 && tree_int_cst_lt (lit0, minus_lit0))
9756 {
9757 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9758 MINUS_EXPR, atype);
9759 lit0 = 0;
9760 }
9761 else
9762 {
9763 lit0 = associate_trees (loc, lit0, minus_lit0,
9764 MINUS_EXPR, atype);
9765 minus_lit0 = 0;
9766 }
9767 }
9768
9769 /* Don't introduce overflows through reassociation. */
9770 if (!any_overflows
9771 && ((lit0 && TREE_OVERFLOW_P (lit0))
9772 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9773 return NULL_TREE;
9774
9775 if (minus_lit0)
9776 {
9777 if (con0 == 0)
9778 return
9779 fold_convert_loc (loc, type,
9780 associate_trees (loc, var0, minus_lit0,
9781 MINUS_EXPR, atype));
9782 else
9783 {
9784 con0 = associate_trees (loc, con0, minus_lit0,
9785 MINUS_EXPR, atype);
9786 return
9787 fold_convert_loc (loc, type,
9788 associate_trees (loc, var0, con0,
9789 PLUS_EXPR, atype));
9790 }
9791 }
9792
9793 con0 = associate_trees (loc, con0, lit0, code, atype);
9794 return
9795 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9796 code, atype));
9797 }
9798 }
9799
9800 return NULL_TREE;
9801
9802 case MINUS_EXPR:
9803 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9804 if (TREE_CODE (arg0) == NEGATE_EXPR
9805 && negate_expr_p (op1)
9806 && reorder_operands_p (arg0, arg1))
9807 return fold_build2_loc (loc, MINUS_EXPR, type,
9808 negate_expr (op1),
9809 fold_convert_loc (loc, type,
9810 TREE_OPERAND (arg0, 0)));
9811
9812 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9813 __complex__ ( x, -y ). This is not the same for SNaNs or if
9814 signed zeros are involved. */
9815 if (!HONOR_SNANS (element_mode (arg0))
9816 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9817 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9818 {
9819 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9820 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9821 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9822 bool arg0rz = false, arg0iz = false;
9823 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9824 || (arg0i && (arg0iz = real_zerop (arg0i))))
9825 {
9826 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9827 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9828 if (arg0rz && arg1i && real_zerop (arg1i))
9829 {
9830 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9831 arg1r ? arg1r
9832 : build1 (REALPART_EXPR, rtype, arg1));
9833 tree ip = arg0i ? arg0i
9834 : build1 (IMAGPART_EXPR, rtype, arg0);
9835 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9836 }
9837 else if (arg0iz && arg1r && real_zerop (arg1r))
9838 {
9839 tree rp = arg0r ? arg0r
9840 : build1 (REALPART_EXPR, rtype, arg0);
9841 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9842 arg1i ? arg1i
9843 : build1 (IMAGPART_EXPR, rtype, arg1));
9844 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9845 }
9846 }
9847 }
9848
9849 /* A - B -> A + (-B) if B is easily negatable. */
9850 if (negate_expr_p (op1)
9851 && ! TYPE_OVERFLOW_SANITIZED (type)
9852 && ((FLOAT_TYPE_P (type)
9853 /* Avoid this transformation if B is a positive REAL_CST. */
9854 && (TREE_CODE (op1) != REAL_CST
9855 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9856 || INTEGRAL_TYPE_P (type)))
9857 return fold_build2_loc (loc, PLUS_EXPR, type,
9858 fold_convert_loc (loc, type, arg0),
9859 negate_expr (op1));
9860
9861 /* Fold &a[i] - &a[j] to i-j. */
9862 if (TREE_CODE (arg0) == ADDR_EXPR
9863 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9864 && TREE_CODE (arg1) == ADDR_EXPR
9865 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9866 {
9867 tree tem = fold_addr_of_array_ref_difference (loc, type,
9868 TREE_OPERAND (arg0, 0),
9869 TREE_OPERAND (arg1, 0));
9870 if (tem)
9871 return tem;
9872 }
9873
9874 if (FLOAT_TYPE_P (type)
9875 && flag_unsafe_math_optimizations
9876 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9877 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9878 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9879 return tem;
9880
9881 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9882 one. Make sure the type is not saturating and has the signedness of
9883 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9884 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9885 if ((TREE_CODE (arg0) == MULT_EXPR
9886 || TREE_CODE (arg1) == MULT_EXPR)
9887 && !TYPE_SATURATING (type)
9888 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9889 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9890 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9891 {
9892 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9893 if (tem)
9894 return tem;
9895 }
9896
9897 goto associate;
9898
9899 case MULT_EXPR:
9900 if (! FLOAT_TYPE_P (type))
9901 {
9902 /* Transform x * -C into -x * C if x is easily negatable. */
9903 if (TREE_CODE (op1) == INTEGER_CST
9904 && tree_int_cst_sgn (op1) == -1
9905 && negate_expr_p (op0)
9906 && (tem = negate_expr (op1)) != op1
9907 && ! TREE_OVERFLOW (tem))
9908 return fold_build2_loc (loc, MULT_EXPR, type,
9909 fold_convert_loc (loc, type,
9910 negate_expr (op0)), tem);
9911
9912 /* (A + A) * C -> A * 2 * C */
9913 if (TREE_CODE (arg0) == PLUS_EXPR
9914 && TREE_CODE (arg1) == INTEGER_CST
9915 && operand_equal_p (TREE_OPERAND (arg0, 0),
9916 TREE_OPERAND (arg0, 1), 0))
9917 return fold_build2_loc (loc, MULT_EXPR, type,
9918 omit_one_operand_loc (loc, type,
9919 TREE_OPERAND (arg0, 0),
9920 TREE_OPERAND (arg0, 1)),
9921 fold_build2_loc (loc, MULT_EXPR, type,
9922 build_int_cst (type, 2) , arg1));
9923
9924 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9925 sign-changing only. */
9926 if (TREE_CODE (arg1) == INTEGER_CST
9927 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9928 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9929 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9930
9931 strict_overflow_p = false;
9932 if (TREE_CODE (arg1) == INTEGER_CST
9933 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9934 &strict_overflow_p)))
9935 {
9936 if (strict_overflow_p)
9937 fold_overflow_warning (("assuming signed overflow does not "
9938 "occur when simplifying "
9939 "multiplication"),
9940 WARN_STRICT_OVERFLOW_MISC);
9941 return fold_convert_loc (loc, type, tem);
9942 }
9943
9944 /* Optimize z * conj(z) for integer complex numbers. */
9945 if (TREE_CODE (arg0) == CONJ_EXPR
9946 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9947 return fold_mult_zconjz (loc, type, arg1);
9948 if (TREE_CODE (arg1) == CONJ_EXPR
9949 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9950 return fold_mult_zconjz (loc, type, arg0);
9951 }
9952 else
9953 {
9954 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9955 This is not the same for NaNs or if signed zeros are
9956 involved. */
9957 if (!HONOR_NANS (arg0)
9958 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9959 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9960 && TREE_CODE (arg1) == COMPLEX_CST
9961 && real_zerop (TREE_REALPART (arg1)))
9962 {
9963 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9964 if (real_onep (TREE_IMAGPART (arg1)))
9965 return
9966 fold_build2_loc (loc, COMPLEX_EXPR, type,
9967 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9968 rtype, arg0)),
9969 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9970 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9971 return
9972 fold_build2_loc (loc, COMPLEX_EXPR, type,
9973 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9974 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9975 rtype, arg0)));
9976 }
9977
9978 /* Optimize z * conj(z) for floating point complex numbers.
9979 Guarded by flag_unsafe_math_optimizations as non-finite
9980 imaginary components don't produce scalar results. */
9981 if (flag_unsafe_math_optimizations
9982 && TREE_CODE (arg0) == CONJ_EXPR
9983 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9984 return fold_mult_zconjz (loc, type, arg1);
9985 if (flag_unsafe_math_optimizations
9986 && TREE_CODE (arg1) == CONJ_EXPR
9987 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9988 return fold_mult_zconjz (loc, type, arg0);
9989
9990 if (flag_unsafe_math_optimizations)
9991 {
9992
9993 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9994 if (!in_gimple_form
9995 && optimize
9996 && operand_equal_p (arg0, arg1, 0))
9997 {
9998 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9999
10000 if (powfn)
10001 {
10002 tree arg = build_real (type, dconst2);
10003 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10004 }
10005 }
10006 }
10007 }
10008 goto associate;
10009
10010 case BIT_IOR_EXPR:
10011 /* Canonicalize (X & C1) | C2. */
10012 if (TREE_CODE (arg0) == BIT_AND_EXPR
10013 && TREE_CODE (arg1) == INTEGER_CST
10014 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10015 {
10016 int width = TYPE_PRECISION (type), w;
10017 wide_int c1 = TREE_OPERAND (arg0, 1);
10018 wide_int c2 = arg1;
10019
10020 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10021 if ((c1 & c2) == c1)
10022 return omit_one_operand_loc (loc, type, arg1,
10023 TREE_OPERAND (arg0, 0));
10024
10025 wide_int msk = wi::mask (width, false,
10026 TYPE_PRECISION (TREE_TYPE (arg1)));
10027
10028 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10029 if (msk.and_not (c1 | c2) == 0)
10030 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10031 TREE_OPERAND (arg0, 0), arg1);
10032
10033 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10034 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10035 mode which allows further optimizations. */
10036 c1 &= msk;
10037 c2 &= msk;
10038 wide_int c3 = c1.and_not (c2);
10039 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10040 {
10041 wide_int mask = wi::mask (w, false,
10042 TYPE_PRECISION (type));
10043 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10044 {
10045 c3 = mask;
10046 break;
10047 }
10048 }
10049
10050 if (c3 != c1)
10051 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10052 fold_build2_loc (loc, BIT_AND_EXPR, type,
10053 TREE_OPERAND (arg0, 0),
10054 wide_int_to_tree (type,
10055 c3)),
10056 arg1);
10057 }
10058
10059 /* See if this can be simplified into a rotate first. If that
10060 is unsuccessful continue in the association code. */
10061 goto bit_rotate;
10062
10063 case BIT_XOR_EXPR:
10064 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10065 if (TREE_CODE (arg0) == BIT_AND_EXPR
10066 && INTEGRAL_TYPE_P (type)
10067 && integer_onep (TREE_OPERAND (arg0, 1))
10068 && integer_onep (arg1))
10069 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10070 build_zero_cst (TREE_TYPE (arg0)));
10071
10072 /* See if this can be simplified into a rotate first. If that
10073 is unsuccessful continue in the association code. */
10074 goto bit_rotate;
10075
10076 case BIT_AND_EXPR:
10077 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10078 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10079 && INTEGRAL_TYPE_P (type)
10080 && integer_onep (TREE_OPERAND (arg0, 1))
10081 && integer_onep (arg1))
10082 {
10083 tree tem2;
10084 tem = TREE_OPERAND (arg0, 0);
10085 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10086 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10087 tem, tem2);
10088 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10089 build_zero_cst (TREE_TYPE (tem)));
10090 }
10091 /* Fold ~X & 1 as (X & 1) == 0. */
10092 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10093 && INTEGRAL_TYPE_P (type)
10094 && integer_onep (arg1))
10095 {
10096 tree tem2;
10097 tem = TREE_OPERAND (arg0, 0);
10098 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10099 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10100 tem, tem2);
10101 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10102 build_zero_cst (TREE_TYPE (tem)));
10103 }
10104 /* Fold !X & 1 as X == 0. */
10105 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10106 && integer_onep (arg1))
10107 {
10108 tem = TREE_OPERAND (arg0, 0);
10109 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10110 build_zero_cst (TREE_TYPE (tem)));
10111 }
10112
10113 /* Fold (X ^ Y) & Y as ~X & Y. */
10114 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10115 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10116 {
10117 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10118 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10119 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10120 fold_convert_loc (loc, type, arg1));
10121 }
10122 /* Fold (X ^ Y) & X as ~Y & X. */
10123 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10124 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10125 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10126 {
10127 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10128 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10129 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10130 fold_convert_loc (loc, type, arg1));
10131 }
10132 /* Fold X & (X ^ Y) as X & ~Y. */
10133 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10134 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10135 {
10136 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10137 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10138 fold_convert_loc (loc, type, arg0),
10139 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10140 }
10141 /* Fold X & (Y ^ X) as ~Y & X. */
10142 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10143 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10144 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10145 {
10146 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10147 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10148 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10149 fold_convert_loc (loc, type, arg0));
10150 }
10151
10152 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10153 multiple of 1 << CST. */
10154 if (TREE_CODE (arg1) == INTEGER_CST)
10155 {
10156 wide_int cst1 = arg1;
10157 wide_int ncst1 = -cst1;
10158 if ((cst1 & ncst1) == ncst1
10159 && multiple_of_p (type, arg0,
10160 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10161 return fold_convert_loc (loc, type, arg0);
10162 }
10163
10164 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10165 bits from CST2. */
10166 if (TREE_CODE (arg1) == INTEGER_CST
10167 && TREE_CODE (arg0) == MULT_EXPR
10168 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10169 {
10170 wide_int warg1 = arg1;
10171 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10172
10173 if (masked == 0)
10174 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10175 arg0, arg1);
10176 else if (masked != warg1)
10177 {
10178 /* Avoid the transform if arg1 is a mask of some
10179 mode which allows further optimizations. */
10180 int pop = wi::popcount (warg1);
10181 if (!(pop >= BITS_PER_UNIT
10182 && exact_log2 (pop) != -1
10183 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10184 return fold_build2_loc (loc, code, type, op0,
10185 wide_int_to_tree (type, masked));
10186 }
10187 }
10188
10189 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10190 ((A & N) + B) & M -> (A + B) & M
10191 Similarly if (N & M) == 0,
10192 ((A | N) + B) & M -> (A + B) & M
10193 and for - instead of + (or unary - instead of +)
10194 and/or ^ instead of |.
10195 If B is constant and (B & M) == 0, fold into A & M. */
10196 if (TREE_CODE (arg1) == INTEGER_CST)
10197 {
10198 wide_int cst1 = arg1;
10199 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10200 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10201 && (TREE_CODE (arg0) == PLUS_EXPR
10202 || TREE_CODE (arg0) == MINUS_EXPR
10203 || TREE_CODE (arg0) == NEGATE_EXPR)
10204 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10205 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10206 {
10207 tree pmop[2];
10208 int which = 0;
10209 wide_int cst0;
10210
10211 /* Now we know that arg0 is (C + D) or (C - D) or
10212 -C and arg1 (M) is == (1LL << cst) - 1.
10213 Store C into PMOP[0] and D into PMOP[1]. */
10214 pmop[0] = TREE_OPERAND (arg0, 0);
10215 pmop[1] = NULL;
10216 if (TREE_CODE (arg0) != NEGATE_EXPR)
10217 {
10218 pmop[1] = TREE_OPERAND (arg0, 1);
10219 which = 1;
10220 }
10221
10222 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10223 which = -1;
10224
10225 for (; which >= 0; which--)
10226 switch (TREE_CODE (pmop[which]))
10227 {
10228 case BIT_AND_EXPR:
10229 case BIT_IOR_EXPR:
10230 case BIT_XOR_EXPR:
10231 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10232 != INTEGER_CST)
10233 break;
10234 cst0 = TREE_OPERAND (pmop[which], 1);
10235 cst0 &= cst1;
10236 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10237 {
10238 if (cst0 != cst1)
10239 break;
10240 }
10241 else if (cst0 != 0)
10242 break;
10243 /* If C or D is of the form (A & N) where
10244 (N & M) == M, or of the form (A | N) or
10245 (A ^ N) where (N & M) == 0, replace it with A. */
10246 pmop[which] = TREE_OPERAND (pmop[which], 0);
10247 break;
10248 case INTEGER_CST:
10249 /* If C or D is a N where (N & M) == 0, it can be
10250 omitted (assumed 0). */
10251 if ((TREE_CODE (arg0) == PLUS_EXPR
10252 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10253 && (cst1 & pmop[which]) == 0)
10254 pmop[which] = NULL;
10255 break;
10256 default:
10257 break;
10258 }
10259
10260 /* Only build anything new if we optimized one or both arguments
10261 above. */
10262 if (pmop[0] != TREE_OPERAND (arg0, 0)
10263 || (TREE_CODE (arg0) != NEGATE_EXPR
10264 && pmop[1] != TREE_OPERAND (arg0, 1)))
10265 {
10266 tree utype = TREE_TYPE (arg0);
10267 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10268 {
10269 /* Perform the operations in a type that has defined
10270 overflow behavior. */
10271 utype = unsigned_type_for (TREE_TYPE (arg0));
10272 if (pmop[0] != NULL)
10273 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10274 if (pmop[1] != NULL)
10275 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10276 }
10277
10278 if (TREE_CODE (arg0) == NEGATE_EXPR)
10279 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10280 else if (TREE_CODE (arg0) == PLUS_EXPR)
10281 {
10282 if (pmop[0] != NULL && pmop[1] != NULL)
10283 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10284 pmop[0], pmop[1]);
10285 else if (pmop[0] != NULL)
10286 tem = pmop[0];
10287 else if (pmop[1] != NULL)
10288 tem = pmop[1];
10289 else
10290 return build_int_cst (type, 0);
10291 }
10292 else if (pmop[0] == NULL)
10293 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10294 else
10295 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10296 pmop[0], pmop[1]);
10297 /* TEM is now the new binary +, - or unary - replacement. */
10298 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10299 fold_convert_loc (loc, utype, arg1));
10300 return fold_convert_loc (loc, type, tem);
10301 }
10302 }
10303 }
10304
10305 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10306 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10307 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10308 {
10309 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10310
10311 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10312 if (mask == -1)
10313 return
10314 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10315 }
10316
10317 goto associate;
10318
10319 case RDIV_EXPR:
10320 /* Don't touch a floating-point divide by zero unless the mode
10321 of the constant can represent infinity. */
10322 if (TREE_CODE (arg1) == REAL_CST
10323 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10324 && real_zerop (arg1))
10325 return NULL_TREE;
10326
10327 /* (-A) / (-B) -> A / B */
10328 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10329 return fold_build2_loc (loc, RDIV_EXPR, type,
10330 TREE_OPERAND (arg0, 0),
10331 negate_expr (arg1));
10332 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10333 return fold_build2_loc (loc, RDIV_EXPR, type,
10334 negate_expr (arg0),
10335 TREE_OPERAND (arg1, 0));
10336 return NULL_TREE;
10337
10338 case TRUNC_DIV_EXPR:
10339 /* Fall through */
10340
10341 case FLOOR_DIV_EXPR:
10342 /* Simplify A / (B << N) where A and B are positive and B is
10343 a power of 2, to A >> (N + log2(B)). */
10344 strict_overflow_p = false;
10345 if (TREE_CODE (arg1) == LSHIFT_EXPR
10346 && (TYPE_UNSIGNED (type)
10347 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10348 {
10349 tree sval = TREE_OPERAND (arg1, 0);
10350 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10351 {
10352 tree sh_cnt = TREE_OPERAND (arg1, 1);
10353 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10354 wi::exact_log2 (sval));
10355
10356 if (strict_overflow_p)
10357 fold_overflow_warning (("assuming signed overflow does not "
10358 "occur when simplifying A / (B << N)"),
10359 WARN_STRICT_OVERFLOW_MISC);
10360
10361 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10362 sh_cnt, pow2);
10363 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10364 fold_convert_loc (loc, type, arg0), sh_cnt);
10365 }
10366 }
10367
10368 /* Fall through */
10369
10370 case ROUND_DIV_EXPR:
10371 case CEIL_DIV_EXPR:
10372 case EXACT_DIV_EXPR:
10373 if (integer_zerop (arg1))
10374 return NULL_TREE;
10375
10376 /* Convert -A / -B to A / B when the type is signed and overflow is
10377 undefined. */
10378 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10379 && TREE_CODE (arg0) == NEGATE_EXPR
10380 && negate_expr_p (op1))
10381 {
10382 if (INTEGRAL_TYPE_P (type))
10383 fold_overflow_warning (("assuming signed overflow does not occur "
10384 "when distributing negation across "
10385 "division"),
10386 WARN_STRICT_OVERFLOW_MISC);
10387 return fold_build2_loc (loc, code, type,
10388 fold_convert_loc (loc, type,
10389 TREE_OPERAND (arg0, 0)),
10390 negate_expr (op1));
10391 }
10392 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10393 && TREE_CODE (arg1) == NEGATE_EXPR
10394 && negate_expr_p (op0))
10395 {
10396 if (INTEGRAL_TYPE_P (type))
10397 fold_overflow_warning (("assuming signed overflow does not occur "
10398 "when distributing negation across "
10399 "division"),
10400 WARN_STRICT_OVERFLOW_MISC);
10401 return fold_build2_loc (loc, code, type,
10402 negate_expr (op0),
10403 fold_convert_loc (loc, type,
10404 TREE_OPERAND (arg1, 0)));
10405 }
10406
10407 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10408 operation, EXACT_DIV_EXPR.
10409
10410 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10411 At one time others generated faster code, it's not clear if they do
10412 after the last round to changes to the DIV code in expmed.c. */
10413 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10414 && multiple_of_p (type, arg0, arg1))
10415 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10416 fold_convert (type, arg0),
10417 fold_convert (type, arg1));
10418
10419 strict_overflow_p = false;
10420 if (TREE_CODE (arg1) == INTEGER_CST
10421 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10422 &strict_overflow_p)))
10423 {
10424 if (strict_overflow_p)
10425 fold_overflow_warning (("assuming signed overflow does not occur "
10426 "when simplifying division"),
10427 WARN_STRICT_OVERFLOW_MISC);
10428 return fold_convert_loc (loc, type, tem);
10429 }
10430
10431 return NULL_TREE;
10432
10433 case CEIL_MOD_EXPR:
10434 case FLOOR_MOD_EXPR:
10435 case ROUND_MOD_EXPR:
10436 case TRUNC_MOD_EXPR:
10437 strict_overflow_p = false;
10438 if (TREE_CODE (arg1) == INTEGER_CST
10439 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10440 &strict_overflow_p)))
10441 {
10442 if (strict_overflow_p)
10443 fold_overflow_warning (("assuming signed overflow does not occur "
10444 "when simplifying modulus"),
10445 WARN_STRICT_OVERFLOW_MISC);
10446 return fold_convert_loc (loc, type, tem);
10447 }
10448
10449 return NULL_TREE;
10450
10451 case LROTATE_EXPR:
10452 case RROTATE_EXPR:
10453 case RSHIFT_EXPR:
10454 case LSHIFT_EXPR:
10455 /* Since negative shift count is not well-defined,
10456 don't try to compute it in the compiler. */
10457 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10458 return NULL_TREE;
10459
10460 prec = element_precision (type);
10461
10462 /* If we have a rotate of a bit operation with the rotate count and
10463 the second operand of the bit operation both constant,
10464 permute the two operations. */
10465 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10466 && (TREE_CODE (arg0) == BIT_AND_EXPR
10467 || TREE_CODE (arg0) == BIT_IOR_EXPR
10468 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10469 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10470 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10471 fold_build2_loc (loc, code, type,
10472 TREE_OPERAND (arg0, 0), arg1),
10473 fold_build2_loc (loc, code, type,
10474 TREE_OPERAND (arg0, 1), arg1));
10475
10476 /* Two consecutive rotates adding up to the some integer
10477 multiple of the precision of the type can be ignored. */
10478 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10479 && TREE_CODE (arg0) == RROTATE_EXPR
10480 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10481 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10482 prec) == 0)
10483 return TREE_OPERAND (arg0, 0);
10484
10485 return NULL_TREE;
10486
10487 case MIN_EXPR:
10488 case MAX_EXPR:
10489 goto associate;
10490
10491 case TRUTH_ANDIF_EXPR:
10492 /* Note that the operands of this must be ints
10493 and their values must be 0 or 1.
10494 ("true" is a fixed value perhaps depending on the language.) */
10495 /* If first arg is constant zero, return it. */
10496 if (integer_zerop (arg0))
10497 return fold_convert_loc (loc, type, arg0);
10498 case TRUTH_AND_EXPR:
10499 /* If either arg is constant true, drop it. */
10500 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10501 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10502 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10503 /* Preserve sequence points. */
10504 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10505 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10506 /* If second arg is constant zero, result is zero, but first arg
10507 must be evaluated. */
10508 if (integer_zerop (arg1))
10509 return omit_one_operand_loc (loc, type, arg1, arg0);
10510 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10511 case will be handled here. */
10512 if (integer_zerop (arg0))
10513 return omit_one_operand_loc (loc, type, arg0, arg1);
10514
10515 /* !X && X is always false. */
10516 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10517 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10518 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10519 /* X && !X is always false. */
10520 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10521 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10522 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10523
10524 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10525 means A >= Y && A != MAX, but in this case we know that
10526 A < X <= MAX. */
10527
10528 if (!TREE_SIDE_EFFECTS (arg0)
10529 && !TREE_SIDE_EFFECTS (arg1))
10530 {
10531 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10532 if (tem && !operand_equal_p (tem, arg0, 0))
10533 return fold_build2_loc (loc, code, type, tem, arg1);
10534
10535 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10536 if (tem && !operand_equal_p (tem, arg1, 0))
10537 return fold_build2_loc (loc, code, type, arg0, tem);
10538 }
10539
10540 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10541 != NULL_TREE)
10542 return tem;
10543
10544 return NULL_TREE;
10545
10546 case TRUTH_ORIF_EXPR:
10547 /* Note that the operands of this must be ints
10548 and their values must be 0 or true.
10549 ("true" is a fixed value perhaps depending on the language.) */
10550 /* If first arg is constant true, return it. */
10551 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10552 return fold_convert_loc (loc, type, arg0);
10553 case TRUTH_OR_EXPR:
10554 /* If either arg is constant zero, drop it. */
10555 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10556 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10557 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10558 /* Preserve sequence points. */
10559 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10560 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10561 /* If second arg is constant true, result is true, but we must
10562 evaluate first arg. */
10563 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10564 return omit_one_operand_loc (loc, type, arg1, arg0);
10565 /* Likewise for first arg, but note this only occurs here for
10566 TRUTH_OR_EXPR. */
10567 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10568 return omit_one_operand_loc (loc, type, arg0, arg1);
10569
10570 /* !X || X is always true. */
10571 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10572 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10573 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10574 /* X || !X is always true. */
10575 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10576 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10577 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10578
10579 /* (X && !Y) || (!X && Y) is X ^ Y */
10580 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10581 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10582 {
10583 tree a0, a1, l0, l1, n0, n1;
10584
10585 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10586 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10587
10588 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10589 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10590
10591 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10592 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10593
10594 if ((operand_equal_p (n0, a0, 0)
10595 && operand_equal_p (n1, a1, 0))
10596 || (operand_equal_p (n0, a1, 0)
10597 && operand_equal_p (n1, a0, 0)))
10598 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10599 }
10600
10601 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10602 != NULL_TREE)
10603 return tem;
10604
10605 return NULL_TREE;
10606
10607 case TRUTH_XOR_EXPR:
10608 /* If the second arg is constant zero, drop it. */
10609 if (integer_zerop (arg1))
10610 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10611 /* If the second arg is constant true, this is a logical inversion. */
10612 if (integer_onep (arg1))
10613 {
10614 tem = invert_truthvalue_loc (loc, arg0);
10615 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10616 }
10617 /* Identical arguments cancel to zero. */
10618 if (operand_equal_p (arg0, arg1, 0))
10619 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10620
10621 /* !X ^ X is always true. */
10622 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10623 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10624 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10625
10626 /* X ^ !X is always true. */
10627 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10628 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10629 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10630
10631 return NULL_TREE;
10632
10633 case EQ_EXPR:
10634 case NE_EXPR:
10635 STRIP_NOPS (arg0);
10636 STRIP_NOPS (arg1);
10637
10638 tem = fold_comparison (loc, code, type, op0, op1);
10639 if (tem != NULL_TREE)
10640 return tem;
10641
10642 /* bool_var != 1 becomes !bool_var. */
10643 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10644 && code == NE_EXPR)
10645 return fold_convert_loc (loc, type,
10646 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10647 TREE_TYPE (arg0), arg0));
10648
10649 /* bool_var == 0 becomes !bool_var. */
10650 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10651 && code == EQ_EXPR)
10652 return fold_convert_loc (loc, type,
10653 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10654 TREE_TYPE (arg0), arg0));
10655
10656 /* !exp != 0 becomes !exp */
10657 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10658 && code == NE_EXPR)
10659 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10660
10661 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10662 if ((TREE_CODE (arg0) == PLUS_EXPR
10663 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10664 || TREE_CODE (arg0) == MINUS_EXPR)
10665 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10666 0)),
10667 arg1, 0)
10668 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10669 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10670 {
10671 tree val = TREE_OPERAND (arg0, 1);
10672 val = fold_build2_loc (loc, code, type, val,
10673 build_int_cst (TREE_TYPE (val), 0));
10674 return omit_two_operands_loc (loc, type, val,
10675 TREE_OPERAND (arg0, 0), arg1);
10676 }
10677
10678 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10679 if ((TREE_CODE (arg1) == PLUS_EXPR
10680 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10681 || TREE_CODE (arg1) == MINUS_EXPR)
10682 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10683 0)),
10684 arg0, 0)
10685 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10686 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10687 {
10688 tree val = TREE_OPERAND (arg1, 1);
10689 val = fold_build2_loc (loc, code, type, val,
10690 build_int_cst (TREE_TYPE (val), 0));
10691 return omit_two_operands_loc (loc, type, val,
10692 TREE_OPERAND (arg1, 0), arg0);
10693 }
10694
10695 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10696 if (TREE_CODE (arg0) == MINUS_EXPR
10697 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10698 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10699 1)),
10700 arg1, 0)
10701 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10702 return omit_two_operands_loc (loc, type,
10703 code == NE_EXPR
10704 ? boolean_true_node : boolean_false_node,
10705 TREE_OPERAND (arg0, 1), arg1);
10706
10707 /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */
10708 if (TREE_CODE (arg1) == MINUS_EXPR
10709 && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
10710 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10711 1)),
10712 arg0, 0)
10713 && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
10714 return omit_two_operands_loc (loc, type,
10715 code == NE_EXPR
10716 ? boolean_true_node : boolean_false_node,
10717 TREE_OPERAND (arg1, 1), arg0);
10718
10719 /* If this is an EQ or NE comparison with zero and ARG0 is
10720 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10721 two operations, but the latter can be done in one less insn
10722 on machines that have only two-operand insns or on which a
10723 constant cannot be the first operand. */
10724 if (TREE_CODE (arg0) == BIT_AND_EXPR
10725 && integer_zerop (arg1))
10726 {
10727 tree arg00 = TREE_OPERAND (arg0, 0);
10728 tree arg01 = TREE_OPERAND (arg0, 1);
10729 if (TREE_CODE (arg00) == LSHIFT_EXPR
10730 && integer_onep (TREE_OPERAND (arg00, 0)))
10731 {
10732 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10733 arg01, TREE_OPERAND (arg00, 1));
10734 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10735 build_int_cst (TREE_TYPE (arg0), 1));
10736 return fold_build2_loc (loc, code, type,
10737 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10738 arg1);
10739 }
10740 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10741 && integer_onep (TREE_OPERAND (arg01, 0)))
10742 {
10743 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10744 arg00, TREE_OPERAND (arg01, 1));
10745 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10746 build_int_cst (TREE_TYPE (arg0), 1));
10747 return fold_build2_loc (loc, code, type,
10748 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10749 arg1);
10750 }
10751 }
10752
10753 /* If this is an NE or EQ comparison of zero against the result of a
10754 signed MOD operation whose second operand is a power of 2, make
10755 the MOD operation unsigned since it is simpler and equivalent. */
10756 if (integer_zerop (arg1)
10757 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10758 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10759 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10760 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10761 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10762 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10763 {
10764 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10765 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10766 fold_convert_loc (loc, newtype,
10767 TREE_OPERAND (arg0, 0)),
10768 fold_convert_loc (loc, newtype,
10769 TREE_OPERAND (arg0, 1)));
10770
10771 return fold_build2_loc (loc, code, type, newmod,
10772 fold_convert_loc (loc, newtype, arg1));
10773 }
10774
10775 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10776 C1 is a valid shift constant, and C2 is a power of two, i.e.
10777 a single bit. */
10778 if (TREE_CODE (arg0) == BIT_AND_EXPR
10779 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10780 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10781 == INTEGER_CST
10782 && integer_pow2p (TREE_OPERAND (arg0, 1))
10783 && integer_zerop (arg1))
10784 {
10785 tree itype = TREE_TYPE (arg0);
10786 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10787 prec = TYPE_PRECISION (itype);
10788
10789 /* Check for a valid shift count. */
10790 if (wi::ltu_p (arg001, prec))
10791 {
10792 tree arg01 = TREE_OPERAND (arg0, 1);
10793 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10794 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10795 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10796 can be rewritten as (X & (C2 << C1)) != 0. */
10797 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10798 {
10799 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10800 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10801 return fold_build2_loc (loc, code, type, tem,
10802 fold_convert_loc (loc, itype, arg1));
10803 }
10804 /* Otherwise, for signed (arithmetic) shifts,
10805 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10806 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10807 else if (!TYPE_UNSIGNED (itype))
10808 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10809 arg000, build_int_cst (itype, 0));
 10810 	      /* Otherwise, for unsigned (logical) shifts,
10811 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10812 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10813 else
10814 return omit_one_operand_loc (loc, type,
10815 code == EQ_EXPR ? integer_one_node
10816 : integer_zero_node,
10817 arg000);
10818 }
10819 }
10820
10821 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10822 Similarly for NE_EXPR. */
10823 if (TREE_CODE (arg0) == BIT_AND_EXPR
10824 && TREE_CODE (arg1) == INTEGER_CST
10825 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10826 {
10827 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10828 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10829 TREE_OPERAND (arg0, 1));
10830 tree dandnotc
10831 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10832 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10833 notc);
10834 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10835 if (integer_nonzerop (dandnotc))
10836 return omit_one_operand_loc (loc, type, rslt, arg0);
10837 }
10838
10839 /* If this is a comparison of a field, we may be able to simplify it. */
10840 if ((TREE_CODE (arg0) == COMPONENT_REF
10841 || TREE_CODE (arg0) == BIT_FIELD_REF)
10842 /* Handle the constant case even without -O
10843 to make sure the warnings are given. */
10844 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10845 {
10846 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10847 if (t1)
10848 return t1;
10849 }
10850
10851 /* Optimize comparisons of strlen vs zero to a compare of the
10852 first character of the string vs zero. To wit,
10853 strlen(ptr) == 0 => *ptr == 0
10854 strlen(ptr) != 0 => *ptr != 0
10855 Other cases should reduce to one of these two (or a constant)
10856 due to the return value of strlen being unsigned. */
10857 if (TREE_CODE (arg0) == CALL_EXPR
10858 && integer_zerop (arg1))
10859 {
10860 tree fndecl = get_callee_fndecl (arg0);
10861
10862 if (fndecl
10863 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10864 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10865 && call_expr_nargs (arg0) == 1
10866 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10867 {
10868 tree iref = build_fold_indirect_ref_loc (loc,
10869 CALL_EXPR_ARG (arg0, 0));
10870 return fold_build2_loc (loc, code, type, iref,
10871 build_int_cst (TREE_TYPE (iref), 0));
10872 }
10873 }
10874
10875 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10876 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10877 if (TREE_CODE (arg0) == RSHIFT_EXPR
10878 && integer_zerop (arg1)
10879 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10880 {
10881 tree arg00 = TREE_OPERAND (arg0, 0);
10882 tree arg01 = TREE_OPERAND (arg0, 1);
10883 tree itype = TREE_TYPE (arg00);
10884 if (wi::eq_p (arg01, element_precision (itype) - 1))
10885 {
10886 if (TYPE_UNSIGNED (itype))
10887 {
10888 itype = signed_type_for (itype);
10889 arg00 = fold_convert_loc (loc, itype, arg00);
10890 }
10891 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10892 type, arg00, build_zero_cst (itype));
10893 }
10894 }
10895
10896 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10897 (X & C) == 0 when C is a single bit. */
10898 if (TREE_CODE (arg0) == BIT_AND_EXPR
10899 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10900 && integer_zerop (arg1)
10901 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10902 {
10903 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10904 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10905 TREE_OPERAND (arg0, 1));
10906 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10907 type, tem,
10908 fold_convert_loc (loc, TREE_TYPE (arg0),
10909 arg1));
10910 }
10911
10912 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10913 constant C is a power of two, i.e. a single bit. */
10914 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10915 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10916 && integer_zerop (arg1)
10917 && integer_pow2p (TREE_OPERAND (arg0, 1))
10918 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10919 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10920 {
10921 tree arg00 = TREE_OPERAND (arg0, 0);
10922 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10923 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10924 }
10925
 10926       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
 10927 	 when C is a power of two, i.e. a single bit.  */
10928 if (TREE_CODE (arg0) == BIT_AND_EXPR
10929 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10930 && integer_zerop (arg1)
10931 && integer_pow2p (TREE_OPERAND (arg0, 1))
10932 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10933 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10934 {
10935 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10936 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10937 arg000, TREE_OPERAND (arg0, 1));
10938 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10939 tem, build_int_cst (TREE_TYPE (tem), 0));
10940 }
10941
10942 if (integer_zerop (arg1)
10943 && tree_expr_nonzero_p (arg0))
10944 {
10945 tree res = constant_boolean_node (code==NE_EXPR, type);
10946 return omit_one_operand_loc (loc, type, res, arg0);
10947 }
10948
 10949       /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries.  */
10950 if (TREE_CODE (arg0) == BIT_AND_EXPR
10951 && TREE_CODE (arg1) == BIT_AND_EXPR)
10952 {
10953 tree arg00 = TREE_OPERAND (arg0, 0);
10954 tree arg01 = TREE_OPERAND (arg0, 1);
10955 tree arg10 = TREE_OPERAND (arg1, 0);
10956 tree arg11 = TREE_OPERAND (arg1, 1);
10957 tree itype = TREE_TYPE (arg0);
10958
10959 if (operand_equal_p (arg01, arg11, 0))
10960 return fold_build2_loc (loc, code, type,
10961 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10962 fold_build2_loc (loc,
10963 BIT_XOR_EXPR, itype,
10964 arg00, arg10),
10965 arg01),
10966 build_zero_cst (itype));
10967
10968 if (operand_equal_p (arg01, arg10, 0))
10969 return fold_build2_loc (loc, code, type,
10970 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10971 fold_build2_loc (loc,
10972 BIT_XOR_EXPR, itype,
10973 arg00, arg11),
10974 arg01),
10975 build_zero_cst (itype));
10976
10977 if (operand_equal_p (arg00, arg11, 0))
10978 return fold_build2_loc (loc, code, type,
10979 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10980 fold_build2_loc (loc,
10981 BIT_XOR_EXPR, itype,
10982 arg01, arg10),
10983 arg00),
10984 build_zero_cst (itype));
10985
10986 if (operand_equal_p (arg00, arg10, 0))
10987 return fold_build2_loc (loc, code, type,
10988 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10989 fold_build2_loc (loc,
10990 BIT_XOR_EXPR, itype,
10991 arg01, arg11),
10992 arg00),
10993 build_zero_cst (itype));
10994 }
10995
10996 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10997 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10998 {
10999 tree arg00 = TREE_OPERAND (arg0, 0);
11000 tree arg01 = TREE_OPERAND (arg0, 1);
11001 tree arg10 = TREE_OPERAND (arg1, 0);
11002 tree arg11 = TREE_OPERAND (arg1, 1);
11003 tree itype = TREE_TYPE (arg0);
11004
11005 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11006 operand_equal_p guarantees no side-effects so we don't need
11007 to use omit_one_operand on Z. */
11008 if (operand_equal_p (arg01, arg11, 0))
11009 return fold_build2_loc (loc, code, type, arg00,
11010 fold_convert_loc (loc, TREE_TYPE (arg00),
11011 arg10));
11012 if (operand_equal_p (arg01, arg10, 0))
11013 return fold_build2_loc (loc, code, type, arg00,
11014 fold_convert_loc (loc, TREE_TYPE (arg00),
11015 arg11));
11016 if (operand_equal_p (arg00, arg11, 0))
11017 return fold_build2_loc (loc, code, type, arg01,
11018 fold_convert_loc (loc, TREE_TYPE (arg01),
11019 arg10));
11020 if (operand_equal_p (arg00, arg10, 0))
11021 return fold_build2_loc (loc, code, type, arg01,
11022 fold_convert_loc (loc, TREE_TYPE (arg01),
11023 arg11));
11024
11025 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11026 if (TREE_CODE (arg01) == INTEGER_CST
11027 && TREE_CODE (arg11) == INTEGER_CST)
11028 {
11029 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11030 fold_convert_loc (loc, itype, arg11));
11031 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11032 return fold_build2_loc (loc, code, type, tem,
11033 fold_convert_loc (loc, itype, arg10));
11034 }
11035 }
11036
11037 /* Attempt to simplify equality/inequality comparisons of complex
11038 values. Only lower the comparison if the result is known or
11039 can be simplified to a single scalar comparison. */
11040 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11041 || TREE_CODE (arg0) == COMPLEX_CST)
11042 && (TREE_CODE (arg1) == COMPLEX_EXPR
11043 || TREE_CODE (arg1) == COMPLEX_CST))
11044 {
11045 tree real0, imag0, real1, imag1;
11046 tree rcond, icond;
11047
11048 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11049 {
11050 real0 = TREE_OPERAND (arg0, 0);
11051 imag0 = TREE_OPERAND (arg0, 1);
11052 }
11053 else
11054 {
11055 real0 = TREE_REALPART (arg0);
11056 imag0 = TREE_IMAGPART (arg0);
11057 }
11058
11059 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11060 {
11061 real1 = TREE_OPERAND (arg1, 0);
11062 imag1 = TREE_OPERAND (arg1, 1);
11063 }
11064 else
11065 {
11066 real1 = TREE_REALPART (arg1);
11067 imag1 = TREE_IMAGPART (arg1);
11068 }
11069
11070 rcond = fold_binary_loc (loc, code, type, real0, real1);
11071 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11072 {
11073 if (integer_zerop (rcond))
11074 {
11075 if (code == EQ_EXPR)
11076 return omit_two_operands_loc (loc, type, boolean_false_node,
11077 imag0, imag1);
11078 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11079 }
11080 else
11081 {
11082 if (code == NE_EXPR)
11083 return omit_two_operands_loc (loc, type, boolean_true_node,
11084 imag0, imag1);
11085 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11086 }
11087 }
11088
11089 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11090 if (icond && TREE_CODE (icond) == INTEGER_CST)
11091 {
11092 if (integer_zerop (icond))
11093 {
11094 if (code == EQ_EXPR)
11095 return omit_two_operands_loc (loc, type, boolean_false_node,
11096 real0, real1);
11097 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11098 }
11099 else
11100 {
11101 if (code == NE_EXPR)
11102 return omit_two_operands_loc (loc, type, boolean_true_node,
11103 real0, real1);
11104 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11105 }
11106 }
11107 }
11108
11109 return NULL_TREE;
11110
11111 case LT_EXPR:
11112 case GT_EXPR:
11113 case LE_EXPR:
11114 case GE_EXPR:
11115 tem = fold_comparison (loc, code, type, op0, op1);
11116 if (tem != NULL_TREE)
11117 return tem;
11118
11119 /* Transform comparisons of the form X +- C CMP X. */
11120 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11121 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11122 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11123 && !HONOR_SNANS (arg0))
11124 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11125 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11126 {
11127 tree arg01 = TREE_OPERAND (arg0, 1);
11128 enum tree_code code0 = TREE_CODE (arg0);
11129 int is_positive;
11130
11131 if (TREE_CODE (arg01) == REAL_CST)
11132 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11133 else
11134 is_positive = tree_int_cst_sgn (arg01);
11135
11136 /* (X - c) > X becomes false. */
11137 if (code == GT_EXPR
11138 && ((code0 == MINUS_EXPR && is_positive >= 0)
11139 || (code0 == PLUS_EXPR && is_positive <= 0)))
11140 {
11141 if (TREE_CODE (arg01) == INTEGER_CST
11142 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11143 fold_overflow_warning (("assuming signed overflow does not "
11144 "occur when assuming that (X - c) > X "
11145 "is always false"),
11146 WARN_STRICT_OVERFLOW_ALL);
11147 return constant_boolean_node (0, type);
11148 }
11149
11150 /* Likewise (X + c) < X becomes false. */
11151 if (code == LT_EXPR
11152 && ((code0 == PLUS_EXPR && is_positive >= 0)
11153 || (code0 == MINUS_EXPR && is_positive <= 0)))
11154 {
11155 if (TREE_CODE (arg01) == INTEGER_CST
11156 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11157 fold_overflow_warning (("assuming signed overflow does not "
11158 "occur when assuming that "
11159 "(X + c) < X is always false"),
11160 WARN_STRICT_OVERFLOW_ALL);
11161 return constant_boolean_node (0, type);
11162 }
11163
11164 /* Convert (X - c) <= X to true. */
11165 if (!HONOR_NANS (arg1)
11166 && code == LE_EXPR
11167 && ((code0 == MINUS_EXPR && is_positive >= 0)
11168 || (code0 == PLUS_EXPR && is_positive <= 0)))
11169 {
11170 if (TREE_CODE (arg01) == INTEGER_CST
11171 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11172 fold_overflow_warning (("assuming signed overflow does not "
11173 "occur when assuming that "
11174 "(X - c) <= X is always true"),
11175 WARN_STRICT_OVERFLOW_ALL);
11176 return constant_boolean_node (1, type);
11177 }
11178
11179 /* Convert (X + c) >= X to true. */
11180 if (!HONOR_NANS (arg1)
11181 && code == GE_EXPR
11182 && ((code0 == PLUS_EXPR && is_positive >= 0)
11183 || (code0 == MINUS_EXPR && is_positive <= 0)))
11184 {
11185 if (TREE_CODE (arg01) == INTEGER_CST
11186 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11187 fold_overflow_warning (("assuming signed overflow does not "
11188 "occur when assuming that "
11189 "(X + c) >= X is always true"),
11190 WARN_STRICT_OVERFLOW_ALL);
11191 return constant_boolean_node (1, type);
11192 }
11193
11194 if (TREE_CODE (arg01) == INTEGER_CST)
11195 {
11196 /* Convert X + c > X and X - c < X to true for integers. */
11197 if (code == GT_EXPR
11198 && ((code0 == PLUS_EXPR && is_positive > 0)
11199 || (code0 == MINUS_EXPR && is_positive < 0)))
11200 {
11201 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11202 fold_overflow_warning (("assuming signed overflow does "
11203 "not occur when assuming that "
11204 "(X + c) > X is always true"),
11205 WARN_STRICT_OVERFLOW_ALL);
11206 return constant_boolean_node (1, type);
11207 }
11208
11209 if (code == LT_EXPR
11210 && ((code0 == MINUS_EXPR && is_positive > 0)
11211 || (code0 == PLUS_EXPR && is_positive < 0)))
11212 {
11213 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11214 fold_overflow_warning (("assuming signed overflow does "
11215 "not occur when assuming that "
11216 "(X - c) < X is always true"),
11217 WARN_STRICT_OVERFLOW_ALL);
11218 return constant_boolean_node (1, type);
11219 }
11220
11221 /* Convert X + c <= X and X - c >= X to false for integers. */
11222 if (code == LE_EXPR
11223 && ((code0 == PLUS_EXPR && is_positive > 0)
11224 || (code0 == MINUS_EXPR && is_positive < 0)))
11225 {
11226 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11227 fold_overflow_warning (("assuming signed overflow does "
11228 "not occur when assuming that "
11229 "(X + c) <= X is always false"),
11230 WARN_STRICT_OVERFLOW_ALL);
11231 return constant_boolean_node (0, type);
11232 }
11233
11234 if (code == GE_EXPR
11235 && ((code0 == MINUS_EXPR && is_positive > 0)
11236 || (code0 == PLUS_EXPR && is_positive < 0)))
11237 {
11238 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11239 fold_overflow_warning (("assuming signed overflow does "
11240 "not occur when assuming that "
11241 "(X - c) >= X is always false"),
11242 WARN_STRICT_OVERFLOW_ALL);
11243 return constant_boolean_node (0, type);
11244 }
11245 }
11246 }
11247
11248 /* If we are comparing an ABS_EXPR with a constant, we can
11249 convert all the cases into explicit comparisons, but they may
11250 well not be faster than doing the ABS and one comparison.
11251 But ABS (X) <= C is a range comparison, which becomes a subtraction
11252 and a comparison, and is probably faster. */
11253 if (code == LE_EXPR
11254 && TREE_CODE (arg1) == INTEGER_CST
11255 && TREE_CODE (arg0) == ABS_EXPR
11256 && ! TREE_SIDE_EFFECTS (arg0)
11257 && (0 != (tem = negate_expr (arg1)))
11258 && TREE_CODE (tem) == INTEGER_CST
11259 && !TREE_OVERFLOW (tem))
11260 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11261 build2 (GE_EXPR, type,
11262 TREE_OPERAND (arg0, 0), tem),
11263 build2 (LE_EXPR, type,
11264 TREE_OPERAND (arg0, 0), arg1));
11265
11266 /* Convert ABS_EXPR<x> >= 0 to true. */
11267 strict_overflow_p = false;
11268 if (code == GE_EXPR
11269 && (integer_zerop (arg1)
11270 || (! HONOR_NANS (arg0)
11271 && real_zerop (arg1)))
11272 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11273 {
11274 if (strict_overflow_p)
11275 fold_overflow_warning (("assuming signed overflow does not occur "
11276 "when simplifying comparison of "
11277 "absolute value and zero"),
11278 WARN_STRICT_OVERFLOW_CONDITIONAL);
11279 return omit_one_operand_loc (loc, type,
11280 constant_boolean_node (true, type),
11281 arg0);
11282 }
11283
11284 /* Convert ABS_EXPR<x> < 0 to false. */
11285 strict_overflow_p = false;
11286 if (code == LT_EXPR
11287 && (integer_zerop (arg1) || real_zerop (arg1))
11288 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11289 {
11290 if (strict_overflow_p)
11291 fold_overflow_warning (("assuming signed overflow does not occur "
11292 "when simplifying comparison of "
11293 "absolute value and zero"),
11294 WARN_STRICT_OVERFLOW_CONDITIONAL);
11295 return omit_one_operand_loc (loc, type,
11296 constant_boolean_node (false, type),
11297 arg0);
11298 }
11299
11300 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11301 and similarly for >= into !=. */
11302 if ((code == LT_EXPR || code == GE_EXPR)
11303 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11304 && TREE_CODE (arg1) == LSHIFT_EXPR
11305 && integer_onep (TREE_OPERAND (arg1, 0)))
11306 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11307 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11308 TREE_OPERAND (arg1, 1)),
11309 build_zero_cst (TREE_TYPE (arg0)));
11310
11311 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11312 otherwise Y might be >= # of bits in X's type and thus e.g.
11313 (unsigned char) (1 << Y) for Y 15 might be 0.
11314 If the cast is widening, then 1 << Y should have unsigned type,
11315 otherwise if Y is number of bits in the signed shift type minus 1,
11316 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11317 31 might be 0xffffffff80000000. */
11318 if ((code == LT_EXPR || code == GE_EXPR)
11319 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11320 && CONVERT_EXPR_P (arg1)
11321 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11322 && (element_precision (TREE_TYPE (arg1))
11323 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11324 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11325 || (element_precision (TREE_TYPE (arg1))
11326 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11327 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11328 {
11329 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11330 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11331 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11332 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11333 build_zero_cst (TREE_TYPE (arg0)));
11334 }
11335
11336 return NULL_TREE;
11337
11338 case UNORDERED_EXPR:
11339 case ORDERED_EXPR:
11340 case UNLT_EXPR:
11341 case UNLE_EXPR:
11342 case UNGT_EXPR:
11343 case UNGE_EXPR:
11344 case UNEQ_EXPR:
11345 case LTGT_EXPR:
11346 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11347 {
11348 tree targ0 = strip_float_extensions (arg0);
11349 tree targ1 = strip_float_extensions (arg1);
11350 tree newtype = TREE_TYPE (targ0);
11351
11352 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11353 newtype = TREE_TYPE (targ1);
11354
11355 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11356 return fold_build2_loc (loc, code, type,
11357 fold_convert_loc (loc, newtype, targ0),
11358 fold_convert_loc (loc, newtype, targ1));
11359 }
11360
11361 return NULL_TREE;
11362
11363 case COMPOUND_EXPR:
11364 /* When pedantic, a compound expression can be neither an lvalue
11365 nor an integer constant expression. */
11366 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11367 return NULL_TREE;
11368 /* Don't let (0, 0) be null pointer constant. */
11369 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11370 : fold_convert_loc (loc, type, arg1);
11371 return pedantic_non_lvalue_loc (loc, tem);
11372
11373 case ASSERT_EXPR:
11374 /* An ASSERT_EXPR should never be passed to fold_binary. */
11375 gcc_unreachable ();
11376
11377 default:
11378 return NULL_TREE;
11379 } /* switch (code) */
11380 }
11381
11382 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11383 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11384 of GOTO_EXPR. */
11385
11386 static tree
11387 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11388 {
11389 switch (TREE_CODE (*tp))
11390 {
11391 case LABEL_EXPR:
11392 return *tp;
11393
11394 case GOTO_EXPR:
11395 *walk_subtrees = 0;
11396
11397 /* ... fall through ... */
11398
11399 default:
11400 return NULL_TREE;
11401 }
11402 }
11403
11404 /* Return whether the sub-tree ST contains a label which is accessible from
11405 outside the sub-tree. */
11406
11407 static bool
11408 contains_label_p (tree st)
11409 {
11410 return
11411 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11412 }
11413
11414 /* Fold a ternary expression of code CODE and type TYPE with operands
11415 OP0, OP1, and OP2. Return the folded expression if folding is
11416 successful. Otherwise, return NULL_TREE. */
11417
11418 tree
11419 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11420 tree op0, tree op1, tree op2)
11421 {
11422 tree tem;
11423 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11424 enum tree_code_class kind = TREE_CODE_CLASS (code);
11425
11426 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11427 && TREE_CODE_LENGTH (code) == 3);
11428
11429 /* If this is a commutative operation, and OP0 is a constant, move it
11430 to OP1 to reduce the number of tests below. */
11431 if (commutative_ternary_tree_code (code)
11432 && tree_swap_operands_p (op0, op1, true))
11433 return fold_build3_loc (loc, code, type, op1, op0, op2);
11434
11435 tem = generic_simplify (loc, code, type, op0, op1, op2);
11436 if (tem)
11437 return tem;
11438
11439 /* Strip any conversions that don't change the mode. This is safe
11440 for every expression, except for a comparison expression because
11441 its signedness is derived from its operands. So, in the latter
11442 case, only strip conversions that don't change the signedness.
11443
11444 Note that this is done as an internal manipulation within the
11445 constant folder, in order to find the simplest representation of
11446 the arguments so that their form can be studied. In any cases,
11447 the appropriate type conversions should be put back in the tree
11448 that will get out of the constant folder. */
11449 if (op0)
11450 {
11451 arg0 = op0;
11452 STRIP_NOPS (arg0);
11453 }
11454
11455 if (op1)
11456 {
11457 arg1 = op1;
11458 STRIP_NOPS (arg1);
11459 }
11460
11461 if (op2)
11462 {
11463 arg2 = op2;
11464 STRIP_NOPS (arg2);
11465 }
11466
11467 switch (code)
11468 {
11469 case COMPONENT_REF:
11470 if (TREE_CODE (arg0) == CONSTRUCTOR
11471 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11472 {
11473 unsigned HOST_WIDE_INT idx;
11474 tree field, value;
11475 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11476 if (field == arg1)
11477 return value;
11478 }
11479 return NULL_TREE;
11480
11481 case COND_EXPR:
11482 case VEC_COND_EXPR:
11483 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11484 so all simple results must be passed through pedantic_non_lvalue. */
11485 if (TREE_CODE (arg0) == INTEGER_CST)
11486 {
11487 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11488 tem = integer_zerop (arg0) ? op2 : op1;
11489 /* Only optimize constant conditions when the selected branch
11490 has the same type as the COND_EXPR. This avoids optimizing
11491 away "c ? x : throw", where the throw has a void type.
11492 Avoid throwing away that operand which contains label. */
11493 if ((!TREE_SIDE_EFFECTS (unused_op)
11494 || !contains_label_p (unused_op))
11495 && (! VOID_TYPE_P (TREE_TYPE (tem))
11496 || VOID_TYPE_P (type)))
11497 return pedantic_non_lvalue_loc (loc, tem);
11498 return NULL_TREE;
11499 }
11500 else if (TREE_CODE (arg0) == VECTOR_CST)
11501 {
11502 if ((TREE_CODE (arg1) == VECTOR_CST
11503 || TREE_CODE (arg1) == CONSTRUCTOR)
11504 && (TREE_CODE (arg2) == VECTOR_CST
11505 || TREE_CODE (arg2) == CONSTRUCTOR))
11506 {
11507 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11508 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11509 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11510 for (i = 0; i < nelts; i++)
11511 {
11512 tree val = VECTOR_CST_ELT (arg0, i);
11513 if (integer_all_onesp (val))
11514 sel[i] = i;
11515 else if (integer_zerop (val))
11516 sel[i] = nelts + i;
11517 else /* Currently unreachable. */
11518 return NULL_TREE;
11519 }
11520 tree t = fold_vec_perm (type, arg1, arg2, sel);
11521 if (t != NULL_TREE)
11522 return t;
11523 }
11524 }
11525
11526 /* If we have A op B ? A : C, we may be able to convert this to a
11527 simpler expression, depending on the operation and the values
11528 of B and C. Signed zeros prevent all of these transformations,
11529 for reasons given above each one.
11530
11531 Also try swapping the arguments and inverting the conditional. */
11532 if (COMPARISON_CLASS_P (arg0)
11533 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11534 arg1, TREE_OPERAND (arg0, 1))
11535 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11536 {
11537 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11538 if (tem)
11539 return tem;
11540 }
11541
11542 if (COMPARISON_CLASS_P (arg0)
11543 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11544 op2,
11545 TREE_OPERAND (arg0, 1))
11546 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11547 {
11548 location_t loc0 = expr_location_or (arg0, loc);
11549 tem = fold_invert_truthvalue (loc0, arg0);
11550 if (tem && COMPARISON_CLASS_P (tem))
11551 {
11552 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11553 if (tem)
11554 return tem;
11555 }
11556 }
11557
11558 /* If the second operand is simpler than the third, swap them
11559 since that produces better jump optimization results. */
11560 if (truth_value_p (TREE_CODE (arg0))
11561 && tree_swap_operands_p (op1, op2, false))
11562 {
11563 location_t loc0 = expr_location_or (arg0, loc);
11564 /* See if this can be inverted. If it can't, possibly because
11565 it was a floating-point inequality comparison, don't do
11566 anything. */
11567 tem = fold_invert_truthvalue (loc0, arg0);
11568 if (tem)
11569 return fold_build3_loc (loc, code, type, tem, op2, op1);
11570 }
11571
11572 /* Convert A ? 1 : 0 to simply A. */
11573 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11574 : (integer_onep (op1)
11575 && !VECTOR_TYPE_P (type)))
11576 && integer_zerop (op2)
11577 /* If we try to convert OP0 to our type, the
11578 call to fold will try to move the conversion inside
11579 a COND, which will recurse. In that case, the COND_EXPR
11580 is probably the best choice, so leave it alone. */
11581 && type == TREE_TYPE (arg0))
11582 return pedantic_non_lvalue_loc (loc, arg0);
11583
11584 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11585 over COND_EXPR in cases such as floating point comparisons. */
11586 if (integer_zerop (op1)
11587 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11588 : (integer_onep (op2)
11589 && !VECTOR_TYPE_P (type)))
11590 && truth_value_p (TREE_CODE (arg0)))
11591 return pedantic_non_lvalue_loc (loc,
11592 fold_convert_loc (loc, type,
11593 invert_truthvalue_loc (loc,
11594 arg0)));
11595
11596 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11597 if (TREE_CODE (arg0) == LT_EXPR
11598 && integer_zerop (TREE_OPERAND (arg0, 1))
11599 && integer_zerop (op2)
11600 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11601 {
11602 /* sign_bit_p looks through both zero and sign extensions,
11603 but for this optimization only sign extensions are
11604 usable. */
11605 tree tem2 = TREE_OPERAND (arg0, 0);
11606 while (tem != tem2)
11607 {
11608 if (TREE_CODE (tem2) != NOP_EXPR
11609 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11610 {
11611 tem = NULL_TREE;
11612 break;
11613 }
11614 tem2 = TREE_OPERAND (tem2, 0);
11615 }
11616 /* sign_bit_p only checks ARG1 bits within A's precision.
11617 If <sign bit of A> has wider type than A, bits outside
11618 of A's precision in <sign bit of A> need to be checked.
11619 If they are all 0, this optimization needs to be done
11620 in unsigned A's type, if they are all 1 in signed A's type,
11621 otherwise this can't be done. */
11622 if (tem
11623 && TYPE_PRECISION (TREE_TYPE (tem))
11624 < TYPE_PRECISION (TREE_TYPE (arg1))
11625 && TYPE_PRECISION (TREE_TYPE (tem))
11626 < TYPE_PRECISION (type))
11627 {
11628 int inner_width, outer_width;
11629 tree tem_type;
11630
11631 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11632 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11633 if (outer_width > TYPE_PRECISION (type))
11634 outer_width = TYPE_PRECISION (type);
11635
11636 wide_int mask = wi::shifted_mask
11637 (inner_width, outer_width - inner_width, false,
11638 TYPE_PRECISION (TREE_TYPE (arg1)));
11639
11640 wide_int common = mask & arg1;
11641 if (common == mask)
11642 {
11643 tem_type = signed_type_for (TREE_TYPE (tem));
11644 tem = fold_convert_loc (loc, tem_type, tem);
11645 }
11646 else if (common == 0)
11647 {
11648 tem_type = unsigned_type_for (TREE_TYPE (tem));
11649 tem = fold_convert_loc (loc, tem_type, tem);
11650 }
11651 else
11652 tem = NULL;
11653 }
11654
11655 if (tem)
11656 return
11657 fold_convert_loc (loc, type,
11658 fold_build2_loc (loc, BIT_AND_EXPR,
11659 TREE_TYPE (tem), tem,
11660 fold_convert_loc (loc,
11661 TREE_TYPE (tem),
11662 arg1)));
11663 }
11664
11665 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11666 already handled above. */
11667 if (TREE_CODE (arg0) == BIT_AND_EXPR
11668 && integer_onep (TREE_OPERAND (arg0, 1))
11669 && integer_zerop (op2)
11670 && integer_pow2p (arg1))
11671 {
11672 tree tem = TREE_OPERAND (arg0, 0);
11673 STRIP_NOPS (tem);
11674 if (TREE_CODE (tem) == RSHIFT_EXPR
11675 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11676 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11677 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11678 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11679 TREE_OPERAND (tem, 0), arg1);
11680 }
11681
11682 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11683 is probably obsolete because the first operand should be a
11684 truth value (that's why we have the two cases above), but let's
11685 leave it in until we can confirm this for all front-ends. */
11686 if (integer_zerop (op2)
11687 && TREE_CODE (arg0) == NE_EXPR
11688 && integer_zerop (TREE_OPERAND (arg0, 1))
11689 && integer_pow2p (arg1)
11690 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11691 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11692 arg1, OEP_ONLY_CONST))
11693 return pedantic_non_lvalue_loc (loc,
11694 fold_convert_loc (loc, type,
11695 TREE_OPERAND (arg0, 0)));
11696
11697 /* Disable the transformations below for vectors, since
11698 fold_binary_op_with_conditional_arg may undo them immediately,
11699 yielding an infinite loop. */
11700 if (code == VEC_COND_EXPR)
11701 return NULL_TREE;
11702
11703 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11704 if (integer_zerop (op2)
11705 && truth_value_p (TREE_CODE (arg0))
11706 && truth_value_p (TREE_CODE (arg1))
11707 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11708 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11709 : TRUTH_ANDIF_EXPR,
11710 type, fold_convert_loc (loc, type, arg0), arg1);
11711
11712 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11713 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11714 && truth_value_p (TREE_CODE (arg0))
11715 && truth_value_p (TREE_CODE (arg1))
11716 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11717 {
11718 location_t loc0 = expr_location_or (arg0, loc);
11719 /* Only perform transformation if ARG0 is easily inverted. */
11720 tem = fold_invert_truthvalue (loc0, arg0);
11721 if (tem)
11722 return fold_build2_loc (loc, code == VEC_COND_EXPR
11723 ? BIT_IOR_EXPR
11724 : TRUTH_ORIF_EXPR,
11725 type, fold_convert_loc (loc, type, tem),
11726 arg1);
11727 }
11728
11729 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11730 if (integer_zerop (arg1)
11731 && truth_value_p (TREE_CODE (arg0))
11732 && truth_value_p (TREE_CODE (op2))
11733 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11734 {
11735 location_t loc0 = expr_location_or (arg0, loc);
11736 /* Only perform transformation if ARG0 is easily inverted. */
11737 tem = fold_invert_truthvalue (loc0, arg0);
11738 if (tem)
11739 return fold_build2_loc (loc, code == VEC_COND_EXPR
11740 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11741 type, fold_convert_loc (loc, type, tem),
11742 op2);
11743 }
11744
11745 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11746 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11747 && truth_value_p (TREE_CODE (arg0))
11748 && truth_value_p (TREE_CODE (op2))
11749 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11750 return fold_build2_loc (loc, code == VEC_COND_EXPR
11751 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11752 type, fold_convert_loc (loc, type, arg0), op2);
11753
11754 return NULL_TREE;
11755
11756 case CALL_EXPR:
11757 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11758 of fold_ternary on them. */
11759 gcc_unreachable ();
11760
11761 case BIT_FIELD_REF:
11762 if ((TREE_CODE (arg0) == VECTOR_CST
11763 || (TREE_CODE (arg0) == CONSTRUCTOR
11764 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11765 && (type == TREE_TYPE (TREE_TYPE (arg0))
11766 || (TREE_CODE (type) == VECTOR_TYPE
11767 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11768 {
11769 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11770 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11771 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11772 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11773
11774 if (n != 0
11775 && (idx % width) == 0
11776 && (n % width) == 0
11777 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11778 {
11779 idx = idx / width;
11780 n = n / width;
11781
11782 if (TREE_CODE (arg0) == VECTOR_CST)
11783 {
11784 if (n == 1)
11785 return VECTOR_CST_ELT (arg0, idx);
11786
11787 tree *vals = XALLOCAVEC (tree, n);
11788 for (unsigned i = 0; i < n; ++i)
11789 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11790 return build_vector (type, vals);
11791 }
11792
11793 /* Constructor elements can be subvectors. */
11794 unsigned HOST_WIDE_INT k = 1;
11795 if (CONSTRUCTOR_NELTS (arg0) != 0)
11796 {
11797 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11798 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11799 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11800 }
11801
11802 /* We keep an exact subset of the constructor elements. */
11803 if ((idx % k) == 0 && (n % k) == 0)
11804 {
11805 if (CONSTRUCTOR_NELTS (arg0) == 0)
11806 return build_constructor (type, NULL);
11807 idx /= k;
11808 n /= k;
11809 if (n == 1)
11810 {
11811 if (idx < CONSTRUCTOR_NELTS (arg0))
11812 return CONSTRUCTOR_ELT (arg0, idx)->value;
11813 return build_zero_cst (type);
11814 }
11815
11816 vec<constructor_elt, va_gc> *vals;
11817 vec_alloc (vals, n);
11818 for (unsigned i = 0;
11819 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11820 ++i)
11821 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11822 CONSTRUCTOR_ELT
11823 (arg0, idx + i)->value);
11824 return build_constructor (type, vals);
11825 }
11826 /* The bitfield references a single constructor element. */
11827 else if (idx + n <= (idx / k + 1) * k)
11828 {
11829 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11830 return build_zero_cst (type);
11831 else if (n == k)
11832 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11833 else
11834 return fold_build3_loc (loc, code, type,
11835 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11836 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11837 }
11838 }
11839 }
11840
11841 /* A bit-field-ref that referenced the full argument can be stripped. */
11842 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11843 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11844 && integer_zerop (op2))
11845 return fold_convert_loc (loc, type, arg0);
11846
11847 /* On constants we can use native encode/interpret to constant
11848 fold (nearly) all BIT_FIELD_REFs. */
11849 if (CONSTANT_CLASS_P (arg0)
11850 && can_native_interpret_type_p (type)
11851 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11852 /* This limitation should not be necessary, we just need to
11853 round this up to mode size. */
11854 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11855 /* Need bit-shifting of the buffer to relax the following. */
11856 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11857 {
11858 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11859 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11860 unsigned HOST_WIDE_INT clen;
11861 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11862 /* ??? We cannot tell native_encode_expr to start at
11863 some random byte only. So limit us to a reasonable amount
11864 of work. */
11865 if (clen <= 4096)
11866 {
11867 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11868 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11869 if (len > 0
11870 && len * BITS_PER_UNIT >= bitpos + bitsize)
11871 {
11872 tree v = native_interpret_expr (type,
11873 b + bitpos / BITS_PER_UNIT,
11874 bitsize / BITS_PER_UNIT);
11875 if (v)
11876 return v;
11877 }
11878 }
11879 }
11880
11881 return NULL_TREE;
11882
11883 case FMA_EXPR:
11884 /* For integers we can decompose the FMA if possible. */
11885 if (TREE_CODE (arg0) == INTEGER_CST
11886 && TREE_CODE (arg1) == INTEGER_CST)
11887 return fold_build2_loc (loc, PLUS_EXPR, type,
11888 const_binop (MULT_EXPR, arg0, arg1), arg2);
11889 if (integer_zerop (arg2))
11890 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11891
11892 return fold_fma (loc, type, arg0, arg1, arg2);
11893
11894 case VEC_PERM_EXPR:
11895 if (TREE_CODE (arg2) == VECTOR_CST)
11896 {
11897 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11898 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11899 unsigned char *sel2 = sel + nelts;
11900 bool need_mask_canon = false;
11901 bool need_mask_canon2 = false;
11902 bool all_in_vec0 = true;
11903 bool all_in_vec1 = true;
11904 bool maybe_identity = true;
11905 bool single_arg = (op0 == op1);
11906 bool changed = false;
11907
11908 mask2 = 2 * nelts - 1;
11909 mask = single_arg ? (nelts - 1) : mask2;
11910 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11911 for (i = 0; i < nelts; i++)
11912 {
11913 tree val = VECTOR_CST_ELT (arg2, i);
11914 if (TREE_CODE (val) != INTEGER_CST)
11915 return NULL_TREE;
11916
11917 /* Make sure that the perm value is in an acceptable
11918 range. */
11919 wide_int t = val;
11920 need_mask_canon |= wi::gtu_p (t, mask);
11921 need_mask_canon2 |= wi::gtu_p (t, mask2);
11922 sel[i] = t.to_uhwi () & mask;
11923 sel2[i] = t.to_uhwi () & mask2;
11924
11925 if (sel[i] < nelts)
11926 all_in_vec1 = false;
11927 else
11928 all_in_vec0 = false;
11929
11930 if ((sel[i] & (nelts-1)) != i)
11931 maybe_identity = false;
11932 }
11933
11934 if (maybe_identity)
11935 {
11936 if (all_in_vec0)
11937 return op0;
11938 if (all_in_vec1)
11939 return op1;
11940 }
11941
11942 if (all_in_vec0)
11943 op1 = op0;
11944 else if (all_in_vec1)
11945 {
11946 op0 = op1;
11947 for (i = 0; i < nelts; i++)
11948 sel[i] -= nelts;
11949 need_mask_canon = true;
11950 }
11951
11952 if ((TREE_CODE (op0) == VECTOR_CST
11953 || TREE_CODE (op0) == CONSTRUCTOR)
11954 && (TREE_CODE (op1) == VECTOR_CST
11955 || TREE_CODE (op1) == CONSTRUCTOR))
11956 {
11957 tree t = fold_vec_perm (type, op0, op1, sel);
11958 if (t != NULL_TREE)
11959 return t;
11960 }
11961
11962 if (op0 == op1 && !single_arg)
11963 changed = true;
11964
11965 /* Some targets are deficient and fail to expand a single
11966 argument permutation while still allowing an equivalent
11967 2-argument version. */
11968 if (need_mask_canon && arg2 == op2
11969 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11970 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11971 {
11972 need_mask_canon = need_mask_canon2;
11973 sel = sel2;
11974 }
11975
11976 if (need_mask_canon && arg2 == op2)
11977 {
11978 tree *tsel = XALLOCAVEC (tree, nelts);
11979 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11980 for (i = 0; i < nelts; i++)
11981 tsel[i] = build_int_cst (eltype, sel[i]);
11982 op2 = build_vector (TREE_TYPE (arg2), tsel);
11983 changed = true;
11984 }
11985
11986 if (changed)
11987 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11988 }
11989 return NULL_TREE;
11990
11991 default:
11992 return NULL_TREE;
11993 } /* switch (code) */
11994 }
11995
/* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
   of an array (or vector).  Returns the matching element's value, or
   NULL_TREE if ACCESS_INDEX does not fall inside any initialized
   element.  */

tree
get_array_ctor_element_at_index (tree ctor, offset_int access_index)
{
  tree index_type = NULL_TREE;
  offset_int low_bound = 0;

  /* For arrays, pick up the domain's index type and lower bound so the
     indices can be normalized to that type below.  Vectors implicitly
     start at zero with no index type.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    {
      tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
      if (domain_type && TYPE_MIN_VALUE (domain_type))
	{
	  /* Static constructors for variably sized objects makes no sense.  */
	  gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
	  index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
	  low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
	}
    }

  /* Truncate/extend ACCESS_INDEX to the precision and signedness of the
     index type so all comparisons below happen in one representation.  */
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* Running index of the element being walked.  It starts one below the
     lower bound because elements without an explicit index increment it
     before use.  */
  offset_int index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  offset_int max_index;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify a range,
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      /* A RANGE_EXPR initializes every index in [low, high].  */
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  /* No index given: this element follows the previous one.  */
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  NOTE(review): the comparisons are unsigned
	 (wi::cmpu); this relies on the wi::ext normalizations above
	 putting ACCESS_INDEX and INDEX in a comparable form -- confirm
	 for domains with negative lower bounds.  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return cval;
    }
  return NULL_TREE;
}
12062
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  /* Ordinary expressions dispatch to the arity-specific folders; each
     returns NULL_TREE when no simplification applies, in which case the
     original EXPR is handed back unchanged.  */
  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	/* Fold a constant-index read from a CONSTRUCTOR to the matching
	   initializer element.  */
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    tree val = get_array_ctor_element_at_index (op0,
							wi::to_offset (op1));
	    if (val)
	      return val;
	  }

	return t;
      }

      /* Return a VECTOR_CST if possible.  */
    case CONSTRUCTOR:
      {
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	/* Every element must already be a constant for the conversion
	   to a VECTOR_CST to be valid.  */
	unsigned i;
	tree val;
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
	  if (! CONSTANT_CLASS_P (val))
	    return t;

	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
      }

    case CONST_DECL:
      /* A CONST_DECL folds to (the folded form of) its initializer.  */
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
12171
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
				hash_table<nofree_ptr_hash<const tree_node> > *);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  /* Checksum EXPR before folding.  */
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  /* Do the real work; fold was #defined to fold_1 when this wrapper
     was compiled in, so this calls the actual folder.  */
  ret = fold_1 (expr);

  /* Checksum EXPR again; fold_1 must not have modified its input.  */
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
12208
12209 void
12210 print_fold_checksum (const_tree expr)
12211 {
12212 struct md5_ctx ctx;
12213 unsigned char checksum[16], cnt;
12214 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12215
12216 md5_init_ctx (&ctx);
12217 fold_checksum_tree (expr, &ctx, &ht);
12218 md5_finish_ctx (&ctx, checksum);
12219 for (cnt = 0; cnt < 16; ++cnt)
12220 fprintf (stderr, "%02x", checksum[cnt]);
12221 putc ('\n', stderr);
12222 }
12223
/* Report that folding modified its input tree in place; see the
   checking wrapper of fold above.  Both arguments exist only for
   debugger inspection.  */

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
12229
/* Fold EXPR's content into the running MD5 context CTX, recursing into
   operands, types and chained nodes.  HT records already-visited nodes
   so shared subtrees are hashed only once and cycles terminate.  Fields
   that fold is allowed to mutate (assembler names, type caches, variant
   chains) are masked out via a stack copy before hashing.  */

static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table<nofree_ptr_hash <const tree_node> > *ht)
{
  const tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  /* Hash each node at most once.  */
  slot = ht->find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && HAS_DECL_ASSEMBLER_NAME_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      buf.decl_with_vis.symtab_node = NULL;
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  /* Hash the raw node bytes, then recurse into referenced trees.  */
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  /* Walk the chain iteratively rather than recursing, to keep
	     stack depth bounded for long lists.  */
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  if (TREE_CODE (expr) == FUNCTION_DECL)
	    {
	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
	    }
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
12381
12382 /* Helper function for outputting the checksum of a tree T. When
12383 debugging with gdb, you can "define mynext" to be "next" followed
12384 by "call debug_fold_checksum (op0)", then just trace down till the
12385 outputs differ. */
12386
12387 DEBUG_FUNCTION void
12388 debug_fold_checksum (const_tree t)
12389 {
12390 int i;
12391 unsigned char checksum[16];
12392 struct md5_ctx ctx;
12393 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12394
12395 md5_init_ctx (&ctx);
12396 fold_checksum_tree (t, &ctx, &ht);
12397 md5_finish_ctx (&ctx, checksum);
12398 ht.empty ();
12399
12400 for (i = 0; i < 16; i++)
12401 fprintf (stderr, "%d ", checksum[i]);
12402
12403 fprintf (stderr, "\n");
12404 }
12405
12406 #endif
12407
12408 /* Fold a unary tree expression with code CODE of type TYPE with an
12409 operand OP0. LOC is the location of the resulting expression.
12410 Return a folded expression if successful. Otherwise, return a tree
12411 expression with code CODE of type TYPE with an operand OP0. */
12412
12413 tree
12414 fold_build1_stat_loc (location_t loc,
12415 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12416 {
12417 tree tem;
12418 #ifdef ENABLE_FOLD_CHECKING
12419 unsigned char checksum_before[16], checksum_after[16];
12420 struct md5_ctx ctx;
12421 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12422
12423 md5_init_ctx (&ctx);
12424 fold_checksum_tree (op0, &ctx, &ht);
12425 md5_finish_ctx (&ctx, checksum_before);
12426 ht.empty ();
12427 #endif
12428
12429 tem = fold_unary_loc (loc, code, type, op0);
12430 if (!tem)
12431 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12432
12433 #ifdef ENABLE_FOLD_CHECKING
12434 md5_init_ctx (&ctx);
12435 fold_checksum_tree (op0, &ctx, &ht);
12436 md5_finish_ctx (&ctx, checksum_after);
12437
12438 if (memcmp (checksum_before, checksum_after, 16))
12439 fold_check_failed (op0, tem);
12440 #endif
12441 return tem;
12442 }
12443
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  Under ENABLE_FOLD_CHECKING, verify that folding left
   both operands untouched.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  /* Checksum both operands before folding.  */
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  /* Try to fold; fall back to building the plain expression node.  */
  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  /* Re-checksum each operand and complain if folding mutated it.  */
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
12497
12498 /* Fold a ternary tree expression with code CODE of type TYPE with
12499 operands OP0, OP1, and OP2. Return a folded expression if
12500 successful. Otherwise, return a tree expression with code CODE of
12501 type TYPE with operands OP0, OP1, and OP2. */
12502
tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  /* Per-operand MD5 digests taken before and after folding, used to
     verify that folding does not mutate its input trees.  */
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  /* Variable-length expressions (e.g. CALL_EXPR) must go through
     fold_build_call_array_loc instead.  */
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op2);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
12565
12566 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12567 arguments in ARGARRAY, and a null static chain.
12568 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12569 of type TYPE from the given operands as constructed by build_call_array. */
12570
tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  /* MD5 digests of the callee and of the whole argument list (hashed
     as one stream), before and after folding, to verify that folding
     does not mutate its inputs.  */
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);
  int i;

  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  /* Try to fold the builtin call; otherwise build a plain CALL_EXPR.  */
  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
  if (!tem)
    tem = build_call_array_loc (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);

  /* No single tree to blame when the arglist digest changes.  */
  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
12620
12621 /* Perform constant folding and related simplification of initializer
12622 expression EXPR. These behave identically to "fold_buildN" but ignore
12623 potential run-time traps and exceptions that fold must preserve. */
12624
/* START_FOLD_INIT saves the global flags that make folding preserve
   run-time traps/exceptions (signaling NaNs, trapping math, rounding
   math, -ftrapv) and clears them, then sets folding_initializer.
   END_FOLD_INIT restores everything.  The two must be used as a
   matched pair in the same scope: START_FOLD_INIT declares the
   save variables that END_FOLD_INIT reads.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
12643
/* As fold_build1_loc, but with trap/exception-preserving flags
   temporarily cleared, for folding initializer expressions.  */

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}
12656
/* As fold_build2_loc, but with trap/exception-preserving flags
   temporarily cleared, for folding initializer expressions.  */

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}
12669
/* As fold_build_call_array_loc, but with trap/exception-preserving
   flags temporarily cleared, for folding initializer expressions.  */

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
12685
12686 /* Determine if first argument is a multiple of second argument. Return 0 if
12687 it is not, or we cannot easily determined it to be.
12688
12689 An example of the sort of thing we care about (at this point; this routine
12690 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12691 fold cases do now) is discovering that
12692
12693 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12694
12695 is a multiple of
12696
12697 SAVE_EXPR (J * 8)
12698
12699 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12700
12701 This code also handles discovering that
12702
12703 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12704
12705 is a multiple of 8 so we don't have to worry about dealing with a
12706 possible remainder.
12707
12708 Note that we *look* inside a SAVE_EXPR only to determine how it was
12709 calculated; it is not safe for fold to do much of anything else with the
12710 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12711 at run time. For example, the latter example above *cannot* be implemented
12712 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12713 evaluation time of the original SAVE_EXPR is not necessarily the same at
12714 the time the new expression is evaluated. The only optimization of this
12715 sort that would be valid is changing
12716
12717 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12718
12719 divided by 8 to
12720
12721 SAVE_EXPR (I) * SAVE_EXPR (J)
12722
12723 (where the same SAVE_EXPR (J) is used in the original and the
12724 transformed version). */
12725
int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  /* An expression is trivially a multiple of itself.  */
  if (operand_equal_p (top, bottom, 0))
    return 1;

  /* Only plain integer types are handled.  */
  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      /* A product is a multiple of BOTTOM if either factor is.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* A sum/difference is a multiple only if both operands are.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      /* X << C is X * (1 << C); recurse on the constant power of two
	 when the shift amount is in range.  */
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      /* Both arms of the conditional must be multiples of BOTTOM.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      /* For unsigned TYPE a negative constant would have wrapped, so
	 refuse to reason about negative values there; also refuse a
	 zero or non-constant BOTTOM.  */
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
				SIGNED);

    default:
      /* Anything else: we cannot easily tell.  */
      return 0;
    }
}
12801
/* Poison direct calls to tree_expr_nonnegative_warnv_p within this
   section of the file: recursive queries must go through RECURSE so
   that the recursion DEPTH is incremented and stays bounded.  The
   parenthesized name in RECURSE bypasses the poisoning macro.  */

#define tree_expr_nonnegative_warnv_p(X, Y) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12807
12808 /* Return true if CODE or TYPE is known to be non-negative. */
12809
12810 static bool
12811 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12812 {
12813 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12814 && truth_value_p (code))
12815 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12816 have a signed:1 type (where the value is -1 and 0). */
12817 return true;
12818 return false;
12819 }
12820
12821 /* Return true if (CODE OP0) is known to be non-negative. If the return
12822 value is based on the assumption that signed overflow is undefined,
12823 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12824 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12825
bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p, int depth)
{
  /* Every value of an unsigned type is nonnegative by definition.  */
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!ANY_INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* Nonnegative only under the assumption that signed
	     overflow never happens — record that assumption.  */
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      /* These preserve the sign of the operand.  */
      return RECURSE (op0);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return RECURSE (op0);
	    if (INTEGRAL_TYPE_P (inner_type))
	      {
		/* Unsigned integers convert to nonnegative reals.  */
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return RECURSE (op0);
	      }
	  }
	else if (INTEGRAL_TYPE_P (outer_type))
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return RECURSE (op0);
	    if (INTEGRAL_TYPE_P (inner_type))
	      /* A zero-extension into a strictly wider type cannot
		 produce a negative value.  */
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		      && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
12886
12887 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12888 value is based on the assumption that signed overflow is undefined,
12889 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12890 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12891
bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p,
				 int depth)
{
  /* Every value of an unsigned type is nonnegative by definition.  */
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return RECURSE (op0) && RECURSE (op1);

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      /* The sum needs at most one bit more than the wider
		 operand; it stays nonnegative if that still fits
		 below the sign bit of TYPE.  */
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating point x
	     or without overflow.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (RECURSE (op0) && RECURSE (op1)))
	    {
	      if (ANY_INTEGRAL_TYPE_P (type)
		  && TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  /* A nonnegative constant behaves like an unsigned operand.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      /* For a constant, use the minimum number of bits that
		 can represent it rather than its type's precision.  */
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, UNSIGNED)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, UNSIGNED)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      /* One nonnegative operand suffices for AND and MAX.  */
      return RECURSE (op0) || RECURSE (op1);

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      /* These need both operands nonnegative.  */
      return RECURSE (op0) && RECURSE (op1);

    case TRUNC_MOD_EXPR:
      /* Truncating modulo follows the sign of the dividend.  */
      return RECURSE (op0);

    case FLOOR_MOD_EXPR:
      /* Flooring modulo follows the sign of the divisor.  */
      return RECURSE (op1);

    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
13007
13008 /* Return true if T is known to be non-negative. If the return
13009 value is based on the assumption that signed overflow is undefined,
13010 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13011 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13012
bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  /* Every value of an unsigned type is nonnegative by definition.  */
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      /* Both arms of the conditional must be nonnegative.  */
      return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));

    case SSA_NAME:
      /* Limit the depth of recursion to avoid quadratic behavior.
	 This is expected to catch almost all occurrences in practice.
	 If this code misses important cases that unbounded recursion
	 would not, passes that need this information could be revised
	 to provide it through dataflow propagation.  */
      return (!name_registered_for_update_p (t)
	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
						  strict_overflow_p, depth));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
    }
}
13048
13049 /* Return true if T is known to be non-negative. If the return
13050 value is based on the assumption that signed overflow is undefined,
13051 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13052 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13053
bool
tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
			       bool *strict_overflow_p, int depth)
{
  switch (fn)
    {
    CASE_CFN_ACOS:
    CASE_CFN_ACOSH:
    CASE_CFN_CABS:
    CASE_CFN_COSH:
    CASE_CFN_ERFC:
    CASE_CFN_EXP:
    CASE_CFN_EXP10:
    CASE_CFN_EXP2:
    CASE_CFN_FABS:
    CASE_CFN_FDIM:
    CASE_CFN_HYPOT:
    CASE_CFN_POW10:
    CASE_CFN_FFS:
    CASE_CFN_PARITY:
    CASE_CFN_POPCOUNT:
    CASE_CFN_CLZ:
    CASE_CFN_CLRSB:
    case CFN_BUILT_IN_BSWAP32:
    case CFN_BUILT_IN_BSWAP64:
      /* Always true.  */
      return true;

    CASE_CFN_SQRT:
      /* sqrt(-0.0) is -0.0.  */
      if (!HONOR_SIGNED_ZEROS (element_mode (type)))
	return true;
      return RECURSE (arg0);

    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CEIL:
    CASE_CFN_ERF:
    CASE_CFN_EXPM1:
    CASE_CFN_FLOOR:
    CASE_CFN_FMOD:
    CASE_CFN_FREXP:
    CASE_CFN_ICEIL:
    CASE_CFN_IFLOOR:
    CASE_CFN_IRINT:
    CASE_CFN_IROUND:
    CASE_CFN_LCEIL:
    CASE_CFN_LDEXP:
    CASE_CFN_LFLOOR:
    CASE_CFN_LLCEIL:
    CASE_CFN_LLFLOOR:
    CASE_CFN_LLRINT:
    CASE_CFN_LLROUND:
    CASE_CFN_LRINT:
    CASE_CFN_LROUND:
    CASE_CFN_MODF:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
    CASE_CFN_ROUND:
    CASE_CFN_SCALB:
    CASE_CFN_SCALBLN:
    CASE_CFN_SCALBN:
    CASE_CFN_SIGNBIT:
    CASE_CFN_SIGNIFICAND:
    CASE_CFN_SINH:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      /* True if the 1st argument is nonnegative.  */
      return RECURSE (arg0);

    CASE_CFN_FMAX:
      /* True if the 1st OR 2nd arguments are nonnegative.  */
      return RECURSE (arg0) || RECURSE (arg1);

    CASE_CFN_FMIN:
      /* True if the 1st AND 2nd arguments are nonnegative.  */
      return RECURSE (arg0) && RECURSE (arg1);

    CASE_CFN_COPYSIGN:
      /* True if the 2nd argument is nonnegative.  */
      return RECURSE (arg1);

    CASE_CFN_POWI:
      /* True if the 1st argument is nonnegative or the second
	 argument is an even integer.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	return true;
      return RECURSE (arg0);

    CASE_CFN_POW:
      /* True if the 1st argument is nonnegative or the second
	 argument is an even integer valued real.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE c;
	  HOST_WIDE_INT n;

	  c = TREE_REAL_CST (arg1);
	  n = real_to_integer (&c);
	  if ((n & 1) == 0)
	    {
	      /* Only treat the exponent as even if it is exactly an
		 integer; a rounded real_to_integer result proves
		 nothing.  */
	      REAL_VALUE_TYPE cint;
	      real_from_integer (&cint, VOIDmode, n, SIGNED);
	      if (real_identical (&c, &cint))
		return true;
	    }
	}
      return RECURSE (arg0);

    default:
      break;
    }
  /* Unknown function: fall back to what the tree code tells us.  */
  return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
}
13171
13172 /* Return true if T is known to be non-negative. If the return
13173 value is based on the assumption that signed overflow is undefined,
13174 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13175 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13176
static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  enum tree_code code = TREE_CODE (t);
  /* Every value of an unsigned type is nonnegative by definition.  */
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return RECURSE (t);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	/* The last statement must store into the slot itself for us
	   to learn anything about the slot's final value.  */
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return RECURSE (TREE_OPERAND (t, 1));

	return false;
      }

    case CALL_EXPR:
      {
	/* Only the first two arguments matter for the builtins that
	   tree_call_nonnegative_warnv_p knows about.  */
	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_call_combined_fn (t),
					      arg0,
					      arg1,
					      strict_overflow_p, depth);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      /* The value is that of the second operand.  */
      return RECURSE (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      /* The value is that of the last expression in the body.  */
      return RECURSE (expr_last (TREE_OPERAND (t, 1)));

    case SAVE_EXPR:
      return RECURSE (TREE_OPERAND (t, 0));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
    }
}

#undef RECURSE
#undef tree_expr_nonnegative_warnv_p
13245
13246 /* Return true if T is known to be non-negative. If the return
13247 value is based on the assumption that signed overflow is undefined,
13248 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13249 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13250
bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  /* First dispatch on the tree code's class...  */
  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p, depth);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p, depth);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);

    default:
      break;
    }

  /* ...then on the individual codes that fall into tcc_expression
     and friends.  */
  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p, depth);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p, depth);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
    }
}
13313
13314 /* Return true if `t' is known to be non-negative. Handle warnings
13315 about undefined signed overflow. */
13316
13317 bool
13318 tree_expr_nonnegative_p (tree t)
13319 {
13320 bool ret, strict_overflow_p;
13321
13322 strict_overflow_p = false;
13323 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13324 if (strict_overflow_p)
13325 fold_overflow_warning (("assuming signed overflow does not occur when "
13326 "determining that expression is always "
13327 "non-negative"),
13328 WARN_STRICT_OVERFLOW_MISC);
13329 return ret;
13330 }
13331
13332
13333 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13334 For floating point we further ensure that T is not denormal.
13335 Similar logic is present in nonzero_address in rtlanal.h.
13336
13337 If the return value is based on the assumption that signed overflow
13338 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13339 change *STRICT_OVERFLOW_P. */
13340
13341 bool
13342 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13343 bool *strict_overflow_p)
13344 {
13345 switch (code)
13346 {
13347 case ABS_EXPR:
13348 return tree_expr_nonzero_warnv_p (op0,
13349 strict_overflow_p);
13350
13351 case NOP_EXPR:
13352 {
13353 tree inner_type = TREE_TYPE (op0);
13354 tree outer_type = type;
13355
13356 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13357 && tree_expr_nonzero_warnv_p (op0,
13358 strict_overflow_p));
13359 }
13360 break;
13361
13362 case NON_LVALUE_EXPR:
13363 return tree_expr_nonzero_warnv_p (op0,
13364 strict_overflow_p);
13365
13366 default:
13367 break;
13368 }
13369
13370 return false;
13371 }
13372
13373 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13374 For floating point we further ensure that T is not denormal.
13375 Similar logic is present in nonzero_address in rtlanal.h.
13376
13377 If the return value is based on the assumption that signed overflow
13378 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13379 change *STRICT_OVERFLOW_P. */
13380
bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  /* Collects overflow assumptions from sub-queries so they are only
     propagated to the caller when they actually contributed to a
     "nonzero" answer.  */
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* nonzero * nonzero is nonzero — but only assuming signed
	     overflow cannot occur, so record that assumption.  */
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      /* An OR is nonzero if either operand is.  */
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
13480
13481 /* Return true when T is an address and is known to be nonzero.
13482 For floating point we further ensure that T is not denormal.
13483 Similar logic is present in nonzero_address in rtlanal.h.
13484
13485 If the return value is based on the assumption that signed overflow
13486 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13487 change *STRICT_OVERFLOW_P. */
13488
bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);

	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* For objects in symbol table check if we know they are non-zero.
	   Don't do anything for variables and functions before symtab is built;
	   it is quite possible that they will be declared weak later.  */
	if (DECL_P (base) && decl_in_symtab_p (base))
	  {
	    struct symtab_node *symbol;

	    symbol = symtab_node::get_create (base);
	    if (symbol)
	      return symbol->nonzero_address ();
	    else
	      return false;
	  }

	/* Function local objects are never NULL.  */
	if (DECL_P (base)
	    && (DECL_CONTEXT (base)
		&& TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		&& auto_var_in_fn_p (base, DECL_CONTEXT (base))))
	  return true;

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      /* A conditional is nonzero only when both arms are; propagate
	 the overflow assumption only in that case.  */
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }
  return false;
}
13554
/* Poison direct calls to integer_valued_real_p inside the helper
   functions below: recursion must go through RECURSE so that DEPTH is
   incremented on every recursive query and the recursion stays bounded
   (see the PARAM_MAX_SSA_NAME_QUERY_DEPTH check in
   integer_valued_real_single_p).  Any direct call is turned into a
   compile-time error by the _Pragma.  */
#define integer_valued_real_p(X) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

/* Recursive entry point for the integer_valued_real_* helpers; the
   parenthesized name bypasses the poisoning macro above.  */
#define RECURSE(X) \
  ((integer_valued_real_p) (X, depth + 1))
13560
13561 /* Return true if the floating point result of (CODE OP0) has an
13562 integer value. We also allow +Inf, -Inf and NaN to be considered
13563 integer values. Return false for signaling NaN.
13564
13565 DEPTH is the current nesting depth of the query. */
13566
13567 bool
13568 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13569 {
13570 switch (code)
13571 {
13572 case FLOAT_EXPR:
13573 return true;
13574
13575 case ABS_EXPR:
13576 return RECURSE (op0);
13577
13578 CASE_CONVERT:
13579 {
13580 tree type = TREE_TYPE (op0);
13581 if (TREE_CODE (type) == INTEGER_TYPE)
13582 return true;
13583 if (TREE_CODE (type) == REAL_TYPE)
13584 return RECURSE (op0);
13585 break;
13586 }
13587
13588 default:
13589 break;
13590 }
13591 return false;
13592 }
13593
13594 /* Return true if the floating point result of (CODE OP0 OP1) has an
13595 integer value. We also allow +Inf, -Inf and NaN to be considered
13596 integer values. Return false for signaling NaN.
13597
13598 DEPTH is the current nesting depth of the query. */
13599
13600 bool
13601 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13602 {
13603 switch (code)
13604 {
13605 case PLUS_EXPR:
13606 case MINUS_EXPR:
13607 case MULT_EXPR:
13608 case MIN_EXPR:
13609 case MAX_EXPR:
13610 return RECURSE (op0) && RECURSE (op1);
13611
13612 default:
13613 break;
13614 }
13615 return false;
13616 }
13617
13618 /* Return true if the floating point result of calling FNDECL with arguments
13619 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13620 considered integer values. Return false for signaling NaN. If FNDECL
13621 takes fewer than 2 arguments, the remaining ARGn are null.
13622
13623 DEPTH is the current nesting depth of the query. */
13624
13625 bool
13626 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13627 {
13628 switch (fn)
13629 {
13630 CASE_CFN_CEIL:
13631 CASE_CFN_FLOOR:
13632 CASE_CFN_NEARBYINT:
13633 CASE_CFN_RINT:
13634 CASE_CFN_ROUND:
13635 CASE_CFN_TRUNC:
13636 return true;
13637
13638 CASE_CFN_FMIN:
13639 CASE_CFN_FMAX:
13640 return RECURSE (arg0) && RECURSE (arg1);
13641
13642 default:
13643 break;
13644 }
13645 return false;
13646 }
13647
13648 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13649 has an integer value. We also allow +Inf, -Inf and NaN to be
13650 considered integer values. Return false for signaling NaN.
13651
13652 DEPTH is the current nesting depth of the query. */
13653
13654 bool
13655 integer_valued_real_single_p (tree t, int depth)
13656 {
13657 switch (TREE_CODE (t))
13658 {
13659 case REAL_CST:
13660 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13661
13662 case COND_EXPR:
13663 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13664
13665 case SSA_NAME:
13666 /* Limit the depth of recursion to avoid quadratic behavior.
13667 This is expected to catch almost all occurrences in practice.
13668 If this code misses important cases that unbounded recursion
13669 would not, passes that need this information could be revised
13670 to provide it through dataflow propagation. */
13671 return (!name_registered_for_update_p (t)
13672 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13673 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13674 depth));
13675
13676 default:
13677 break;
13678 }
13679 return false;
13680 }
13681
13682 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13683 has an integer value. We also allow +Inf, -Inf and NaN to be
13684 considered integer values. Return false for signaling NaN.
13685
13686 DEPTH is the current nesting depth of the query. */
13687
13688 static bool
13689 integer_valued_real_invalid_p (tree t, int depth)
13690 {
13691 switch (TREE_CODE (t))
13692 {
13693 case COMPOUND_EXPR:
13694 case MODIFY_EXPR:
13695 case BIND_EXPR:
13696 return RECURSE (TREE_OPERAND (t, 1));
13697
13698 case SAVE_EXPR:
13699 return RECURSE (TREE_OPERAND (t, 0));
13700
13701 default:
13702 break;
13703 }
13704 return false;
13705 }
13706
13707 #undef RECURSE
13708 #undef integer_valued_real_p
13709
13710 /* Return true if the floating point expression T has an integer value.
13711 We also allow +Inf, -Inf and NaN to be considered integer values.
13712 Return false for signaling NaN.
13713
13714 DEPTH is the current nesting depth of the query. */
13715
13716 bool
13717 integer_valued_real_p (tree t, int depth)
13718 {
13719 if (t == error_mark_node)
13720 return false;
13721
13722 tree_code code = TREE_CODE (t);
13723 switch (TREE_CODE_CLASS (code))
13724 {
13725 case tcc_binary:
13726 case tcc_comparison:
13727 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13728 TREE_OPERAND (t, 1), depth);
13729
13730 case tcc_unary:
13731 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13732
13733 case tcc_constant:
13734 case tcc_declaration:
13735 case tcc_reference:
13736 return integer_valued_real_single_p (t, depth);
13737
13738 default:
13739 break;
13740 }
13741
13742 switch (code)
13743 {
13744 case COND_EXPR:
13745 case SSA_NAME:
13746 return integer_valued_real_single_p (t, depth);
13747
13748 case CALL_EXPR:
13749 {
13750 tree arg0 = (call_expr_nargs (t) > 0
13751 ? CALL_EXPR_ARG (t, 0)
13752 : NULL_TREE);
13753 tree arg1 = (call_expr_nargs (t) > 1
13754 ? CALL_EXPR_ARG (t, 1)
13755 : NULL_TREE);
13756 return integer_valued_real_call_p (get_call_combined_fn (t),
13757 arg0, arg1, depth);
13758 }
13759
13760 default:
13761 return integer_valued_real_invalid_p (t, depth);
13762 }
13763 }
13764
13765 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13766 attempt to fold the expression to a constant without modifying TYPE,
13767 OP0 or OP1.
13768
13769 If the expression could be simplified to a constant, then return
13770 the constant. If the expression would not be simplified to a
13771 constant, then return NULL_TREE. */
13772
13773 tree
13774 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13775 {
13776 tree tem = fold_binary (code, type, op0, op1);
13777 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13778 }
13779
13780 /* Given the components of a unary expression CODE, TYPE and OP0,
13781 attempt to fold the expression to a constant without modifying
13782 TYPE or OP0.
13783
13784 If the expression could be simplified to a constant, then return
13785 the constant. If the expression would not be simplified to a
13786 constant, then return NULL_TREE. */
13787
13788 tree
13789 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13790 {
13791 tree tem = fold_unary (code, type, op0);
13792 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13793 }
13794
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      /* For *(str + cst), string_constant extracts both the STRING_CST
         and the constant offset; for str[i] we compute the effective
         index ourselves below.  */
      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype, low_bound));

          string = exp1;
        }

      /* Only fold when the string is a STRING_CST, the index is a
         known in-bounds constant, and the element is a single-byte
         integer mode matching the access type (i.e. a plain char
         string); wider element modes are left alone.  */
      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
13846
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        bool overflow;
        wide_int val = wi::neg (arg0, &overflow);
        /* Record overflow (e.g. negating the minimum value) only for
           signed types; unsigned negation wraps by definition.  Any
           pre-existing overflow flag on ARG0 is propagated the same
           way.  */
        t = force_fit_type (type, val, 1,
                            (overflow | TREE_OVERFLOW (arg0))
                            && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      /* Callers guarantee ARG0 is one of the constant kinds above.  */
      gcc_unreachable ();
    }

  return t;
}
13892
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (!wi::neg_p (arg0, TYPE_SIGN (type)))
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            bool overflow;
            wide_int val = wi::neg (arg0, &overflow);
            /* Negating the minimum signed value overflows; that (or a
               pre-existing overflow flag on ARG0) is recorded on the
               result.  */
            t = force_fit_type (type, val, -1,
                                overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t =  arg0;
      break;

    default:
      /* Callers guarantee ARG0 is an integer or real constant.  */
      gcc_unreachable ();
    }

  return t;
}
13937
13938 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13939 constant. TYPE is the type of the result. */
13940
13941 static tree
13942 fold_not_const (const_tree arg0, tree type)
13943 {
13944 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13945
13946 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13947 }
13948
13949 /* Given CODE, a relational operator, the target type, TYPE and two
13950 constant operands OP0 and OP1, return the result of the
13951 relational operation. If the result is not a compile time
13952 constant, then return NULL_TREE. */
13953
13954 static tree
13955 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13956 {
13957 int result, invert;
13958
13959 /* From here on, the only cases we handle are when the result is
13960 known to be a constant. */
13961
13962 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13963 {
13964 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13965 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13966
13967 /* Handle the cases where either operand is a NaN. */
13968 if (real_isnan (c0) || real_isnan (c1))
13969 {
13970 switch (code)
13971 {
13972 case EQ_EXPR:
13973 case ORDERED_EXPR:
13974 result = 0;
13975 break;
13976
13977 case NE_EXPR:
13978 case UNORDERED_EXPR:
13979 case UNLT_EXPR:
13980 case UNLE_EXPR:
13981 case UNGT_EXPR:
13982 case UNGE_EXPR:
13983 case UNEQ_EXPR:
13984 result = 1;
13985 break;
13986
13987 case LT_EXPR:
13988 case LE_EXPR:
13989 case GT_EXPR:
13990 case GE_EXPR:
13991 case LTGT_EXPR:
13992 if (flag_trapping_math)
13993 return NULL_TREE;
13994 result = 0;
13995 break;
13996
13997 default:
13998 gcc_unreachable ();
13999 }
14000
14001 return constant_boolean_node (result, type);
14002 }
14003
14004 return constant_boolean_node (real_compare (code, c0, c1), type);
14005 }
14006
14007 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14008 {
14009 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14010 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14011 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14012 }
14013
14014 /* Handle equality/inequality of complex constants. */
14015 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14016 {
14017 tree rcond = fold_relational_const (code, type,
14018 TREE_REALPART (op0),
14019 TREE_REALPART (op1));
14020 tree icond = fold_relational_const (code, type,
14021 TREE_IMAGPART (op0),
14022 TREE_IMAGPART (op1));
14023 if (code == EQ_EXPR)
14024 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14025 else if (code == NE_EXPR)
14026 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14027 else
14028 return NULL_TREE;
14029 }
14030
14031 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14032 {
14033 if (!VECTOR_TYPE_P (type))
14034 {
14035 /* Have vector comparison with scalar boolean result. */
14036 bool result = true;
14037 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14038 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
14039 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
14040 {
14041 tree elem0 = VECTOR_CST_ELT (op0, i);
14042 tree elem1 = VECTOR_CST_ELT (op1, i);
14043 tree tmp = fold_relational_const (code, type, elem0, elem1);
14044 result &= integer_onep (tmp);
14045 }
14046 if (code == NE_EXPR)
14047 result = !result;
14048 return constant_boolean_node (result, type);
14049 }
14050 unsigned count = VECTOR_CST_NELTS (op0);
14051 tree *elts = XALLOCAVEC (tree, count);
14052 gcc_assert (VECTOR_CST_NELTS (op1) == count
14053 && TYPE_VECTOR_SUBPARTS (type) == count);
14054
14055 for (unsigned i = 0; i < count; i++)
14056 {
14057 tree elem_type = TREE_TYPE (type);
14058 tree elem0 = VECTOR_CST_ELT (op0, i);
14059 tree elem1 = VECTOR_CST_ELT (op1, i);
14060
14061 tree tem = fold_relational_const (code, elem_type,
14062 elem0, elem1);
14063
14064 if (tem == NULL_TREE)
14065 return NULL_TREE;
14066
14067 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
14068 }
14069
14070 return build_vector (type, elts);
14071 }
14072
14073 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14074
14075 To compute GT, swap the arguments and do LT.
14076 To compute GE, do LT and invert the result.
14077 To compute LE, swap the arguments, do LT and invert the result.
14078 To compute NE, do EQ and invert the result.
14079
14080 Therefore, the code below must handle only EQ and LT. */
14081
14082 if (code == LE_EXPR || code == GT_EXPR)
14083 {
14084 std::swap (op0, op1);
14085 code = swap_tree_comparison (code);
14086 }
14087
14088 /* Note that it is safe to invert for real values here because we
14089 have already handled the one case that it matters. */
14090
14091 invert = 0;
14092 if (code == NE_EXPR || code == GE_EXPR)
14093 {
14094 invert = 1;
14095 code = invert_tree_comparison (code, false);
14096 }
14097
14098 /* Compute a result for LT or EQ if args permit;
14099 Otherwise return T. */
14100 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14101 {
14102 if (code == EQ_EXPR)
14103 result = tree_int_cst_equal (op0, op1);
14104 else
14105 result = tree_int_cst_lt (op0, op1);
14106 }
14107 else
14108 return NULL_TREE;
14109
14110 if (invert)
14111 result ^= 1;
14112 return constant_boolean_node (result, type);
14113 }
14114
14115 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14116 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14117 itself. */
14118
14119 tree
14120 fold_build_cleanup_point_expr (tree type, tree expr)
14121 {
14122 /* If the expression does not have side effects then we don't have to wrap
14123 it with a cleanup point expression. */
14124 if (!TREE_SIDE_EFFECTS (expr))
14125 return expr;
14126
14127 /* If the expression is a return, check to see if the expression inside the
14128 return has no side effects or the right hand side of the modify expression
14129 inside the return. If either don't have side effects set we don't need to
14130 wrap the expression in a cleanup point expression. Note we don't check the
14131 left hand side of the modify because it should always be a return decl. */
14132 if (TREE_CODE (expr) == RETURN_EXPR)
14133 {
14134 tree op = TREE_OPERAND (expr, 0);
14135 if (!op || !TREE_SIDE_EFFECTS (op))
14136 return expr;
14137 op = TREE_OPERAND (op, 1);
14138 if (!TREE_SIDE_EFFECTS (op))
14139 return expr;
14140 }
14141
14142 return build1 (CLEANUP_POINT_EXPR, type, expr);
14143 }
14144
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  /* In gimple form, a non-constant lower bound would create an
	     invalid ARRAY_REF; give up instead.  */
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* Handle *(&base + CST), i.e. indirection through a constant offset
     off a known object.  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      tree part_width = TYPE_SIZE (type);
	      /* Reject offsets that are negative or past the end of
		 the vector.  */
	      unsigned HOST_WIDE_INT max_offset
		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
		   * TYPE_VECTOR_SUBPARTS (op00type));
	      if (tree_int_cst_sign_bit (op01) == 0
		  && compare_tree_int (op01, max_offset) == -1)
		{
		  unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
		  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
		  tree index = bitsize_int (indexi);
		  return fold_build3_loc (loc,
					  BIT_FIELD_REF, type, op00,
					  part_width, index);
		}
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      /* Only an offset of exactly one element selects the
		 imaginary part.  */
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      /* Convert the byte offset into an array index, adjusting
		 for a non-zero lower bound.  */
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      /* As above, gimple form requires a constant lower bound.  */
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
14284
14285 /* Builds an expression for an indirection through T, simplifying some
14286 cases. */
14287
14288 tree
14289 build_fold_indirect_ref_loc (location_t loc, tree t)
14290 {
14291 tree type = TREE_TYPE (TREE_TYPE (t));
14292 tree sub = fold_indirect_ref_1 (loc, type, t);
14293
14294 if (sub)
14295 return sub;
14296
14297 return build1_loc (loc, INDIRECT_REF, type, t);
14298 }
14299
14300 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14301
14302 tree
14303 fold_indirect_ref_loc (location_t loc, tree t)
14304 {
14305 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14306
14307 if (sub)
14308 return sub;
14309 else
14310 return t;
14311 }
14312
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  /* If nothing in T has side effects, the whole expression can be
     dropped.  */
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  /* Repeatedly peel off wrappers, keeping only the operand(s) that
     carry side effects; stop as soon as no further stripping is
     possible.  */
  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	/* Keep whichever operand has side effects; if both do, the
	   expression must be preserved as a whole.  */
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    /* A conditional can be reduced to its condition only when
	       neither arm has side effects.  */
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
14365
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = value;
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val += divisor - 1;
	  val &= - (int) divisor;
	  /* Rounding up wrapped to zero, so the result overflowed the
	     precision of VALUE's type.  */
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  /* VALUE = (VALUE + (DIVISOR - 1)) & -DIVISOR.  */
	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      /* General case: ceil-divide, then multiply back.  */
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
14427
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      /* VALUE = VALUE & -DIVISOR clears the low log2(DIVISOR) bits.  */
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      /* General case: floor-divide, then multiply back.  */
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
14469
14470 /* Returns the pointer to the base of the object addressed by EXP and
14471 extracts the information about the offset of the access, storing it
14472 to PBITPOS and POFFSET. */
14473
14474 static tree
14475 split_address_to_core_and_offset (tree exp,
14476 HOST_WIDE_INT *pbitpos, tree *poffset)
14477 {
14478 tree core;
14479 machine_mode mode;
14480 int unsignedp, reversep, volatilep;
14481 HOST_WIDE_INT bitsize;
14482 location_t loc = EXPR_LOCATION (exp);
14483
14484 if (TREE_CODE (exp) == ADDR_EXPR)
14485 {
14486 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14487 poffset, &mode, &unsignedp, &reversep,
14488 &volatilep, false);
14489 core = build_fold_addr_expr_loc (loc, core);
14490 }
14491 else
14492 {
14493 core = exp;
14494 *pbitpos = 0;
14495 *poffset = NULL_TREE;
14496 }
14497
14498 return core;
14499 }
14500
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  /* The difference can only be constant when both addresses refer to
     the same core object at byte-aligned bit positions.  */
  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      /* Both variable offsets present: their difference must fold to a
	 constant fitting in a HOST_WIDE_INT.  */
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  /* Fold the constant bit-position difference into the result.  */
  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
14543
/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  /* sizetype is the canonical offset type for POINTER_PLUS_EXPR.  */
  return fold_convert_loc (loc, sizetype, off);
}
14551
14552 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14553 tree
14554 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14555 {
14556 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14557 ptr, convert_to_ptrofftype_loc (loc, off));
14558 }
14559
14560 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14561 tree
14562 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14563 {
14564 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14565 ptr, size_int (off));
14566 }
14567
14568 /* Return a char pointer for a C string if it is a string constant
14569 or sum of string constant and integer constant. */
14570
14571 const char *
14572 c_getstr (tree src)
14573 {
14574 tree offset_node;
14575
14576 src = string_constant (src, &offset_node);
14577 if (src == 0)
14578 return 0;
14579
14580 if (offset_node == 0)
14581 return TREE_STRING_POINTER (src);
14582 else if (!tree_fits_uhwi_p (offset_node)
14583 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14584 return 0;
14585
14586 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
14587 }