/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-ssanames.h"

#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(M) UNKNOWN
#endif

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
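/* For example, with this encoding combining two comparisons is just a
   bitwise operation on their codes: COMPCODE_LE == (COMPCODE_LT
   | COMPCODE_EQ), COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT
   | COMPCODE_UNORD), and COMPCODE_ORD == (COMPCODE_LT | COMPCODE_EQ
   | COMPCODE_GT).  */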

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (location_t, tree, tree, enum tree_code,
			tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    machine_mode *, int *, int *, int *,
				    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */
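/* For example, for INTEGER_CSTs 12 and 4 this returns 3, while 13 and 4
   yield NULL_TREE because the remainder is nonzero.  */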

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */
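/* A typical caller brackets a fold like so (a sketch, not a quote from
   any particular caller):

     fold_defer_overflow_warnings ();
     tree t = fold (expr);
     fold_undefer_overflow_warnings (result_was_used, stmt, 0);  */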

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */
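/* For example, sin is odd, so fold may rewrite -sin(x) as sin(-x).
   The rint family is treated as odd only when -frounding-math is off,
   since under a directed rounding mode rint(-x) need not equal
   -rint(x).  */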

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */
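/* For example, in a 32-bit signed type every value except INT_MIN can
   be negated; INT_MIN, the value with only the sign bit set, has no
   positive counterpart.  */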

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but negating one operand makes
	 it overflow if n is a power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && ! integer_pow2p (TREE_OPERAND (t, 0)))
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && ! integer_pow2p (TREE_OPERAND (t, 1)))))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */
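/* For example, - (~A) folds to A + 1 for integral types, and
   - (A - B) folds to B - A when sign-dependent rounding and signed
   zeros do not have to be honored.  */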

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
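/* For example, splitting IN = x + 3 with CODE PLUS_EXPR returns x and
   sets *litp to 3, while IN = x - 3 returns x and sets *minus_litp
   to 3.  */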

static tree
split_tree (location_t loc, tree in, tree type, enum tree_code code,
	    tree *conp, tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	{
	  /* Convert to TYPE before negating a pointer type expr.  */
	  if (var && POINTER_TYPE_P (TREE_TYPE (var)))
	    var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      /* Convert to TYPE before negating a pointer type expr.  */
      if (var && POINTER_TYPE_P (TREE_TYPE (var)))
	var = fold_convert_loc (loc, type, var);
      var = negate_expr (var);
    }

  return var;
}

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */
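/* For example, PLUS_EXPR on INTEGER_CSTs 7 and 5 folds to 12, any
   division or modulus by zero returns NULL_TREE instead of folding,
   and a negative shift or rotate count is canonicalized to the
   opposite direction before evaluating.  */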

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     div = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument, put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE, where ARG0 is constant.
   Return zero if computing the constant is not possible.  */
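/* For example, NEGATE_EXPR on INTEGER_CST 5 yields -5, BIT_NOT_EXPR on
   a VECTOR_CST is applied element by element, and REALPART_EXPR on a
   COMPLEX_CST extracts its real component.  */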

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */
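/* The size_int, ssize_int, bitsize_int and sbitsize_int macros in
   tree.h wrap this function, one per size_type_kind.  */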

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */
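/* For example, size_binop (PLUS_EXPR, size_int (4), size_int (8))
   folds directly to the sizetype constant 12; for sizetype constants
   overflow is tracked even though the type is unsigned.  */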

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */
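/* For example, with sizetype operands the result has type ssizetype,
   so subtracting 8 from 4 yields -4 rather than a huge unsigned
   value.  */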

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
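  /* For example, converting the REAL_CST 1e30 to a 32-bit signed type
     yields INT_MAX with TREE_OVERFLOW set on the result, while a NaN
     input yields zero, likewise marked as overflowed.  */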
1873
1874 wide_int val;
1875 REAL_VALUE_TYPE r;
1876 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1877
1878 switch (code)
1879 {
1880 case FIX_TRUNC_EXPR:
1881 real_trunc (&r, VOIDmode, &x);
1882 break;
1883
1884 default:
1885 gcc_unreachable ();
1886 }
1887
1888 /* If R is NaN, return zero and show we have an overflow. */
1889 if (REAL_VALUE_ISNAN (r))
1890 {
1891 overflow = true;
1892 val = wi::zero (TYPE_PRECISION (type));
1893 }
1894
1895 /* See if R is less than the lower bound or greater than the
1896 upper bound. */
1897
1898 if (! overflow)
1899 {
1900 tree lt = TYPE_MIN_VALUE (type);
1901 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1902 if (real_less (&r, &l))
1903 {
1904 overflow = true;
1905 val = lt;
1906 }
1907 }
1908
1909 if (! overflow)
1910 {
1911 tree ut = TYPE_MAX_VALUE (type);
1912 if (ut)
1913 {
1914 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1915 if (real_less (&u, &r))
1916 {
1917 overflow = true;
1918 val = ut;
1919 }
1920 }
1921 }
1922
1923 if (! overflow)
1924 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1925
1926 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1927 return t;
1928 }
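/* Illustrative sketch, not part of the original file: the saturating
   semantics above mean that an out-of-range REAL_CST folds to the target
   type's extreme value with TREE_OVERFLOW set, rather than wrapping.  */
#if 0
static void
example_real_to_int_saturates (void)
{
  REAL_VALUE_TYPE huge;
  real_from_string (&huge, "1e30");
  tree r = build_real (double_type_node, huge);
  /* T is TYPE_MAX_VALUE (integer_type_node) and TREE_OVERFLOW (t) is 1.  */
  tree t = fold_convert_const_int_from_real (FIX_TRUNC_EXPR,
					     integer_type_node, r);
  (void) t;
}
#endif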
1929
1930 /* A subroutine of fold_convert_const handling conversions of a
1931 FIXED_CST to an integer type. */
1932
1933 static tree
1934 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1935 {
1936 tree t;
1937 double_int temp, temp_trunc;
1938 unsigned int mode;
1939
1940 /* Right shift FIXED_CST to temp by fbit. */
1941 temp = TREE_FIXED_CST (arg1).data;
1942 mode = TREE_FIXED_CST (arg1).mode;
1943 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1944 {
1945 temp = temp.rshift (GET_MODE_FBIT (mode),
1946 HOST_BITS_PER_DOUBLE_INT,
1947 SIGNED_FIXED_POINT_MODE_P (mode));
1948
1949 /* Left shift temp to temp_trunc by fbit. */
1950 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1951 HOST_BITS_PER_DOUBLE_INT,
1952 SIGNED_FIXED_POINT_MODE_P (mode));
1953 }
1954 else
1955 {
1956 temp = double_int_zero;
1957 temp_trunc = double_int_zero;
1958 }
1959
1960 /* If FIXED_CST is negative, we need to round the value toward 0.
1961      We do this by adding 1 to temp when the fractional bits are not zero.  */
1962 if (SIGNED_FIXED_POINT_MODE_P (mode)
1963 && temp_trunc.is_negative ()
1964 && TREE_FIXED_CST (arg1).data != temp_trunc)
1965 temp += double_int_one;
1966
1967 /* Given a fixed-point constant, make new constant with new type,
1968 appropriately sign-extended or truncated. */
1969 t = force_fit_type (type, temp, -1,
1970 (temp.is_negative ()
1971 && (TYPE_UNSIGNED (type)
1972 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1973 | TREE_OVERFLOW (arg1));
1974
1975 return t;
1976 }
1977
1978 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1979 to another floating point type. */
1980
1981 static tree
1982 fold_convert_const_real_from_real (tree type, const_tree arg1)
1983 {
1984 REAL_VALUE_TYPE value;
1985 tree t;
1986
1987 /* Don't perform the operation if flag_signaling_nans is on
1988 and the operand is a signaling NaN. */
1989 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
1990 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
1991 return NULL_TREE;
1992
1993 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1994 t = build_real (type, value);
1995
1996 /* If converting an infinity or NAN to a representation that doesn't
1997 have one, set the overflow bit so that we can produce some kind of
1998 error message at the appropriate point if necessary. It's not the
1999 most user-friendly message, but it's better than nothing. */
2000 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2001 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2002 TREE_OVERFLOW (t) = 1;
2003 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2004 && !MODE_HAS_NANS (TYPE_MODE (type)))
2005 TREE_OVERFLOW (t) = 1;
2006   /* Regular overflow: the conversion produced an infinity in a mode
2007      that can't represent infinities.  */
2008 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2009 && REAL_VALUE_ISINF (value)
2010 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2011 TREE_OVERFLOW (t) = 1;
2012 else
2013 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2014 return t;
2015 }
2016
2017 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2018 to a floating point type. */
2019
2020 static tree
2021 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2022 {
2023 REAL_VALUE_TYPE value;
2024 tree t;
2025
2026 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2027 t = build_real (type, value);
2028
2029 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2030 return t;
2031 }
2032
2033 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2034 to another fixed-point type. */
2035
2036 static tree
2037 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2038 {
2039 FIXED_VALUE_TYPE value;
2040 tree t;
2041 bool overflow_p;
2042
2043 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2044 TYPE_SATURATING (type));
2045 t = build_fixed (type, value);
2046
2047 /* Propagate overflow flags. */
2048 if (overflow_p | TREE_OVERFLOW (arg1))
2049 TREE_OVERFLOW (t) = 1;
2050 return t;
2051 }
2052
2053 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2054 to a fixed-point type. */
2055
2056 static tree
2057 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2058 {
2059 FIXED_VALUE_TYPE value;
2060 tree t;
2061 bool overflow_p;
2062 double_int di;
2063
2064 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2065
2066 di.low = TREE_INT_CST_ELT (arg1, 0);
2067 if (TREE_INT_CST_NUNITS (arg1) == 1)
2068 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2069 else
2070 di.high = TREE_INT_CST_ELT (arg1, 1);
2071
2072 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2073 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2074 TYPE_SATURATING (type));
2075 t = build_fixed (type, value);
2076
2077 /* Propagate overflow flags. */
2078 if (overflow_p | TREE_OVERFLOW (arg1))
2079 TREE_OVERFLOW (t) = 1;
2080 return t;
2081 }
2082
2083 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2084 to a fixed-point type. */
2085
2086 static tree
2087 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2088 {
2089 FIXED_VALUE_TYPE value;
2090 tree t;
2091 bool overflow_p;
2092
2093 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2094 &TREE_REAL_CST (arg1),
2095 TYPE_SATURATING (type));
2096 t = build_fixed (type, value);
2097
2098 /* Propagate overflow flags. */
2099 if (overflow_p | TREE_OVERFLOW (arg1))
2100 TREE_OVERFLOW (t) = 1;
2101 return t;
2102 }
2103
2104 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2105 type TYPE. If no simplification can be done return NULL_TREE. */
2106
2107 static tree
2108 fold_convert_const (enum tree_code code, tree type, tree arg1)
2109 {
2110 if (TREE_TYPE (arg1) == type)
2111 return arg1;
2112
2113 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2114 || TREE_CODE (type) == OFFSET_TYPE)
2115 {
2116 if (TREE_CODE (arg1) == INTEGER_CST)
2117 return fold_convert_const_int_from_int (type, arg1);
2118 else if (TREE_CODE (arg1) == REAL_CST)
2119 return fold_convert_const_int_from_real (code, type, arg1);
2120 else if (TREE_CODE (arg1) == FIXED_CST)
2121 return fold_convert_const_int_from_fixed (type, arg1);
2122 }
2123 else if (TREE_CODE (type) == REAL_TYPE)
2124 {
2125 if (TREE_CODE (arg1) == INTEGER_CST)
2126 return build_real_from_int_cst (type, arg1);
2127 else if (TREE_CODE (arg1) == REAL_CST)
2128 return fold_convert_const_real_from_real (type, arg1);
2129 else if (TREE_CODE (arg1) == FIXED_CST)
2130 return fold_convert_const_real_from_fixed (type, arg1);
2131 }
2132 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2133 {
2134 if (TREE_CODE (arg1) == FIXED_CST)
2135 return fold_convert_const_fixed_from_fixed (type, arg1);
2136 else if (TREE_CODE (arg1) == INTEGER_CST)
2137 return fold_convert_const_fixed_from_int (type, arg1);
2138 else if (TREE_CODE (arg1) == REAL_CST)
2139 return fold_convert_const_fixed_from_real (type, arg1);
2140 }
2141 else if (TREE_CODE (type) == VECTOR_TYPE)
2142 {
2143 if (TREE_CODE (arg1) == VECTOR_CST
2144 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2145 {
2146 int len = TYPE_VECTOR_SUBPARTS (type);
2147 tree elttype = TREE_TYPE (type);
2148 tree *v = XALLOCAVEC (tree, len);
2149 for (int i = 0; i < len; ++i)
2150 {
2151 tree elt = VECTOR_CST_ELT (arg1, i);
2152 tree cvt = fold_convert_const (code, elttype, elt);
2153 if (cvt == NULL_TREE)
2154 return NULL_TREE;
2155 v[i] = cvt;
2156 }
2157 return build_vector (type, v);
2158 }
2159 }
2160 return NULL_TREE;
2161 }
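/* Illustrative sketch, not part of the original file: the dispatch above
   folds constant conversions entirely at compile time, e.g. turning the
   INTEGER_CST 3 into the REAL_CST 3.0 for a FLOAT_EXPR.  */
#if 0
static tree
example_fold_convert_const (void)
{
  tree three = build_int_cst (integer_type_node, 3);
  return fold_convert_const (FLOAT_EXPR, double_type_node, three);
}
#endif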
2162
2163 /* Construct a vector of zero elements of vector type TYPE. */
2164
2165 static tree
2166 build_zero_vector (tree type)
2167 {
2168 tree t;
2169
2170 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2171 return build_vector_from_val (type, t);
2172 }
2173
2174 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2175
2176 bool
2177 fold_convertible_p (const_tree type, const_tree arg)
2178 {
2179 tree orig = TREE_TYPE (arg);
2180
2181 if (type == orig)
2182 return true;
2183
2184 if (TREE_CODE (arg) == ERROR_MARK
2185 || TREE_CODE (type) == ERROR_MARK
2186 || TREE_CODE (orig) == ERROR_MARK)
2187 return false;
2188
2189 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2190 return true;
2191
2192 switch (TREE_CODE (type))
2193 {
2194 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2195 case POINTER_TYPE: case REFERENCE_TYPE:
2196 case OFFSET_TYPE:
2197 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2198 || TREE_CODE (orig) == OFFSET_TYPE);
2199
2200 case REAL_TYPE:
2201 case FIXED_POINT_TYPE:
2202 case COMPLEX_TYPE:
2203 case VECTOR_TYPE:
2204 case VOID_TYPE:
2205 return TREE_CODE (type) == TREE_CODE (orig);
2206
2207 default:
2208 return false;
2209 }
2210 }
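/* Illustrative sketch, not part of the original file: callers use
   fold_convertible_p to check that a plain NOP_EXPR suffices, e.g.
   int -> long is convertible here, while int -> double is not (that
   conversion needs a FLOAT_EXPR and goes through fold_convert_loc).  */
#if 0
static bool
example_nop_convertible (tree arg)
{
  return fold_convertible_p (long_integer_type_node, arg);
}
#endif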
2211
2212 /* Convert expression ARG to type TYPE. Used by the middle-end for
2213 simple conversions in preference to calling the front-end's convert. */
2214
2215 tree
2216 fold_convert_loc (location_t loc, tree type, tree arg)
2217 {
2218 tree orig = TREE_TYPE (arg);
2219 tree tem;
2220
2221 if (type == orig)
2222 return arg;
2223
2224 if (TREE_CODE (arg) == ERROR_MARK
2225 || TREE_CODE (type) == ERROR_MARK
2226 || TREE_CODE (orig) == ERROR_MARK)
2227 return error_mark_node;
2228
2229 switch (TREE_CODE (type))
2230 {
2231 case POINTER_TYPE:
2232 case REFERENCE_TYPE:
2233 /* Handle conversions between pointers to different address spaces. */
2234 if (POINTER_TYPE_P (orig)
2235 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2236 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2237 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2238 /* fall through */
2239
2240 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2241 case OFFSET_TYPE:
2242 if (TREE_CODE (arg) == INTEGER_CST)
2243 {
2244 tem = fold_convert_const (NOP_EXPR, type, arg);
2245 if (tem != NULL_TREE)
2246 return tem;
2247 }
2248 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2249 || TREE_CODE (orig) == OFFSET_TYPE)
2250 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2251 if (TREE_CODE (orig) == COMPLEX_TYPE)
2252 return fold_convert_loc (loc, type,
2253 fold_build1_loc (loc, REALPART_EXPR,
2254 TREE_TYPE (orig), arg));
2255 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2256 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2257 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2258
2259 case REAL_TYPE:
2260 if (TREE_CODE (arg) == INTEGER_CST)
2261 {
2262 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2263 if (tem != NULL_TREE)
2264 return tem;
2265 }
2266 else if (TREE_CODE (arg) == REAL_CST)
2267 {
2268 tem = fold_convert_const (NOP_EXPR, type, arg);
2269 if (tem != NULL_TREE)
2270 return tem;
2271 }
2272 else if (TREE_CODE (arg) == FIXED_CST)
2273 {
2274 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2275 if (tem != NULL_TREE)
2276 return tem;
2277 }
2278
2279 switch (TREE_CODE (orig))
2280 {
2281 case INTEGER_TYPE:
2282 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2283 case POINTER_TYPE: case REFERENCE_TYPE:
2284 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2285
2286 case REAL_TYPE:
2287 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2288
2289 case FIXED_POINT_TYPE:
2290 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2291
2292 case COMPLEX_TYPE:
2293 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2294 return fold_convert_loc (loc, type, tem);
2295
2296 default:
2297 gcc_unreachable ();
2298 }
2299
2300 case FIXED_POINT_TYPE:
2301 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2302 || TREE_CODE (arg) == REAL_CST)
2303 {
2304 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2305 if (tem != NULL_TREE)
2306 goto fold_convert_exit;
2307 }
2308
2309 switch (TREE_CODE (orig))
2310 {
2311 case FIXED_POINT_TYPE:
2312 case INTEGER_TYPE:
2313 case ENUMERAL_TYPE:
2314 case BOOLEAN_TYPE:
2315 case REAL_TYPE:
2316 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2317
2318 case COMPLEX_TYPE:
2319 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2320 return fold_convert_loc (loc, type, tem);
2321
2322 default:
2323 gcc_unreachable ();
2324 }
2325
2326 case COMPLEX_TYPE:
2327 switch (TREE_CODE (orig))
2328 {
2329 case INTEGER_TYPE:
2330 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2331 case POINTER_TYPE: case REFERENCE_TYPE:
2332 case REAL_TYPE:
2333 case FIXED_POINT_TYPE:
2334 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2335 fold_convert_loc (loc, TREE_TYPE (type), arg),
2336 fold_convert_loc (loc, TREE_TYPE (type),
2337 integer_zero_node));
2338 case COMPLEX_TYPE:
2339 {
2340 tree rpart, ipart;
2341
2342 if (TREE_CODE (arg) == COMPLEX_EXPR)
2343 {
2344 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2345 TREE_OPERAND (arg, 0));
2346 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2347 TREE_OPERAND (arg, 1));
2348 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2349 }
2350
2351 arg = save_expr (arg);
2352 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2353 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2354 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2355 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2356 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2357 }
2358
2359 default:
2360 gcc_unreachable ();
2361 }
2362
2363 case VECTOR_TYPE:
2364 if (integer_zerop (arg))
2365 return build_zero_vector (type);
2366 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2367 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2368 || TREE_CODE (orig) == VECTOR_TYPE);
2369 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2370
2371 case VOID_TYPE:
2372 tem = fold_ignored_result (arg);
2373 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2374
2375 default:
2376 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2377 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2378 gcc_unreachable ();
2379 }
2380 fold_convert_exit:
2381 protected_set_expr_location_unshare (tem, loc);
2382 return tem;
2383 }
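/* Illustrative sketch, a hypothetical caller not in the original file:
   converting an int-typed expression X to double through the middle-end
   entry point; INTEGER_CSTs fold immediately via fold_convert_const,
   anything else becomes a FLOAT_EXPR.  */
#if 0
static tree
example_int_to_double (location_t loc, tree x)
{
  return fold_convert_loc (loc, double_type_node, x);
}
#endif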
2384 \f
2385 /* Return false if expr can be assumed not to be an lvalue, true
2386 otherwise. */
2387
2388 static bool
2389 maybe_lvalue_p (const_tree x)
2390 {
2391 /* We only need to wrap lvalue tree codes. */
2392 switch (TREE_CODE (x))
2393 {
2394 case VAR_DECL:
2395 case PARM_DECL:
2396 case RESULT_DECL:
2397 case LABEL_DECL:
2398 case FUNCTION_DECL:
2399 case SSA_NAME:
2400
2401 case COMPONENT_REF:
2402 case MEM_REF:
2403 case INDIRECT_REF:
2404 case ARRAY_REF:
2405 case ARRAY_RANGE_REF:
2406 case BIT_FIELD_REF:
2407 case OBJ_TYPE_REF:
2408
2409 case REALPART_EXPR:
2410 case IMAGPART_EXPR:
2411 case PREINCREMENT_EXPR:
2412 case PREDECREMENT_EXPR:
2413 case SAVE_EXPR:
2414 case TRY_CATCH_EXPR:
2415 case WITH_CLEANUP_EXPR:
2416 case COMPOUND_EXPR:
2417 case MODIFY_EXPR:
2418 case TARGET_EXPR:
2419 case COND_EXPR:
2420 case BIND_EXPR:
2421 break;
2422
2423 default:
2424 /* Assume the worst for front-end tree codes. */
2425 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2426 break;
2427 return false;
2428 }
2429
2430 return true;
2431 }
2432
2433 /* Return an expr equal to X but certainly not valid as an lvalue. */
2434
2435 tree
2436 non_lvalue_loc (location_t loc, tree x)
2437 {
2438 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2439 us. */
2440 if (in_gimple_form)
2441 return x;
2442
2443 if (! maybe_lvalue_p (x))
2444 return x;
2445 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2446 }
2447
2448 /* Return X with its location set to LOC.  Historically, when pedantic,
2449    this returned an expr equal to X but not valid as a pedantic lvalue.  */
2450
2451 static tree
2452 pedantic_non_lvalue_loc (location_t loc, tree x)
2453 {
2454 return protected_set_expr_location_unshare (x, loc);
2455 }
2456 \f
2457 /* Given a tree comparison code, return the code that is the logical inverse.
2458 It is generally not safe to do this for floating-point comparisons, except
2459 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2460 ERROR_MARK in this case. */
2461
2462 enum tree_code
2463 invert_tree_comparison (enum tree_code code, bool honor_nans)
2464 {
2465 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2466 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2467 return ERROR_MARK;
2468
2469 switch (code)
2470 {
2471 case EQ_EXPR:
2472 return NE_EXPR;
2473 case NE_EXPR:
2474 return EQ_EXPR;
2475 case GT_EXPR:
2476 return honor_nans ? UNLE_EXPR : LE_EXPR;
2477 case GE_EXPR:
2478 return honor_nans ? UNLT_EXPR : LT_EXPR;
2479 case LT_EXPR:
2480 return honor_nans ? UNGE_EXPR : GE_EXPR;
2481 case LE_EXPR:
2482 return honor_nans ? UNGT_EXPR : GT_EXPR;
2483 case LTGT_EXPR:
2484 return UNEQ_EXPR;
2485 case UNEQ_EXPR:
2486 return LTGT_EXPR;
2487 case UNGT_EXPR:
2488 return LE_EXPR;
2489 case UNGE_EXPR:
2490 return LT_EXPR;
2491 case UNLT_EXPR:
2492 return GE_EXPR;
2493 case UNLE_EXPR:
2494 return GT_EXPR;
2495 case ORDERED_EXPR:
2496 return UNORDERED_EXPR;
2497 case UNORDERED_EXPR:
2498 return ORDERED_EXPR;
2499 default:
2500 gcc_unreachable ();
2501 }
2502 }
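/* Illustrative sketch, not part of the original file: inverting LT_EXPR
   yields GE_EXPR when NaNs need not be honored, but UNGE_EXPR when they
   must be, so the inverse stays true for unordered operands.  */
#if 0
static void
example_invert_comparison (void)
{
  gcc_assert (invert_tree_comparison (LT_EXPR, false) == GE_EXPR);
  /* With honor_nans set this gives UNGE_EXPR instead, unless
     flag_trapping_math is also on, in which case ERROR_MARK is returned
     because the inversion could change which operands trap.  */
}
#endif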
2503
2504 /* Similar, but return the comparison that results if the operands are
2505 swapped. This is safe for floating-point. */
2506
2507 enum tree_code
2508 swap_tree_comparison (enum tree_code code)
2509 {
2510 switch (code)
2511 {
2512 case EQ_EXPR:
2513 case NE_EXPR:
2514 case ORDERED_EXPR:
2515 case UNORDERED_EXPR:
2516 case LTGT_EXPR:
2517 case UNEQ_EXPR:
2518 return code;
2519 case GT_EXPR:
2520 return LT_EXPR;
2521 case GE_EXPR:
2522 return LE_EXPR;
2523 case LT_EXPR:
2524 return GT_EXPR;
2525 case LE_EXPR:
2526 return GE_EXPR;
2527 case UNGT_EXPR:
2528 return UNLT_EXPR;
2529 case UNGE_EXPR:
2530 return UNLE_EXPR;
2531 case UNLT_EXPR:
2532 return UNGT_EXPR;
2533 case UNLE_EXPR:
2534 return UNGE_EXPR;
2535 default:
2536 gcc_unreachable ();
2537 }
2538 }
2539
2540
2541 /* Convert a comparison tree code from an enum tree_code representation
2542 into a compcode bit-based encoding. This function is the inverse of
2543 compcode_to_comparison. */
2544
2545 static enum comparison_code
2546 comparison_to_compcode (enum tree_code code)
2547 {
2548 switch (code)
2549 {
2550 case LT_EXPR:
2551 return COMPCODE_LT;
2552 case EQ_EXPR:
2553 return COMPCODE_EQ;
2554 case LE_EXPR:
2555 return COMPCODE_LE;
2556 case GT_EXPR:
2557 return COMPCODE_GT;
2558 case NE_EXPR:
2559 return COMPCODE_NE;
2560 case GE_EXPR:
2561 return COMPCODE_GE;
2562 case ORDERED_EXPR:
2563 return COMPCODE_ORD;
2564 case UNORDERED_EXPR:
2565 return COMPCODE_UNORD;
2566 case UNLT_EXPR:
2567 return COMPCODE_UNLT;
2568 case UNEQ_EXPR:
2569 return COMPCODE_UNEQ;
2570 case UNLE_EXPR:
2571 return COMPCODE_UNLE;
2572 case UNGT_EXPR:
2573 return COMPCODE_UNGT;
2574 case LTGT_EXPR:
2575 return COMPCODE_LTGT;
2576 case UNGE_EXPR:
2577 return COMPCODE_UNGE;
2578 default:
2579 gcc_unreachable ();
2580 }
2581 }
2582
2583 /* Convert a compcode bit-based encoding of a comparison operator back
2584 to GCC's enum tree_code representation. This function is the
2585 inverse of comparison_to_compcode. */
2586
2587 static enum tree_code
2588 compcode_to_comparison (enum comparison_code code)
2589 {
2590 switch (code)
2591 {
2592 case COMPCODE_LT:
2593 return LT_EXPR;
2594 case COMPCODE_EQ:
2595 return EQ_EXPR;
2596 case COMPCODE_LE:
2597 return LE_EXPR;
2598 case COMPCODE_GT:
2599 return GT_EXPR;
2600 case COMPCODE_NE:
2601 return NE_EXPR;
2602 case COMPCODE_GE:
2603 return GE_EXPR;
2604 case COMPCODE_ORD:
2605 return ORDERED_EXPR;
2606 case COMPCODE_UNORD:
2607 return UNORDERED_EXPR;
2608 case COMPCODE_UNLT:
2609 return UNLT_EXPR;
2610 case COMPCODE_UNEQ:
2611 return UNEQ_EXPR;
2612 case COMPCODE_UNLE:
2613 return UNLE_EXPR;
2614 case COMPCODE_UNGT:
2615 return UNGT_EXPR;
2616 case COMPCODE_LTGT:
2617 return LTGT_EXPR;
2618 case COMPCODE_UNGE:
2619 return UNGE_EXPR;
2620 default:
2621 gcc_unreachable ();
2622 }
2623 }
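/* Illustrative sketch, not part of the original file: the bit encoding
   makes logical combination of comparisons a plain bit operation, e.g.
   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE (1 | 2 == 3) and
   COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ (3 & 6 == 2), which is
   exactly what combine_comparisons below relies on.  */
#if 0
static void
example_compcode_arithmetic (void)
{
  gcc_assert ((comparison_to_compcode (LT_EXPR)
	       | comparison_to_compcode (EQ_EXPR))
	      == (int) comparison_to_compcode (LE_EXPR));
  gcc_assert ((comparison_to_compcode (LE_EXPR)
	       & comparison_to_compcode (GE_EXPR))
	      == (int) comparison_to_compcode (EQ_EXPR));
}
#endif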
2624
2625 /* Return a tree for the comparison which is the combination of
2626 doing the AND or OR (depending on CODE) of the two operations LCODE
2627 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2628 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2629 if this makes the transformation invalid. */
2630
2631 tree
2632 combine_comparisons (location_t loc,
2633 enum tree_code code, enum tree_code lcode,
2634 enum tree_code rcode, tree truth_type,
2635 tree ll_arg, tree lr_arg)
2636 {
2637 bool honor_nans = HONOR_NANS (ll_arg);
2638 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2639 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2640 int compcode;
2641
2642 switch (code)
2643 {
2644 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2645 compcode = lcompcode & rcompcode;
2646 break;
2647
2648 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2649 compcode = lcompcode | rcompcode;
2650 break;
2651
2652 default:
2653 return NULL_TREE;
2654 }
2655
2656 if (!honor_nans)
2657 {
2658 /* Eliminate unordered comparisons, as well as LTGT and ORD
2659 which are not used unless the mode has NaNs. */
2660 compcode &= ~COMPCODE_UNORD;
2661 if (compcode == COMPCODE_LTGT)
2662 compcode = COMPCODE_NE;
2663 else if (compcode == COMPCODE_ORD)
2664 compcode = COMPCODE_TRUE;
2665 }
2666 else if (flag_trapping_math)
2667 {
2668 /* Check that the original operation and the optimized ones will trap
2669 under the same condition. */
2670 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2671 && (lcompcode != COMPCODE_EQ)
2672 && (lcompcode != COMPCODE_ORD);
2673 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2674 && (rcompcode != COMPCODE_EQ)
2675 && (rcompcode != COMPCODE_ORD);
2676 bool trap = (compcode & COMPCODE_UNORD) == 0
2677 && (compcode != COMPCODE_EQ)
2678 && (compcode != COMPCODE_ORD);
2679
2680 /* In a short-circuited boolean expression the LHS might be
2681 such that the RHS, if evaluated, will never trap. For
2682 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2683 if neither x nor y is NaN. (This is a mixed blessing: for
2684 example, the expression above will never trap, hence
2685 optimizing it to x < y would be invalid). */
2686 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2687 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2688 rtrap = false;
2689
2690 /* If the comparison was short-circuited, and only the RHS
2691 trapped, we may now generate a spurious trap. */
2692 if (rtrap && !ltrap
2693 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2694 return NULL_TREE;
2695
2696 /* If we changed the conditions that cause a trap, we lose. */
2697 if ((ltrap || rtrap) != trap)
2698 return NULL_TREE;
2699 }
2700
2701 if (compcode == COMPCODE_TRUE)
2702 return constant_boolean_node (true, truth_type);
2703 else if (compcode == COMPCODE_FALSE)
2704 return constant_boolean_node (false, truth_type);
2705 else
2706 {
2707 enum tree_code tcode;
2708
2709 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2710 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2711 }
2712 }
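/* Illustrative sketch, a hypothetical caller not in the original file:
   folding (x <= y) && (x >= y) on the same operands into x == y via the
   compcode AND shown above.  */
#if 0
static tree
example_combine_comparisons (location_t loc, tree x, tree y)
{
  return combine_comparisons (loc, TRUTH_ANDIF_EXPR, LE_EXPR, GE_EXPR,
			      boolean_type_node, x, y);
}
#endif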
2713 \f
2714 /* Return nonzero if two operands (typically of the same tree node)
2715 are necessarily equal. FLAGS modifies behavior as follows:
2716
2717 If OEP_ONLY_CONST is set, only return nonzero for constants.
2718 This function tests whether the operands are indistinguishable;
2719 it does not test whether they are equal using C's == operation.
2720 The distinction is important for IEEE floating point, because
2721 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2722 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2723
2724 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2725 even though it may hold multiple values during a function.
2726 This is because a GCC tree node guarantees that nothing else is
2727 executed between the evaluation of its "operands" (which may often
2728 be evaluated in arbitrary order). Hence if the operands themselves
2729 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2730 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2731 unset means assuming isochronic (or instantaneous) tree equivalence.
2732 Unless comparing arbitrary expression trees, such as from different
2733 statements, this flag can usually be left unset.
2734
2735 If OEP_PURE_SAME is set, then pure functions with identical arguments
2736 are considered the same. It is used when the caller has other ways
2737 to ensure that global memory is unchanged in between.
2738
2739 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2740 not values of expressions.
2741
2742 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2743    any operand with side effects.  This is unnecessarily conservative in the
2744    case where we know that arg0 and arg1 are in disjoint code paths (such as in
2745 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2746 addresses with TREE_CONSTANT flag set so we know that &var == &var
2747 even if var is volatile. */
2748
2749 int
2750 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2751 {
2752 /* If either is ERROR_MARK, they aren't equal. */
2753 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2754 || TREE_TYPE (arg0) == error_mark_node
2755 || TREE_TYPE (arg1) == error_mark_node)
2756 return 0;
2757
2758   /* Similarly, if either does not have a type (like a released SSA name),
2759 they aren't equal. */
2760 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2761 return 0;
2762
2763   /* We cannot consider pointers to different address spaces equal.  */
2764 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2765 && POINTER_TYPE_P (TREE_TYPE (arg1))
2766 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2767 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2768 return 0;
2769
2770 /* Check equality of integer constants before bailing out due to
2771 precision differences. */
2772 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2773 {
2774 /* Address of INTEGER_CST is not defined; check that we did not forget
2775 to drop the OEP_ADDRESS_OF flags. */
2776 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2777 return tree_int_cst_equal (arg0, arg1);
2778 }
2779
2780 if (!(flags & OEP_ADDRESS_OF))
2781 {
2782       /* If the two types don't have the same signedness, then we can't consider
2783 them equal. We must check this before the STRIP_NOPS calls
2784 because they may change the signedness of the arguments. As pointers
2785 strictly don't have a signedness, require either two pointers or
2786 two non-pointers as well. */
2787 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2788 || POINTER_TYPE_P (TREE_TYPE (arg0))
2789 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2790 return 0;
2791
2792       /* If the two types don't have the same precision, then it is not safe
2793 to strip NOPs. */
2794 if (element_precision (TREE_TYPE (arg0))
2795 != element_precision (TREE_TYPE (arg1)))
2796 return 0;
2797
2798 STRIP_NOPS (arg0);
2799 STRIP_NOPS (arg1);
2800 }
2801 #if 0
2802   /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR.  Enable the
2803 sanity check once the issue is solved. */
2804 else
2805 /* Addresses of conversions and SSA_NAMEs (and many other things)
2806 are not defined. Check that we did not forget to drop the
2807 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2808 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2809 && TREE_CODE (arg0) != SSA_NAME);
2810 #endif
2811
2812 /* In case both args are comparisons but with different comparison
2813 code, try to swap the comparison operands of one arg to produce
2814 a match and compare that variant. */
2815 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2816 && COMPARISON_CLASS_P (arg0)
2817 && COMPARISON_CLASS_P (arg1))
2818 {
2819 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2820
2821 if (TREE_CODE (arg0) == swap_code)
2822 return operand_equal_p (TREE_OPERAND (arg0, 0),
2823 TREE_OPERAND (arg1, 1), flags)
2824 && operand_equal_p (TREE_OPERAND (arg0, 1),
2825 TREE_OPERAND (arg1, 0), flags);
2826 }
2827
2828 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2829 {
2830 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2831 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2832 ;
2833 else if (flags & OEP_ADDRESS_OF)
2834 {
2835 	  /* If we are interested in comparing addresses, ignore
2836 MEM_REF wrappings of the base that can appear just for
2837 TBAA reasons. */
2838 if (TREE_CODE (arg0) == MEM_REF
2839 && DECL_P (arg1)
2840 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2841 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2842 && integer_zerop (TREE_OPERAND (arg0, 1)))
2843 return 1;
2844 else if (TREE_CODE (arg1) == MEM_REF
2845 && DECL_P (arg0)
2846 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2847 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2848 && integer_zerop (TREE_OPERAND (arg1, 1)))
2849 return 1;
2850 return 0;
2851 }
2852 else
2853 return 0;
2854 }
2855
2856   /* When not checking addresses, this is needed for conversions and for
2857 COMPONENT_REF. Might as well play it safe and always test this. */
2858 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2859 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2860 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2861 && !(flags & OEP_ADDRESS_OF)))
2862 return 0;
2863
2864 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2865 We don't care about side effects in that case because the SAVE_EXPR
2866 takes care of that for us. In all other cases, two expressions are
2867 equal if they have no side effects. If we have two identical
2868 expressions with side effects that should be treated the same due
2869 to the only side effects being identical SAVE_EXPR's, that will
2870 be detected in the recursive calls below.
2871 If we are taking an invariant address of two identical objects
2872 they are necessarily equal as well. */
2873 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2874 && (TREE_CODE (arg0) == SAVE_EXPR
2875 || (flags & OEP_MATCH_SIDE_EFFECTS)
2876 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2877 return 1;
2878
2879 /* Next handle constant cases, those for which we can return 1 even
2880 if ONLY_CONST is set. */
2881 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2882 switch (TREE_CODE (arg0))
2883 {
2884 case INTEGER_CST:
2885 return tree_int_cst_equal (arg0, arg1);
2886
2887 case FIXED_CST:
2888 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2889 TREE_FIXED_CST (arg1));
2890
2891 case REAL_CST:
2892 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2893 return 1;
2894
2896 if (!HONOR_SIGNED_ZEROS (arg0))
2897 {
2898 /* If we do not distinguish between signed and unsigned zero,
2899 consider them equal. */
2900 if (real_zerop (arg0) && real_zerop (arg1))
2901 return 1;
2902 }
2903 return 0;
2904
2905 case VECTOR_CST:
2906 {
2907 unsigned i;
2908
2909 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2910 return 0;
2911
2912 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2913 {
2914 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2915 VECTOR_CST_ELT (arg1, i), flags))
2916 return 0;
2917 }
2918 return 1;
2919 }
2920
2921 case COMPLEX_CST:
2922 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2923 flags)
2924 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2925 flags));
2926
2927 case STRING_CST:
2928 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2929 && ! memcmp (TREE_STRING_POINTER (arg0),
2930 TREE_STRING_POINTER (arg1),
2931 TREE_STRING_LENGTH (arg0)));
2932
2933 case ADDR_EXPR:
2934 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2935 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2936 flags | OEP_ADDRESS_OF
2937 | OEP_MATCH_SIDE_EFFECTS);
2938 case CONSTRUCTOR:
2939 /* In GIMPLE empty constructors are allowed in initializers of
2940 aggregates. */
2941 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2942 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2943 default:
2944 break;
2945 }
2946
2947 if (flags & OEP_ONLY_CONST)
2948 return 0;
2949
2950 /* Define macros to test an operand from arg0 and arg1 for equality and a
2951 variant that allows null and views null as being different from any
2952    non-null value.  In the latter case, if either is null, then both
2953    must be; otherwise, do the normal comparison.  */
2954 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2955 TREE_OPERAND (arg1, N), flags)
2956
2957 #define OP_SAME_WITH_NULL(N) \
2958 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2959 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2960
2961 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2962 {
2963 case tcc_unary:
2964 /* Two conversions are equal only if signedness and modes match. */
2965 switch (TREE_CODE (arg0))
2966 {
2967 CASE_CONVERT:
2968 case FIX_TRUNC_EXPR:
2969 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2970 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2971 return 0;
2972 break;
2973 default:
2974 break;
2975 }
2976
2977 return OP_SAME (0);
2978
2979
2980 case tcc_comparison:
2981 case tcc_binary:
2982 if (OP_SAME (0) && OP_SAME (1))
2983 return 1;
2984
2985 /* For commutative ops, allow the other order. */
2986 return (commutative_tree_code (TREE_CODE (arg0))
2987 && operand_equal_p (TREE_OPERAND (arg0, 0),
2988 TREE_OPERAND (arg1, 1), flags)
2989 && operand_equal_p (TREE_OPERAND (arg0, 1),
2990 TREE_OPERAND (arg1, 0), flags));
2991
2992 case tcc_reference:
2993 /* If either of the pointer (or reference) expressions we are
2994 dereferencing contain a side effect, these cannot be equal,
2995 but their addresses can be. */
2996 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
2997 && (TREE_SIDE_EFFECTS (arg0)
2998 || TREE_SIDE_EFFECTS (arg1)))
2999 return 0;
3000
3001 switch (TREE_CODE (arg0))
3002 {
3003 case INDIRECT_REF:
3004 if (!(flags & OEP_ADDRESS_OF)
3005 && (TYPE_ALIGN (TREE_TYPE (arg0))
3006 != TYPE_ALIGN (TREE_TYPE (arg1))))
3007 return 0;
3008 flags &= ~OEP_ADDRESS_OF;
3009 return OP_SAME (0);
3010
3011 case IMAGPART_EXPR:
3012 /* Require the same offset. */
3013 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3014 TYPE_SIZE (TREE_TYPE (arg1)),
3015 flags & ~OEP_ADDRESS_OF))
3016 return 0;
3017
3018 /* Fallthru. */
3019 case REALPART_EXPR:
3020 case VIEW_CONVERT_EXPR:
3021 return OP_SAME (0);
3022
3023 case TARGET_MEM_REF:
3024 case MEM_REF:
3025 if (!(flags & OEP_ADDRESS_OF))
3026 {
3027 /* Require equal access sizes */
3028 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3029 && (!TYPE_SIZE (TREE_TYPE (arg0))
3030 || !TYPE_SIZE (TREE_TYPE (arg1))
3031 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3032 TYPE_SIZE (TREE_TYPE (arg1)),
3033 flags)))
3034 return 0;
3035 /* Verify that accesses are TBAA compatible. */
3036 if (!alias_ptr_types_compatible_p
3037 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3038 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3039 || (MR_DEPENDENCE_CLIQUE (arg0)
3040 != MR_DEPENDENCE_CLIQUE (arg1))
3041 || (MR_DEPENDENCE_BASE (arg0)
3042 != MR_DEPENDENCE_BASE (arg1)))
3043 return 0;
3044 /* Verify that alignment is compatible. */
3045 if (TYPE_ALIGN (TREE_TYPE (arg0))
3046 != TYPE_ALIGN (TREE_TYPE (arg1)))
3047 return 0;
3048 }
3049 flags &= ~OEP_ADDRESS_OF;
3050 return (OP_SAME (0) && OP_SAME (1)
3051 		  /* TARGET_MEM_REFs require equal extra operands.  */
3052 && (TREE_CODE (arg0) != TARGET_MEM_REF
3053 || (OP_SAME_WITH_NULL (2)
3054 && OP_SAME_WITH_NULL (3)
3055 && OP_SAME_WITH_NULL (4))));
3056
3057 case ARRAY_REF:
3058 case ARRAY_RANGE_REF:
3059 if (!OP_SAME (0))
3060 return 0;
3061 flags &= ~OEP_ADDRESS_OF;
3062 	  /* First compare the array index by value if it is constant, as we
3063 	     may have different types but the same value here.  */
3064 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3065 TREE_OPERAND (arg1, 1))
3066 || OP_SAME (1))
3067 && OP_SAME_WITH_NULL (2)
3068 && OP_SAME_WITH_NULL (3)
3069 /* Compare low bound and element size as with OEP_ADDRESS_OF
3070 we have to account for the offset of the ref. */
3071 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3072 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3073 || (operand_equal_p (array_ref_low_bound
3074 (CONST_CAST_TREE (arg0)),
3075 array_ref_low_bound
3076 (CONST_CAST_TREE (arg1)), flags)
3077 && operand_equal_p (array_ref_element_size
3078 (CONST_CAST_TREE (arg0)),
3079 array_ref_element_size
3080 (CONST_CAST_TREE (arg1)),
3081 flags))));
3082
3083 case COMPONENT_REF:
3084 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3085 may be NULL when we're called to compare MEM_EXPRs. */
3086 if (!OP_SAME_WITH_NULL (0)
3087 || !OP_SAME (1))
3088 return 0;
3089 flags &= ~OEP_ADDRESS_OF;
3090 return OP_SAME_WITH_NULL (2);
3091
3092 case BIT_FIELD_REF:
3093 if (!OP_SAME (0))
3094 return 0;
3095 flags &= ~OEP_ADDRESS_OF;
3096 return OP_SAME (1) && OP_SAME (2);
3097
3098 default:
3099 return 0;
3100 }
3101
3102 case tcc_expression:
3103 switch (TREE_CODE (arg0))
3104 {
3105 case ADDR_EXPR:
3106 /* Be sure we pass right ADDRESS_OF flag. */
3107 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3108 return operand_equal_p (TREE_OPERAND (arg0, 0),
3109 TREE_OPERAND (arg1, 0),
3110 flags | OEP_ADDRESS_OF);
3111
3112 case TRUTH_NOT_EXPR:
3113 return OP_SAME (0);
3114
3115 case TRUTH_ANDIF_EXPR:
3116 case TRUTH_ORIF_EXPR:
3117 return OP_SAME (0) && OP_SAME (1);
3118
3119 case FMA_EXPR:
3120 case WIDEN_MULT_PLUS_EXPR:
3121 case WIDEN_MULT_MINUS_EXPR:
3122 if (!OP_SAME (2))
3123 return 0;
3124 	  /* The multiplication operands are commutative.  */
3125 /* FALLTHRU */
3126
3127 case TRUTH_AND_EXPR:
3128 case TRUTH_OR_EXPR:
3129 case TRUTH_XOR_EXPR:
3130 if (OP_SAME (0) && OP_SAME (1))
3131 return 1;
3132
3133 /* Otherwise take into account this is a commutative operation. */
3134 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3135 TREE_OPERAND (arg1, 1), flags)
3136 && operand_equal_p (TREE_OPERAND (arg0, 1),
3137 TREE_OPERAND (arg1, 0), flags));
3138
3139 case COND_EXPR:
3140 if (! OP_SAME (1) || ! OP_SAME (2))
3141 return 0;
3142 flags &= ~OEP_ADDRESS_OF;
3143 return OP_SAME (0);
3144
3145 case VEC_COND_EXPR:
3146 case DOT_PROD_EXPR:
3147 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3148
3149 default:
3150 return 0;
3151 }
3152
3153 case tcc_vl_exp:
3154 switch (TREE_CODE (arg0))
3155 {
3156 case CALL_EXPR:
3157 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3158 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3159 	    /* If one CALL_EXPR is an internal function call and the other
3160 	       is a normal call, then they are not equal.  */
3161 return 0;
3162 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3163 {
3164 /* If the CALL_EXPRs call different internal functions, then they
3165 are not equal. */
3166 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3167 return 0;
3168 }
3169 else
3170 {
3171 /* If the CALL_EXPRs call different functions, then they are not
3172 equal. */
3173 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3174 flags))
3175 return 0;
3176 }
3177
3178 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3179 {
3180 unsigned int cef = call_expr_flags (arg0);
3181 if (flags & OEP_PURE_SAME)
3182 cef &= ECF_CONST | ECF_PURE;
3183 else
3184 cef &= ECF_CONST;
3185 if (!cef)
3186 return 0;
3187 }
3188
3189 /* Now see if all the arguments are the same. */
3190 {
3191 const_call_expr_arg_iterator iter0, iter1;
3192 const_tree a0, a1;
3193 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3194 a1 = first_const_call_expr_arg (arg1, &iter1);
3195 a0 && a1;
3196 a0 = next_const_call_expr_arg (&iter0),
3197 a1 = next_const_call_expr_arg (&iter1))
3198 if (! operand_equal_p (a0, a1, flags))
3199 return 0;
3200
3201 /* If we get here and both argument lists are exhausted
3202 then the CALL_EXPRs are equal. */
3203 return ! (a0 || a1);
3204 }
3205 default:
3206 return 0;
3207 }
3208
3209 case tcc_declaration:
3210 /* Consider __builtin_sqrt equal to sqrt. */
3211 return (TREE_CODE (arg0) == FUNCTION_DECL
3212 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3213 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3214 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3215
3216 case tcc_exceptional:
3217 if (TREE_CODE (arg0) == CONSTRUCTOR)
3218 {
3219 /* In GIMPLE constructors are used only to build vectors from
3220 elements. Individual elements in the constructor must be
3221 indexed in increasing order and form an initial sequence.
3222
3223 	     We make no effort to compare constructors in GENERIC.
3224 (see sem_variable::equals in ipa-icf which can do so for
3225 constants). */
3226 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3227 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3228 return 0;
3229
3230 	  /* Be sure that the constructed vectors have the same representation.
3231 	     So far we have only tested that the element precisions and modes
3232 	     match.  Vectors may be BLKmode, so also check that the numbers of
3233 	     parts match.  */
3234 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3235 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3236 return 0;
3237
3238 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3239 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3240 unsigned int len = vec_safe_length (v0);
3241
3242 if (len != vec_safe_length (v1))
3243 return 0;
3244
3245 for (unsigned int i = 0; i < len; i++)
3246 {
3247 constructor_elt *c0 = &(*v0)[i];
3248 constructor_elt *c1 = &(*v1)[i];
3249
3250 if (!operand_equal_p (c0->value, c1->value, flags)
3251 /* In GIMPLE the indexes can be either NULL or matching i.
3252 Double check this so we won't get false
3253 positives for GENERIC. */
3254 || (c0->index
3255 && (TREE_CODE (c0->index) != INTEGER_CST
3256 || !compare_tree_int (c0->index, i)))
3257 || (c1->index
3258 && (TREE_CODE (c1->index) != INTEGER_CST
3259 || !compare_tree_int (c1->index, i))))
3260 return 0;
3261 }
3262 return 1;
3263 }
3264 return 0;
3265
3266 default:
3267 return 0;
3268 }
3269
3270 #undef OP_SAME
3271 #undef OP_SAME_WITH_NULL
3272 }
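/* Illustrative sketch, not part of the original file: typical uses of
   the flags documented above.  */
#if 0
static bool
example_operand_equal (tree a, tree b)
{
  /* Value equality; a VAR_DECL is considered equal to itself.  */
  if (operand_equal_p (a, b, 0))
    return true;
  /* Address equality: &var == &var even if var is volatile.  */
  return operand_equal_p (a, b, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
}
#endif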
3273 \f
3274 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3275 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3276
3277 When in doubt, return 0. */
3278
3279 static int
3280 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3281 {
3282 int unsignedp1, unsignedpo;
3283 tree primarg0, primarg1, primother;
3284 unsigned int correct_width;
3285
3286 if (operand_equal_p (arg0, arg1, 0))
3287 return 1;
3288
3289 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3290 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3291 return 0;
3292
3293 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3294 and see if the inner values are the same. This removes any
3295 signedness comparison, which doesn't matter here. */
3296 primarg0 = arg0, primarg1 = arg1;
3297 STRIP_NOPS (primarg0);
3298 STRIP_NOPS (primarg1);
3299 if (operand_equal_p (primarg0, primarg1, 0))
3300 return 1;
3301
3302 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3303 actual comparison operand, ARG0.
3304
3305 First throw away any conversions to wider types
3306 already present in the operands. */
3307
3308 primarg1 = get_narrower (arg1, &unsignedp1);
3309 primother = get_narrower (other, &unsignedpo);
3310
3311 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3312 if (unsignedp1 == unsignedpo
3313 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3314 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3315 {
3316 tree type = TREE_TYPE (arg0);
3317
3318 /* Make sure shorter operand is extended the right way
3319 to match the longer operand. */
3320 primarg1 = fold_convert (signed_or_unsigned_type_for
3321 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3322
3323 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3324 return 1;
3325 }
3326
3327 return 0;
3328 }
3329 \f
3330 /* See if ARG is an expression that is either a comparison or is performing
3331 arithmetic on comparisons. The comparisons must only be comparing
3332 two different values, which will be stored in *CVAL1 and *CVAL2; if
3333 they are nonzero it means that some operands have already been found.
3334 No variables may be used anywhere else in the expression except in the
3335 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3336 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3337
3338 If this is true, return 1. Otherwise, return zero. */
3339
3340 static int
3341 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3342 {
3343 enum tree_code code = TREE_CODE (arg);
3344 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3345
3346 /* We can handle some of the tcc_expression cases here. */
3347 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3348 tclass = tcc_unary;
3349 else if (tclass == tcc_expression
3350 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3351 || code == COMPOUND_EXPR))
3352 tclass = tcc_binary;
3353
3354 else if (tclass == tcc_expression && code == SAVE_EXPR
3355 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3356 {
3357 /* If we've already found a CVAL1 or CVAL2, this expression is
3358 	 too complex to handle.  */
3359 if (*cval1 || *cval2)
3360 return 0;
3361
3362 tclass = tcc_unary;
3363 *save_p = 1;
3364 }
3365
3366 switch (tclass)
3367 {
3368 case tcc_unary:
3369 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3370
3371 case tcc_binary:
3372 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3373 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3374 cval1, cval2, save_p));
3375
3376 case tcc_constant:
3377 return 1;
3378
3379 case tcc_expression:
3380 if (code == COND_EXPR)
3381 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3382 cval1, cval2, save_p)
3383 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3384 cval1, cval2, save_p)
3385 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3386 cval1, cval2, save_p));
3387 return 0;
3388
3389 case tcc_comparison:
3390 /* First see if we can handle the first operand, then the second. For
3391 the second operand, we know *CVAL1 can't be zero. It must be that
3392 one side of the comparison is each of the values; test for the
3393 case where this isn't true by failing if the two operands
3394 are the same. */
3395
3396 if (operand_equal_p (TREE_OPERAND (arg, 0),
3397 TREE_OPERAND (arg, 1), 0))
3398 return 0;
3399
3400 if (*cval1 == 0)
3401 *cval1 = TREE_OPERAND (arg, 0);
3402 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3403 ;
3404 else if (*cval2 == 0)
3405 *cval2 = TREE_OPERAND (arg, 0);
3406 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3407 ;
3408 else
3409 return 0;
3410
3411 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3412 ;
3413 else if (*cval2 == 0)
3414 *cval2 = TREE_OPERAND (arg, 1);
3415 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3416 ;
3417 else
3418 return 0;
3419
3420 return 1;
3421
3422 default:
3423 return 0;
3424 }
3425 }
3426 \f
3427 /* ARG is a tree that is known to contain just arithmetic operations and
3428 comparisons. Evaluate the operations in the tree substituting NEW0 for
3429 any occurrence of OLD0 as an operand of a comparison and likewise for
3430 NEW1 and OLD1. */
3431
3432 static tree
3433 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3434 tree old1, tree new1)
3435 {
3436 tree type = TREE_TYPE (arg);
3437 enum tree_code code = TREE_CODE (arg);
3438 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3439
3440 /* We can handle some of the tcc_expression cases here. */
3441 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3442 tclass = tcc_unary;
3443 else if (tclass == tcc_expression
3444 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3445 tclass = tcc_binary;
3446
3447 switch (tclass)
3448 {
3449 case tcc_unary:
3450 return fold_build1_loc (loc, code, type,
3451 eval_subst (loc, TREE_OPERAND (arg, 0),
3452 old0, new0, old1, new1));
3453
3454 case tcc_binary:
3455 return fold_build2_loc (loc, code, type,
3456 eval_subst (loc, TREE_OPERAND (arg, 0),
3457 old0, new0, old1, new1),
3458 eval_subst (loc, TREE_OPERAND (arg, 1),
3459 old0, new0, old1, new1));
3460
3461 case tcc_expression:
3462 switch (code)
3463 {
3464 case SAVE_EXPR:
3465 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3466 old1, new1);
3467
3468 case COMPOUND_EXPR:
3469 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3470 old1, new1);
3471
3472 case COND_EXPR:
3473 return fold_build3_loc (loc, code, type,
3474 eval_subst (loc, TREE_OPERAND (arg, 0),
3475 old0, new0, old1, new1),
3476 eval_subst (loc, TREE_OPERAND (arg, 1),
3477 old0, new0, old1, new1),
3478 eval_subst (loc, TREE_OPERAND (arg, 2),
3479 old0, new0, old1, new1));
3480 default:
3481 break;
3482 }
3483 /* Fall through - ??? */
3484
3485 case tcc_comparison:
3486 {
3487 tree arg0 = TREE_OPERAND (arg, 0);
3488 tree arg1 = TREE_OPERAND (arg, 1);
3489
3490 /* We need to check both for exact equality and tree equality. The
3491 former will be true if the operand has a side-effect. In that
3492 case, we know the operand occurred exactly once. */
3493
3494 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3495 arg0 = new0;
3496 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3497 arg0 = new1;
3498
3499 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3500 arg1 = new0;
3501 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3502 arg1 = new1;
3503
3504 return fold_build2_loc (loc, code, type, arg0, arg1);
3505 }
3506
3507 default:
3508 return arg;
3509 }
3510 }
3511 \f
3512 /* Return a tree for the case when the result of an expression is RESULT
3513 converted to TYPE and OMITTED was previously an operand of the expression
3514 but is now not needed (e.g., we folded OMITTED * 0).
3515
3516 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3517 the conversion of RESULT to TYPE. */
3518
3519 tree
3520 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3521 {
3522 tree t = fold_convert_loc (loc, type, result);
3523
3524 /* If the resulting operand is an empty statement, just return the omitted
3525      statement cast to void.  */
3526 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3527 return build1_loc (loc, NOP_EXPR, void_type_node,
3528 fold_ignored_result (omitted));
3529
3530 if (TREE_SIDE_EFFECTS (omitted))
3531 return build2_loc (loc, COMPOUND_EXPR, type,
3532 fold_ignored_result (omitted), t);
3533
3534 return non_lvalue_loc (loc, t);
3535 }
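/* Illustrative sketch, a hypothetical caller not in the original file:
   after folding X * 0 to 0, a side-effecting X must still be evaluated,
   so the result becomes the COMPOUND_EXPR (X, 0).  */
#if 0
static tree
example_omit_multiplicand (location_t loc, tree x)
{
  return omit_one_operand_loc (loc, TREE_TYPE (x),
			       build_zero_cst (TREE_TYPE (x)), x);
}
#endif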
3536
3537 /* Return a tree for the case when the result of an expression is RESULT
3538 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3539 of the expression but are now not needed.
3540
3541 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3542 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3543 evaluated before OMITTED2. Otherwise, if neither has side effects,
3544 just do the conversion of RESULT to TYPE. */
3545
3546 tree
3547 omit_two_operands_loc (location_t loc, tree type, tree result,
3548 tree omitted1, tree omitted2)
3549 {
3550 tree t = fold_convert_loc (loc, type, result);
3551
3552 if (TREE_SIDE_EFFECTS (omitted2))
3553 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3554 if (TREE_SIDE_EFFECTS (omitted1))
3555 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3556
3557 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3558 }
3559
3560 \f
3561 /* Return a simplified tree node for the truth-negation of ARG. This
3562 never alters ARG itself. We assume that ARG is an operation that
3563 returns a truth value (0 or 1).
3564
3565 FIXME: one would think we would fold the result, but it causes
3566 problems with the dominator optimizer. */
3567
3568 static tree
3569 fold_truth_not_expr (location_t loc, tree arg)
3570 {
3571 tree type = TREE_TYPE (arg);
3572 enum tree_code code = TREE_CODE (arg);
3573 location_t loc1, loc2;
3574
3575 /* If this is a comparison, we can simply invert it, except for
3576 floating-point non-equality comparisons, in which case we just
3577 enclose a TRUTH_NOT_EXPR around what we have. */
3578
3579 if (TREE_CODE_CLASS (code) == tcc_comparison)
3580 {
3581 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3582 if (FLOAT_TYPE_P (op_type)
3583 && flag_trapping_math
3584 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3585 && code != NE_EXPR && code != EQ_EXPR)
3586 return NULL_TREE;
3587
3588 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3589 if (code == ERROR_MARK)
3590 return NULL_TREE;
3591
3592 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3593 TREE_OPERAND (arg, 1));
3594 if (TREE_NO_WARNING (arg))
3595 TREE_NO_WARNING (ret) = 1;
3596 return ret;
3597 }
3598
3599 switch (code)
3600 {
3601 case INTEGER_CST:
3602 return constant_boolean_node (integer_zerop (arg), type);
3603
3604 case TRUTH_AND_EXPR:
3605 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3606 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3607 return build2_loc (loc, TRUTH_OR_EXPR, type,
3608 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3609 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3610
3611 case TRUTH_OR_EXPR:
3612 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3613 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3614 return build2_loc (loc, TRUTH_AND_EXPR, type,
3615 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3616 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3617
3618 case TRUTH_XOR_EXPR:
3619 /* Here we can invert either operand. We invert the first operand
3620 unless the second operand is a TRUTH_NOT_EXPR in which case our
3621 result is the XOR of the first operand with the inside of the
3622 negation of the second operand. */
3623
3624 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3625 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3626 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3627 else
3628 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3629 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3630 TREE_OPERAND (arg, 1));
3631
3632 case TRUTH_ANDIF_EXPR:
3633 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3634 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3635 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3636 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3637 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3638
3639 case TRUTH_ORIF_EXPR:
3640 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3641 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3642 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3643 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3644 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3645
3646 case TRUTH_NOT_EXPR:
3647 return TREE_OPERAND (arg, 0);
3648
3649 case COND_EXPR:
3650 {
3651 tree arg1 = TREE_OPERAND (arg, 1);
3652 tree arg2 = TREE_OPERAND (arg, 2);
3653
3654 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3655 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3656
3657 /* A COND_EXPR may have a throw as one operand, which
3658 then has void type. Just leave void operands
3659 as they are. */
3660 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3661 VOID_TYPE_P (TREE_TYPE (arg1))
3662 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3663 VOID_TYPE_P (TREE_TYPE (arg2))
3664 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3665 }
3666
3667 case COMPOUND_EXPR:
3668 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3669 return build2_loc (loc, COMPOUND_EXPR, type,
3670 TREE_OPERAND (arg, 0),
3671 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3672
3673 case NON_LVALUE_EXPR:
3674 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3675 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3676
3677 CASE_CONVERT:
3678 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3679 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3680
3681 /* ... fall through ... */
3682
3683 case FLOAT_EXPR:
3684 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3685 return build1_loc (loc, TREE_CODE (arg), type,
3686 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3687
3688 case BIT_AND_EXPR:
3689 if (!integer_onep (TREE_OPERAND (arg, 1)))
3690 return NULL_TREE;
3691 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3692
3693 case SAVE_EXPR:
3694 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3695
3696 case CLEANUP_POINT_EXPR:
3697 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3698 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3699 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3700
3701 default:
3702 return NULL_TREE;
3703 }
3704 }
3705
3706 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3707 assume that ARG is an operation that returns a truth value (0 or 1
3708 for scalars, 0 or -1 for vectors). Return the folded expression if
3709 folding is successful. Otherwise, return NULL_TREE. */
3710
3711 static tree
3712 fold_invert_truthvalue (location_t loc, tree arg)
3713 {
3714 tree type = TREE_TYPE (arg);
3715 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3716 ? BIT_NOT_EXPR
3717 : TRUTH_NOT_EXPR,
3718 type, arg);
3719 }
3720
3721 /* Return a simplified tree node for the truth-negation of ARG. This
3722 never alters ARG itself. We assume that ARG is an operation that
3723 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3724
3725 tree
3726 invert_truthvalue_loc (location_t loc, tree arg)
3727 {
3728 if (TREE_CODE (arg) == ERROR_MARK)
3729 return arg;
3730
3731 tree type = TREE_TYPE (arg);
3732 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3733 ? BIT_NOT_EXPR
3734 : TRUTH_NOT_EXPR,
3735 type, arg);
3736 }
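/* Illustrative sketch (not from the original sources): at the source level
   these entry points perform De Morgan-style rewrites, e.g.

     invert_truthvalue_loc (loc, a && b)  =>  !a || !b
     invert_truthvalue_loc (loc, a < b)   =>  a >= b  (when NaNs need not
                                                       be honored)
     invert_truthvalue_loc (loc, !a)      =>  a

   For vector types the negation is a BIT_NOT_EXPR, since vector truth
   values are 0 or -1 per element. */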
3737
3738 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3739 with code CODE. This optimization reassociates real division and can
change rounding, so it is unsafe; callers are expected to guard it with
flag_unsafe_math_optimizations. */
3740 static tree
3741 distribute_real_division (location_t loc, enum tree_code code, tree type,
3742 tree arg0, tree arg1)
3743 {
3744 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3745 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3746
3747 /* (A / C) +- (B / C) -> (A +- B) / C (and similarly for MULT_EXPR). */
3748 if (mul0 == mul1
3749 && operand_equal_p (TREE_OPERAND (arg0, 1),
3750 TREE_OPERAND (arg1, 1), 0))
3751 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3752 fold_build2_loc (loc, code, type,
3753 TREE_OPERAND (arg0, 0),
3754 TREE_OPERAND (arg1, 0)),
3755 TREE_OPERAND (arg0, 1));
3756
3757 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3758 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3759 TREE_OPERAND (arg1, 0), 0)
3760 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3761 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3762 {
3763 REAL_VALUE_TYPE r0, r1;
3764 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3765 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3766 if (!mul0)
3767 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3768 if (!mul1)
3769 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3770 real_arithmetic (&r0, code, &r0, &r1);
3771 return fold_build2_loc (loc, MULT_EXPR, type,
3772 TREE_OPERAND (arg0, 0),
3773 build_real (type, r0));
3774 }
3775
3776 return NULL_TREE;
3777 }
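/* Worked example (illustrative only): under -funsafe-math-optimizations,

     a / c + b / c      =>  (a + b) / c
     a / 3.0 + a / 6.0  =>  a * (1/3.0 + 1/6.0)  ==  a * 0.5

   The second pattern precomputes 1/C1 +- 1/C2 with real_arithmetic, which
   is why the result may round differently from the original expression. */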
3778 \f
3779 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3780 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3781 and uses reverse storage order if REVERSEP is nonzero. */
3782
3783 static tree
3784 make_bit_field_ref (location_t loc, tree inner, tree type,
3785 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3786 int unsignedp, int reversep)
3787 {
3788 tree result, bftype;
3789
3790 if (bitpos == 0 && !reversep)
3791 {
3792 tree size = TYPE_SIZE (TREE_TYPE (inner));
3793 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3794 || POINTER_TYPE_P (TREE_TYPE (inner)))
3795 && tree_fits_shwi_p (size)
3796 && tree_to_shwi (size) == bitsize)
3797 return fold_convert_loc (loc, type, inner);
3798 }
3799
3800 bftype = type;
3801 if (TYPE_PRECISION (bftype) != bitsize
3802 || TYPE_UNSIGNED (bftype) == !unsignedp)
3803 bftype = build_nonstandard_integer_type (bitsize, 0);
3804
3805 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3806 size_int (bitsize), bitsize_int (bitpos));
3807 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3808
3809 if (bftype != type)
3810 result = fold_convert_loc (loc, type, result);
3811
3812 return result;
3813 }
3814
3815 /* Optimize a bit-field compare.
3816
3817 There are two cases: the first is a compare against a constant, and
3818 the second is a comparison of two items where the fields are at the same
3819 bit position relative to the start of a chunk (byte, halfword, word)
3820 large enough to contain it. In these cases we can avoid the shift
3821 implicit in bitfield extractions.
3822
3823 For constants, we emit a compare of the shifted constant with the
3824 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3825 compared. For two fields at the same position, we do the ANDs with the
3826 similar mask and compare the result of the ANDs.
3827
3828 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3829 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3830 are the left and right operands of the comparison, respectively.
3831
3832 If the optimization described above can be done, we return the resulting
3833 tree. Otherwise we return zero. */
3834
3835 static tree
3836 optimize_bit_field_compare (location_t loc, enum tree_code code,
3837 tree compare_type, tree lhs, tree rhs)
3838 {
3839 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3840 tree type = TREE_TYPE (lhs);
3841 tree unsigned_type;
3842 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3843 machine_mode lmode, rmode, nmode;
3844 int lunsignedp, runsignedp;
3845 int lreversep, rreversep;
3846 int lvolatilep = 0, rvolatilep = 0;
3847 tree linner, rinner = NULL_TREE;
3848 tree mask;
3849 tree offset;
3850
3851 /* Get all the information about the extractions being done. If the bit size
3852 is the same as the size of the underlying object, we aren't doing an
3853 extraction at all and so can do nothing. We also don't want to
3854 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3855 then will no longer be able to replace it. */
3856 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3857 &lunsignedp, &lreversep, &lvolatilep, false);
3858 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3859 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3860 return 0;
3861
3862 if (const_p)
3863 rreversep = lreversep;
3864 else
3865 {
3866 /* If this is not a constant, we can only do something if bit positions,
3867 sizes, signedness and storage order are the same. */
3868 rinner
3869 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3870 &runsignedp, &rreversep, &rvolatilep, false);
3871
3872 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3873 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3874 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3875 return 0;
3876 }
3877
3878 /* See if we can find a mode to refer to this field. We should be able to,
3879 but fail if we can't. */
3880 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3881 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3882 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3883 TYPE_ALIGN (TREE_TYPE (rinner))),
3884 word_mode, false);
3885 if (nmode == VOIDmode)
3886 return 0;
3887
3888 /* Set the unsigned type of the precision of this mode for the
3889 shifts below. */
3890 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3891
3892 /* Compute the bit position and size for the new reference and our offset
3893 within it. If the new reference is the same size as the original, we
3894 won't optimize anything, so return zero. */
3895 nbitsize = GET_MODE_BITSIZE (nmode);
3896 nbitpos = lbitpos & ~ (nbitsize - 1);
3897 lbitpos -= nbitpos;
3898 if (nbitsize == lbitsize)
3899 return 0;
3900
3901 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3902 lbitpos = nbitsize - lbitsize - lbitpos;
3903
3904 /* Make the mask to be used against the extracted field. */
3905 mask = build_int_cst_type (unsigned_type, -1);
3906 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3907 mask = const_binop (RSHIFT_EXPR, mask,
3908 size_int (nbitsize - lbitsize - lbitpos));
3909
3910 if (! const_p)
3911 /* If not comparing with constant, just rework the comparison
3912 and return. */
3913 return fold_build2_loc (loc, code, compare_type,
3914 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3915 make_bit_field_ref (loc, linner,
3916 unsigned_type,
3917 nbitsize, nbitpos,
3918 1, lreversep),
3919 mask),
3920 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3921 make_bit_field_ref (loc, rinner,
3922 unsigned_type,
3923 nbitsize, nbitpos,
3924 1, rreversep),
3925 mask));
3926
3927 /* Otherwise, we are handling the constant case. See if the constant is too
3928 big for the field. Warn and return the constant result of the
comparison (always true or always false) if so. We do
3929 this not only for its own sake, but to avoid having to test for this
3930 error case below. If we didn't, we might generate wrong code.
3931
3932 For unsigned fields, the constant shifted right by the field length should
3933 be all zero. For signed fields, the high-order bits should agree with
3934 the sign bit. */
3935
3936 if (lunsignedp)
3937 {
3938 if (wi::lrshift (rhs, lbitsize) != 0)
3939 {
3940 warning (0, "comparison is always %d due to width of bit-field",
3941 code == NE_EXPR);
3942 return constant_boolean_node (code == NE_EXPR, compare_type);
3943 }
3944 }
3945 else
3946 {
3947 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3948 if (tem != 0 && tem != -1)
3949 {
3950 warning (0, "comparison is always %d due to width of bit-field",
3951 code == NE_EXPR);
3952 return constant_boolean_node (code == NE_EXPR, compare_type);
3953 }
3954 }
3955
3956 /* Single-bit compares should always be against zero. */
3957 if (lbitsize == 1 && ! integer_zerop (rhs))
3958 {
3959 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3960 rhs = build_int_cst (type, 0);
3961 }
3962
3963 /* Make a new bitfield reference, shift the constant over the
3964 appropriate number of bits and mask it with the computed mask
3965 (in case this was a signed field). */
3966 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3967 lreversep);
3968
3969 rhs = const_binop (BIT_AND_EXPR,
3970 const_binop (LSHIFT_EXPR,
3971 fold_convert_loc (loc, unsigned_type, rhs),
3972 size_int (lbitpos)),
3973 mask);
3974
3975 lhs = build2_loc (loc, code, compare_type,
3976 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3977 return lhs;
3978 }
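/* Illustrative example (hypothetical source, values invented):

     struct s { unsigned f : 3; unsigned g : 5; } x;
     ... x.f == 5 ...

   can be rewritten to roughly

     (WORD & MASK) == (5 << SHIFT)

   where WORD is the nbitsize-bit chunk containing the field and MASK and
   SHIFT follow from nbitpos/lbitpos above, avoiding the extract-and-shift
   sequence of a plain bit-field read. */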
3979 \f
3980 /* Subroutine for fold_truth_andor_1: decode a field reference.
3981
3982 If EXP is a comparison reference, we return the innermost reference.
3983
3984 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3985 set to the starting bit number.
3986
3987 If the innermost field can be completely contained in a mode-sized
3988 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3989
3990 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3991 otherwise it is not changed.
3992
3993 *PUNSIGNEDP is set to the signedness of the field.
3994
3995 *PREVERSEP is set to the storage order of the field.
3996
3997 *PMASK is set to the mask used. This is either contained in a
3998 BIT_AND_EXPR or derived from the width of the field.
3999
4000 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4001
4002 Return 0 if this is not a component reference or is one that we can't
4003 do anything with. */
4004
4005 static tree
4006 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
4007 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4008 int *punsignedp, int *preversep, int *pvolatilep,
4009 tree *pmask, tree *pand_mask)
4010 {
4011 tree outer_type = 0;
4012 tree and_mask = 0;
4013 tree mask, inner, offset;
4014 tree unsigned_type;
4015 unsigned int precision;
4016
4017 /* All the optimizations using this function assume integer fields.
4018 There are problems with FP fields since the type_for_size call
4019 below can fail for, e.g., XFmode. */
4020 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4021 return 0;
4022
4023 /* We are interested in the bare arrangement of bits, so strip everything
4024 that doesn't affect the machine mode. However, record the type of the
4025 outermost expression if it may matter below. */
4026 if (CONVERT_EXPR_P (exp)
4027 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4028 outer_type = TREE_TYPE (exp);
4029 STRIP_NOPS (exp);
4030
4031 if (TREE_CODE (exp) == BIT_AND_EXPR)
4032 {
4033 and_mask = TREE_OPERAND (exp, 1);
4034 exp = TREE_OPERAND (exp, 0);
4035 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4036 if (TREE_CODE (and_mask) != INTEGER_CST)
4037 return 0;
4038 }
4039
4040 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4041 punsignedp, preversep, pvolatilep, false);
4042 if ((inner == exp && and_mask == 0)
4043 || *pbitsize < 0 || offset != 0
4044 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4045 return 0;
4046
4047 /* If the number of bits in the reference is the same as the bitsize of
4048 the outer type, then the outer type gives the signedness. Otherwise
4049 (in case of a small bitfield) the signedness is unchanged. */
4050 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4051 *punsignedp = TYPE_UNSIGNED (outer_type);
4052
4053 /* Compute the mask to access the bitfield. */
4054 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4055 precision = TYPE_PRECISION (unsigned_type);
4056
4057 mask = build_int_cst_type (unsigned_type, -1);
4058
4059 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4060 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4061
4062 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4063 if (and_mask != 0)
4064 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4065 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4066
4067 *pmask = mask;
4068 *pand_mask = and_mask;
4069 return inner;
4070 }
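/* Illustrative example (hypothetical values): for an EXP such as x.f & 3,
   with f a bit-field member, the returned inner reference is the
   underlying object, *PBITSIZE/*PBITPOS describe the bits of f,
   *PAND_MASK is 3 and *PMASK is 3 merged with the mask implied by the
   width of f. */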
4071
4072 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4073 bit positions and the type of MASK is signed. */
4074
4075 static int
4076 all_ones_mask_p (const_tree mask, unsigned int size)
4077 {
4078 tree type = TREE_TYPE (mask);
4079 unsigned int precision = TYPE_PRECISION (type);
4080
4081 /* If this function returns true when the type of the mask is
4082 UNSIGNED, then there will be errors. In particular see
4083 gcc.c-torture/execute/990326-1.c. There does not appear to be
4084 any documentation paper trail as to why this is so. But the
4085 pre-wide-int code worked with that restriction, and it has been
4086 preserved here. */
4087 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4088 return false;
4089
4090 return wi::mask (size, false, precision) == mask;
4091 }
4092
4093 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4094 represents the sign bit of EXP's type. If EXP represents a sign
4095 or zero extension, also test VAL against the unextended type.
4096 The return value is the (sub)expression whose sign bit is VAL,
4097 or NULL_TREE otherwise. */
4098
4099 tree
4100 sign_bit_p (tree exp, const_tree val)
4101 {
4102 int width;
4103 tree t;
4104
4105 /* Tree EXP must have an integral type. */
4106 t = TREE_TYPE (exp);
4107 if (! INTEGRAL_TYPE_P (t))
4108 return NULL_TREE;
4109
4110 /* Tree VAL must be an integer constant. */
4111 if (TREE_CODE (val) != INTEGER_CST
4112 || TREE_OVERFLOW (val))
4113 return NULL_TREE;
4114
4115 width = TYPE_PRECISION (t);
4116 if (wi::only_sign_bit_p (val, width))
4117 return exp;
4118
4119 /* Handle extension from a narrower type. */
4120 if (TREE_CODE (exp) == NOP_EXPR
4121 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4122 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4123
4124 return NULL_TREE;
4125 }
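/* Example (sketch; exact constants depend on the target precision): with
   a 32-bit int X, sign_bit_p (X, VAL) returns X when VAL is 0x80000000.
   For (int) C with C of a signed 8-bit type, the NOP_EXPR case above
   recurses, so VAL == 0x80 matches the sign bit of the narrower type. */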
4126
4127 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4128 to be evaluated unconditionally. */
4129
4130 static int
4131 simple_operand_p (const_tree exp)
4132 {
4133 /* Strip any conversions that don't change the machine mode. */
4134 STRIP_NOPS (exp);
4135
4136 return (CONSTANT_CLASS_P (exp)
4137 || TREE_CODE (exp) == SSA_NAME
4138 || (DECL_P (exp)
4139 && ! TREE_ADDRESSABLE (exp)
4140 && ! TREE_THIS_VOLATILE (exp)
4141 && ! DECL_NONLOCAL (exp)
4142 /* Don't regard global variables as simple. They may be
4143 allocated in ways unknown to the compiler (shared memory,
4144 #pragma weak, etc). */
4145 && ! TREE_PUBLIC (exp)
4146 && ! DECL_EXTERNAL (exp)
4147 /* Weakrefs are not safe to be read, since they can be NULL.
4148 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4149 have DECL_WEAK flag set. */
4150 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4151 /* Loading a static variable is unduly expensive, but global
4152 registers aren't expensive. */
4153 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4154 }
4155
4156 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4157 to be evaluated unconditionally.
4158 In addition to simple_operand_p, we assume that comparisons, conversions,
4159 and logic-not operations are simple if their operands are simple, too. */
4160
4161 static bool
4162 simple_operand_p_2 (tree exp)
4163 {
4164 enum tree_code code;
4165
4166 if (TREE_SIDE_EFFECTS (exp)
4167 || tree_could_trap_p (exp))
4168 return false;
4169
4170 while (CONVERT_EXPR_P (exp))
4171 exp = TREE_OPERAND (exp, 0);
4172
4173 code = TREE_CODE (exp);
4174
4175 if (TREE_CODE_CLASS (code) == tcc_comparison)
4176 return (simple_operand_p (TREE_OPERAND (exp, 0))
4177 && simple_operand_p (TREE_OPERAND (exp, 1)));
4178
4179 if (code == TRUTH_NOT_EXPR)
4180 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4181
4182 return simple_operand_p (exp);
4183 }
4184
4185 \f
4186 /* The following functions are subroutines to fold_range_test and allow it to
4187 try to change a logical combination of comparisons into a range test.
4188
4189 For example, both
4190 X == 2 || X == 3 || X == 4 || X == 5
4191 and
4192 X >= 2 && X <= 5
4193 are converted to
4194 (unsigned) (X - 2) <= 3
4195
4196 We describe each set of comparisons as being either inside or outside
4197 a range, using a variable named like IN_P, and then describe the
4198 range with a lower and upper bound. If one of the bounds is omitted,
4199 it represents either the highest or lowest value of the type.
4200
4201 In the comments below, we represent a range by two numbers in brackets
4202 preceded by a "+" to designate being inside that range, or a "-" to
4203 designate being outside that range, so the condition can be inverted by
4204 flipping the prefix. An omitted bound is represented by a "-". For
4205 example, "- [-, 10]" means being outside the range starting at the lowest
4206 possible value and ending at 10, in other words, being greater than 10.
4207 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4208 always false.
4209
4210 We set up things so that the missing bounds are handled in a consistent
4211 manner so neither a missing bound nor "true" and "false" need to be
4212 handled using a special case. */
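/* Worked instance of the example above: X >= 2 && X <= 5 is recorded as
   + [2, 5], and the emitted test is (unsigned) (X - 2) <= 3. Checking
   endpoints: X == 5 gives (unsigned) 3 <= 3, true; X == 1 wraps around to
   a huge unsigned value, false, as required. */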
4213
4214 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4215 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4216 and UPPER1_P are nonzero if the respective argument is an upper bound
4217 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4218 must be specified for a comparison. ARG1 will be converted to ARG0's
4219 type if both are specified. */
4220
4221 static tree
4222 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4223 tree arg1, int upper1_p)
4224 {
4225 tree tem;
4226 int result;
4227 int sgn0, sgn1;
4228
4229 /* If neither arg represents infinity, do the normal operation.
4230 Else, if not a comparison, return infinity. Else handle the special
4231 comparison rules. Note that most of the cases below won't occur, but
4232 are handled for consistency. */
4233
4234 if (arg0 != 0 && arg1 != 0)
4235 {
4236 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4237 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4238 STRIP_NOPS (tem);
4239 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4240 }
4241
4242 if (TREE_CODE_CLASS (code) != tcc_comparison)
4243 return 0;
4244
4245 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4246 for neither. In real maths, we cannot assume open-ended ranges are
4247 the same. But this is computer arithmetic, where numbers are finite.
4248 We can therefore model any missing bound with a value Z greater than
4249 any representable number. This permits us to treat unbounded ranges
4250 as equal. */
4251 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4252 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4253 switch (code)
4254 {
4255 case EQ_EXPR:
4256 result = sgn0 == sgn1;
4257 break;
4258 case NE_EXPR:
4259 result = sgn0 != sgn1;
4260 break;
4261 case LT_EXPR:
4262 result = sgn0 < sgn1;
4263 break;
4264 case LE_EXPR:
4265 result = sgn0 <= sgn1;
4266 break;
4267 case GT_EXPR:
4268 result = sgn0 > sgn1;
4269 break;
4270 case GE_EXPR:
4271 result = sgn0 >= sgn1;
4272 break;
4273 default:
4274 gcc_unreachable ();
4275 }
4276
4277 return constant_boolean_node (result, type);
4278 }
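/* Example of the infinity handling: range_binop (LT_EXPR, type, NULL_TREE,
   0, NULL_TREE, 1) compares a missing lower bound (SGN0 == -1) with a
   missing upper bound (SGN1 == 1) and yields true, while two missing
   lower bounds compare equal since both map to the same SGN value. */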
4279 \f
4280 /* Helper routine for make_range. Perform one step for it, return
4281 new expression if the loop should continue or NULL_TREE if it should
4282 stop. */
4283
4284 tree
4285 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4286 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4287 bool *strict_overflow_p)
4288 {
4289 tree arg0_type = TREE_TYPE (arg0);
4290 tree n_low, n_high, low = *p_low, high = *p_high;
4291 int in_p = *p_in_p, n_in_p;
4292
4293 switch (code)
4294 {
4295 case TRUTH_NOT_EXPR:
4296 /* We can only do something if the range is testing for zero. */
4297 if (low == NULL_TREE || high == NULL_TREE
4298 || ! integer_zerop (low) || ! integer_zerop (high))
4299 return NULL_TREE;
4300 *p_in_p = ! in_p;
4301 return arg0;
4302
4303 case EQ_EXPR: case NE_EXPR:
4304 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4305 /* We can only do something if the range is testing for zero
4306 and if the second operand is an integer constant. Note that
4307 saying something is "in" the range we make is done by
4308 complementing IN_P, since IN_P is set for the initial case of
4309 being not equal to zero; "out" is leaving it alone. */
4310 if (low == NULL_TREE || high == NULL_TREE
4311 || ! integer_zerop (low) || ! integer_zerop (high)
4312 || TREE_CODE (arg1) != INTEGER_CST)
4313 return NULL_TREE;
4314
4315 switch (code)
4316 {
4317 case NE_EXPR: /* - [c, c] */
4318 low = high = arg1;
4319 break;
4320 case EQ_EXPR: /* + [c, c] */
4321 in_p = ! in_p, low = high = arg1;
4322 break;
4323 case GT_EXPR: /* - [-, c] */
4324 low = 0, high = arg1;
4325 break;
4326 case GE_EXPR: /* + [c, -] */
4327 in_p = ! in_p, low = arg1, high = 0;
4328 break;
4329 case LT_EXPR: /* - [c, -] */
4330 low = arg1, high = 0;
4331 break;
4332 case LE_EXPR: /* + [-, c] */
4333 in_p = ! in_p, low = 0, high = arg1;
4334 break;
4335 default:
4336 gcc_unreachable ();
4337 }
4338
4339 /* If this is an unsigned comparison, we also know that EXP is
4340 greater than or equal to zero. We base the range tests we make
4341 on that fact, so we record it here so we can parse existing
4342 range tests. We test arg0_type since often the return type
4343 of, e.g. EQ_EXPR, is boolean. */
4344 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4345 {
4346 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4347 in_p, low, high, 1,
4348 build_int_cst (arg0_type, 0),
4349 NULL_TREE))
4350 return NULL_TREE;
4351
4352 in_p = n_in_p, low = n_low, high = n_high;
4353
4354 /* If the high bound is missing, but we have a nonzero low
4355 bound, reverse the range so it goes from zero to the low bound
4356 minus 1. */
4357 if (high == 0 && low && ! integer_zerop (low))
4358 {
4359 in_p = ! in_p;
4360 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4361 build_int_cst (TREE_TYPE (low), 1), 0);
4362 low = build_int_cst (arg0_type, 0);
4363 }
4364 }
4365
4366 *p_low = low;
4367 *p_high = high;
4368 *p_in_p = in_p;
4369 return arg0;
4370
4371 case NEGATE_EXPR:
4372 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4373 low and high are non-NULL, then normalize will DTRT. */
4374 if (!TYPE_UNSIGNED (arg0_type)
4375 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4376 {
4377 if (low == NULL_TREE)
4378 low = TYPE_MIN_VALUE (arg0_type);
4379 if (high == NULL_TREE)
4380 high = TYPE_MAX_VALUE (arg0_type);
4381 }
4382
4383 /* (-x) IN [a,b] -> x in [-b, -a] */
4384 n_low = range_binop (MINUS_EXPR, exp_type,
4385 build_int_cst (exp_type, 0),
4386 0, high, 1);
4387 n_high = range_binop (MINUS_EXPR, exp_type,
4388 build_int_cst (exp_type, 0),
4389 0, low, 0);
4390 if (n_high != 0 && TREE_OVERFLOW (n_high))
4391 return NULL_TREE;
4392 goto normalize;
4393
4394 case BIT_NOT_EXPR:
4395 /* ~ X -> -X - 1 */
4396 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4397 build_int_cst (exp_type, 1));
4398
4399 case PLUS_EXPR:
4400 case MINUS_EXPR:
4401 if (TREE_CODE (arg1) != INTEGER_CST)
4402 return NULL_TREE;
4403
4404 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4405 move a constant to the other side. */
4406 if (!TYPE_UNSIGNED (arg0_type)
4407 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4408 return NULL_TREE;
4409
4410 /* If EXP is signed, any overflow in the computation is undefined,
4411 so we don't worry about it so long as our computations on
4412 the bounds don't overflow. For unsigned, overflow is defined
4413 and this is exactly the right thing. */
4414 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4415 arg0_type, low, 0, arg1, 0);
4416 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4417 arg0_type, high, 1, arg1, 0);
4418 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4419 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4420 return NULL_TREE;
4421
4422 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4423 *strict_overflow_p = true;
4424
4425 normalize:
4426 /* Check for an unsigned range which has wrapped around the maximum
4427 value thus making n_high < n_low, and normalize it. */
4428 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4429 {
4430 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4431 build_int_cst (TREE_TYPE (n_high), 1), 0);
4432 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4433 build_int_cst (TREE_TYPE (n_low), 1), 0);
4434
4435 /* If the range is of the form +/- [ x+1, x ], we won't
4436 be able to normalize it. But then, it represents the
4437 whole range or the empty set, so make it
4438 +/- [ -, - ]. */
4439 if (tree_int_cst_equal (n_low, low)
4440 && tree_int_cst_equal (n_high, high))
4441 low = high = 0;
4442 else
4443 in_p = ! in_p;
4444 }
4445 else
4446 low = n_low, high = n_high;
4447
4448 *p_low = low;
4449 *p_high = high;
4450 *p_in_p = in_p;
4451 return arg0;
4452
4453 CASE_CONVERT:
4454 case NON_LVALUE_EXPR:
4455 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4456 return NULL_TREE;
4457
4458 if (! INTEGRAL_TYPE_P (arg0_type)
4459 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4460 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4461 return NULL_TREE;
4462
4463 n_low = low, n_high = high;
4464
4465 if (n_low != 0)
4466 n_low = fold_convert_loc (loc, arg0_type, n_low);
4467
4468 if (n_high != 0)
4469 n_high = fold_convert_loc (loc, arg0_type, n_high);
4470
4471 /* If we're converting from an unsigned type (arg0) to a signed
4472 type (exp), we will be doing the comparison as unsigned.
4473 The tests above have already verified that LOW and HIGH
4474 are both positive.
4475
4476 So we have to ensure that we will handle large unsigned
4477 values the same way that the current signed bounds treat
4478 negative values. */
4479
4480 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4481 {
4482 tree high_positive;
4483 tree equiv_type;
4484 /* For fixed-point modes, we need to pass the saturating flag
4485 as the 2nd parameter. */
4486 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4487 equiv_type
4488 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4489 TYPE_SATURATING (arg0_type));
4490 else
4491 equiv_type
4492 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4493
4494 /* A range without an upper bound is, naturally, unbounded.
4495 Since convert would have cropped a very large value, use
4496 the max value for the destination type. */
4497 high_positive
4498 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4499 : TYPE_MAX_VALUE (arg0_type);
4500
4501 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4502 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4503 fold_convert_loc (loc, arg0_type,
4504 high_positive),
4505 build_int_cst (arg0_type, 1));
4506
4507 /* If the low bound is specified, "and" the range with the
4508 range for which the original unsigned value will be
4509 positive. */
4510 if (low != 0)
4511 {
4512 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4513 1, fold_convert_loc (loc, arg0_type,
4514 integer_zero_node),
4515 high_positive))
4516 return NULL_TREE;
4517
4518 in_p = (n_in_p == in_p);
4519 }
4520 else
4521 {
4522 /* Otherwise, "or" the range with the range of the input
4523 that will be interpreted as negative. */
4524 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4525 1, fold_convert_loc (loc, arg0_type,
4526 integer_zero_node),
4527 high_positive))
4528 return NULL_TREE;
4529
4530 in_p = (in_p != n_in_p);
4531 }
4532 }
4533
4534 *p_low = n_low;
4535 *p_high = n_high;
4536 *p_in_p = in_p;
4537 return arg0;
4538
4539 default:
4540 return NULL_TREE;
4541 }
4542 }
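/* Illustrative step: for EXP of the form x + 10 with pending range
   + [-, 19] (that is, x + 10 <= 19) and signed x with undefined overflow,
   the PLUS_EXPR case above moves the constant across the bounds, giving
   the range + [-, 9] for x and setting *STRICT_OVERFLOW_P. */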
4543
4544 /* Given EXP, a logical expression, set the range it is testing into
4545 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4546 actually being tested. *PLOW and *PHIGH will be made of the same
4547 type as the returned expression. If EXP is not a comparison, we
4548 will most likely not be returning a useful value and range. Set
4549 *STRICT_OVERFLOW_P to true if the return value is only valid
4550 because signed overflow is undefined; otherwise, do not change
4551 *STRICT_OVERFLOW_P. */
4552
4553 tree
4554 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4555 bool *strict_overflow_p)
4556 {
4557 enum tree_code code;
4558 tree arg0, arg1 = NULL_TREE;
4559 tree exp_type, nexp;
4560 int in_p;
4561 tree low, high;
4562 location_t loc = EXPR_LOCATION (exp);
4563
4564 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4565 and see if we can refine the range. Some of the cases below may not
4566 happen, but it doesn't seem worth worrying about this. We keep
4567 calling make_range_step on the current expression and loop for as
4568 long as it refines the range; once it returns NULL_TREE we stop. */
4569
4570 in_p = 0;
4571 low = high = build_int_cst (TREE_TYPE (exp), 0);
4572
4573 while (1)
4574 {
4575 code = TREE_CODE (exp);
4576 exp_type = TREE_TYPE (exp);
4577 arg0 = NULL_TREE;
4578
4579 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4580 {
4581 if (TREE_OPERAND_LENGTH (exp) > 0)
4582 arg0 = TREE_OPERAND (exp, 0);
4583 if (TREE_CODE_CLASS (code) == tcc_binary
4584 || TREE_CODE_CLASS (code) == tcc_comparison
4585 || (TREE_CODE_CLASS (code) == tcc_expression
4586 && TREE_OPERAND_LENGTH (exp) > 1))
4587 arg1 = TREE_OPERAND (exp, 1);
4588 }
4589 if (arg0 == NULL_TREE)
4590 break;
4591
4592 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4593 &high, &in_p, strict_overflow_p);
4594 if (nexp == NULL_TREE)
4595 break;
4596 exp = nexp;
4597 }
4598
4599 /* If EXP is a constant, we can evaluate whether this is true or false. */
4600 if (TREE_CODE (exp) == INTEGER_CST)
4601 {
4602 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4603 exp, 0, low, 0))
4604 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4605 exp, 1, high, 1)));
4606 low = high = 0;
4607 exp = 0;
4608 }
4609
4610 *pin_p = in_p, *plow = low, *phigh = high;
4611 return exp;
4612 }
4613 \f
4614 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4615 type, TYPE, return an expression to test if EXP is in (or out of, depending
4616 on IN_P) the range. Return 0 if the test couldn't be created. */
4617
4618 tree
4619 build_range_check (location_t loc, tree type, tree exp, int in_p,
4620 tree low, tree high)
4621 {
4622 tree etype = TREE_TYPE (exp), value;
4623
4624 /* Disable this optimization for function pointer expressions
4625 on targets that require function pointer canonicalization. */
4626 if (targetm.have_canonicalize_funcptr_for_compare ()
4627 && TREE_CODE (etype) == POINTER_TYPE
4628 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4629 return NULL_TREE;
4630
4631 if (! in_p)
4632 {
4633 value = build_range_check (loc, type, exp, 1, low, high);
4634 if (value != 0)
4635 return invert_truthvalue_loc (loc, value);
4636
4637 return 0;
4638 }
4639
4640 if (low == 0 && high == 0)
4641 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4642
4643 if (low == 0)
4644 return fold_build2_loc (loc, LE_EXPR, type, exp,
4645 fold_convert_loc (loc, etype, high));
4646
4647 if (high == 0)
4648 return fold_build2_loc (loc, GE_EXPR, type, exp,
4649 fold_convert_loc (loc, etype, low));
4650
4651 if (operand_equal_p (low, high, 0))
4652 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4653 fold_convert_loc (loc, etype, low));
4654
4655 if (integer_zerop (low))
4656 {
4657 if (! TYPE_UNSIGNED (etype))
4658 {
4659 etype = unsigned_type_for (etype);
4660 high = fold_convert_loc (loc, etype, high);
4661 exp = fold_convert_loc (loc, etype, exp);
4662 }
4663 return build_range_check (loc, type, exp, 1, 0, high);
4664 }
4665
4666 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4667 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4668 {
4669 int prec = TYPE_PRECISION (etype);
4670
4671 if (wi::mask (prec - 1, false, prec) == high)
4672 {
4673 if (TYPE_UNSIGNED (etype))
4674 {
4675 tree signed_etype = signed_type_for (etype);
4676 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4677 etype
4678 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4679 else
4680 etype = signed_etype;
4681 exp = fold_convert_loc (loc, etype, exp);
4682 }
4683 return fold_build2_loc (loc, GT_EXPR, type, exp,
4684 build_int_cst (etype, 0));
4685 }
4686 }
4687
4688 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4689 This requires wrap-around arithmetic for the type of the expression.
4690 First make sure that arithmetic in this type is valid, then make sure
4691 that it wraps around. */
4692 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4693 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4694 TYPE_UNSIGNED (etype));
4695
4696 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4697 {
4698 tree utype, minv, maxv;
4699
4700 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4701 for the type in question, as we rely on this here. */
4702 utype = unsigned_type_for (etype);
4703 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4704 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4705 build_int_cst (TREE_TYPE (maxv), 1), 1);
4706 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4707
4708 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4709 minv, 1, maxv, 1)))
4710 etype = utype;
4711 else
4712 return 0;
4713 }
4714
4715 high = fold_convert_loc (loc, etype, high);
4716 low = fold_convert_loc (loc, etype, low);
4717 exp = fold_convert_loc (loc, etype, exp);
4718
4719 value = const_binop (MINUS_EXPR, high, low);
4720
4722 if (POINTER_TYPE_P (etype))
4723 {
4724 if (value != 0 && !TREE_OVERFLOW (value))
4725 {
4726 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4727 return build_range_check (loc, type,
4728 fold_build_pointer_plus_loc (loc, exp, low),
4729 1, build_int_cst (etype, 0), value);
4730 }
4731 return 0;
4732 }
4733
4734 if (value != 0 && !TREE_OVERFLOW (value))
4735 return build_range_check (loc, type,
4736 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4737 1, build_int_cst (etype, 0), value);
4738
4739 return 0;
4740 }
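/* Example (sketch): a call such as

     build_range_check (loc, boolean_type_node, c, 1,
                        build_int_cst (etype, 2), build_int_cst (etype, 5))

   reaches the final MINUS_EXPR path and produces the equivalent of
   (unsigned) (c - 2) <= 3, assuming an integer ETYPE whose unsigned
   variant wraps around. */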
4741 \f
4742 /* Return the predecessor of VAL in its type, handling the infinite case. */
4743
4744 static tree
4745 range_predecessor (tree val)
4746 {
4747 tree type = TREE_TYPE (val);
4748
4749 if (INTEGRAL_TYPE_P (type)
4750 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4751 return 0;
4752 else
4753 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4754 build_int_cst (TREE_TYPE (val), 1), 0);
4755 }
4756
4757 /* Return the successor of VAL in its type, handling the infinite case. */
4758
4759 static tree
4760 range_successor (tree val)
4761 {
4762 tree type = TREE_TYPE (val);
4763
4764 if (INTEGRAL_TYPE_P (type)
4765 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4766 return 0;
4767 else
4768 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4769 build_int_cst (TREE_TYPE (val), 1), 0);
4770 }
4771
4772 /* Given two ranges, see if we can merge them into one. Return 1 if we
4773 can, 0 if we can't. Set the output range into the specified parameters. */
4774
4775 bool
4776 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4777 tree high0, int in1_p, tree low1, tree high1)
4778 {
4779 int no_overlap;
4780 int subset;
4781 int temp;
4782 tree tem;
4783 int in_p;
4784 tree low, high;
4785 int lowequal = ((low0 == 0 && low1 == 0)
4786 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4787 low0, 0, low1, 0)));
4788 int highequal = ((high0 == 0 && high1 == 0)
4789 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4790 high0, 1, high1, 1)));
4791
4792 /* Make range 0 be the range that starts first, or ends last if they
4793 start at the same value. Swap them if that is not already the case. */
4794 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4795 low0, 0, low1, 0))
4796 || (lowequal
4797 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4798 high1, 1, high0, 1))))
4799 {
4800 temp = in0_p, in0_p = in1_p, in1_p = temp;
4801 tem = low0, low0 = low1, low1 = tem;
4802 tem = high0, high0 = high1, high1 = tem;
4803 }
4804
4805 /* Now flag two cases, whether the ranges are disjoint or whether the
4806 second range is totally subsumed in the first. Note that the tests
4807 below are simplified by the ones above. */
4808 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4809 high0, 1, low1, 0));
4810 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4811 high1, 1, high0, 1));
4812
4813 /* We now have four cases, depending on whether we are including or
4814 excluding the two ranges. */
4815 if (in0_p && in1_p)
4816 {
4817 /* If they don't overlap, the result is false. If the second range
4818 is a subset it is the result. Otherwise, the range is from the start
4819 of the second to the end of the first. */
4820 if (no_overlap)
4821 in_p = 0, low = high = 0;
4822 else if (subset)
4823 in_p = 1, low = low1, high = high1;
4824 else
4825 in_p = 1, low = low1, high = high0;
4826 }
4827
4828 else if (in0_p && ! in1_p)
4829 {
4830 /* If they don't overlap, the result is the first range. If they are
4831 equal, the result is false. If the second range is a subset of the
4832 first, and the ranges begin at the same place, we go from just after
4833 the end of the second range to the end of the first. If the second
4834 range is not a subset of the first, or if it is a subset and both
4835 ranges end at the same place, the range starts at the start of the
4836 first range and ends just before the second range.
4837 Otherwise, we can't describe this as a single range. */
4838 if (no_overlap)
4839 in_p = 1, low = low0, high = high0;
4840 else if (lowequal && highequal)
4841 in_p = 0, low = high = 0;
4842 else if (subset && lowequal)
4843 {
4844 low = range_successor (high1);
4845 high = high0;
4846 in_p = 1;
4847 if (low == 0)
4848 {
4849 /* We are in the weird situation where high0 > high1 but
4850 high1 has no successor. Punt. */
4851 return 0;
4852 }
4853 }
4854 else if (! subset || highequal)
4855 {
4856 low = low0;
4857 high = range_predecessor (low1);
4858 in_p = 1;
4859 if (high == 0)
4860 {
4861 /* low0 < low1 but low1 has no predecessor. Punt. */
4862 return 0;
4863 }
4864 }
4865 else
4866 return 0;
4867 }
4868
4869 else if (! in0_p && in1_p)
4870 {
4871 /* If they don't overlap, the result is the second range. If the second
4872 is a subset of the first, the result is false. Otherwise,
4873 the range starts just after the first range and ends at the
4874 end of the second. */
4875 if (no_overlap)
4876 in_p = 1, low = low1, high = high1;
4877 else if (subset || highequal)
4878 in_p = 0, low = high = 0;
4879 else
4880 {
4881 low = range_successor (high0);
4882 high = high1;
4883 in_p = 1;
4884 if (low == 0)
4885 {
4886 /* high1 > high0 but high0 has no successor. Punt. */
4887 return 0;
4888 }
4889 }
4890 }
4891
4892 else
4893 {
4894 /* The case where we are excluding both ranges. Here the complex case
4895 is if they don't overlap. In that case, the only time we have a
4896 range is if they are adjacent. If the second is a subset of the
4897 first, the result is the first. Otherwise, the range to exclude
4898 starts at the beginning of the first range and ends at the end of the
4899 second. */
4900 if (no_overlap)
4901 {
4902 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4903 range_successor (high0),
4904 1, low1, 0)))
4905 in_p = 0, low = low0, high = high1;
4906 else
4907 {
4908 /* Canonicalize - [min, x] into - [-, x]. */
4909 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4910 switch (TREE_CODE (TREE_TYPE (low0)))
4911 {
4912 case ENUMERAL_TYPE:
4913 if (TYPE_PRECISION (TREE_TYPE (low0))
4914 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4915 break;
4916 /* FALLTHROUGH */
4917 case INTEGER_TYPE:
4918 if (tree_int_cst_equal (low0,
4919 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4920 low0 = 0;
4921 break;
4922 case POINTER_TYPE:
4923 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4924 && integer_zerop (low0))
4925 low0 = 0;
4926 break;
4927 default:
4928 break;
4929 }
4930
4931 /* Canonicalize - [x, max] into - [x, -]. */
4932 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4933 switch (TREE_CODE (TREE_TYPE (high1)))
4934 {
4935 case ENUMERAL_TYPE:
4936 if (TYPE_PRECISION (TREE_TYPE (high1))
4937 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4938 break;
4939 /* FALLTHROUGH */
4940 case INTEGER_TYPE:
4941 if (tree_int_cst_equal (high1,
4942 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4943 high1 = 0;
4944 break;
4945 case POINTER_TYPE:
4946 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4947 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4948 high1, 1,
4949 build_int_cst (TREE_TYPE (high1), 1),
4950 1)))
4951 high1 = 0;
4952 break;
4953 default:
4954 break;
4955 }
4956
4957 /* The ranges might be also adjacent between the maximum and
4958 minimum values of the given type. For
4959 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4960 return + [x + 1, y - 1]. */
4961 if (low0 == 0 && high1 == 0)
4962 {
4963 low = range_successor (high0);
4964 high = range_predecessor (low1);
4965 if (low == 0 || high == 0)
4966 return 0;
4967
4968 in_p = 1;
4969 }
4970 else
4971 return 0;
4972 }
4973 }
4974 else if (subset)
4975 in_p = 0, low = low0, high = high0;
4976 else
4977 in_p = 0, low = low0, high = high1;
4978 }
4979
4980 *pin_p = in_p, *plow = low, *phigh = high;
4981 return 1;
4982 }
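/* Example: merging + [2, 5] with + [4, 8] (both included) is the
   overlapping, non-subset case above and yields the intersection
   + [4, 5]. Merging + [2, 5] with + [7, 9] does not overlap, so the
   result is the always-false range - [-, -]. */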
4983 \f
4984
4985 /* Subroutine of fold, looking inside expressions of the form
4986 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4987 of the COND_EXPR. This function is being used also to optimize
4988 A op B ? C : A, by reversing the comparison first.
4989
4990 Return a folded expression whose code is not a COND_EXPR
4991 anymore, or NULL_TREE if no folding opportunity is found. */
4992
4993 static tree
4994 fold_cond_expr_with_comparison (location_t loc, tree type,
4995 tree arg0, tree arg1, tree arg2)
4996 {
4997 enum tree_code comp_code = TREE_CODE (arg0);
4998 tree arg00 = TREE_OPERAND (arg0, 0);
4999 tree arg01 = TREE_OPERAND (arg0, 1);
5000 tree arg1_type = TREE_TYPE (arg1);
5001 tree tem;
5002
5003 STRIP_NOPS (arg1);
5004 STRIP_NOPS (arg2);
5005
5006 /* If we have A op 0 ? A : -A, consider applying the following
5007 transformations:
5008
5009 A == 0? A : -A same as -A
5010 A != 0? A : -A same as A
5011 A >= 0? A : -A same as abs (A)
5012 A > 0? A : -A same as abs (A)
5013 A <= 0? A : -A same as -abs (A)
5014 A < 0? A : -A same as -abs (A)
5015
5016 None of these transformations work for modes with signed
5017 zeros. If A is +/-0, the first two transformations will
5018 change the sign of the result (from +0 to -0, or vice
5019 versa). The last four will fix the sign of the result,
5020 even though the original expressions could be positive or
5021 negative, depending on the sign of A.
5022
5023 Note that all these transformations are correct if A is
5024 NaN, since the two alternatives (A and -A) are also NaNs. */
5025 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5026 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5027 ? real_zerop (arg01)
5028 : integer_zerop (arg01))
5029 && ((TREE_CODE (arg2) == NEGATE_EXPR
5030 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5031 /* In the case that A is of the form X-Y, '-A' (arg2) may
5032 have already been folded to Y-X, check for that. */
5033 || (TREE_CODE (arg1) == MINUS_EXPR
5034 && TREE_CODE (arg2) == MINUS_EXPR
5035 && operand_equal_p (TREE_OPERAND (arg1, 0),
5036 TREE_OPERAND (arg2, 1), 0)
5037 && operand_equal_p (TREE_OPERAND (arg1, 1),
5038 TREE_OPERAND (arg2, 0), 0))))
5039 switch (comp_code)
5040 {
5041 case EQ_EXPR:
5042 case UNEQ_EXPR:
5043 tem = fold_convert_loc (loc, arg1_type, arg1);
5044 return pedantic_non_lvalue_loc (loc,
5045 fold_convert_loc (loc, type,
5046 negate_expr (tem)));
5047 case NE_EXPR:
5048 case LTGT_EXPR:
5049 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5050 case UNGE_EXPR:
5051 case UNGT_EXPR:
5052 if (flag_trapping_math)
5053 break;
5054 /* Fall through. */
5055 case GE_EXPR:
5056 case GT_EXPR:
5057 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5058 break;
5059 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5060 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5061 case UNLE_EXPR:
5062 case UNLT_EXPR:
5063 if (flag_trapping_math)
5064 break;
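/* Fall through. */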
5065 case LE_EXPR:
5066 case LT_EXPR:
5067 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5068 break;
5069 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5070 return negate_expr (fold_convert_loc (loc, type, tem));
5071 default:
5072 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5073 break;
5074 }
5075
5076 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5077 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5078 both transformations are correct when A is NaN: A != 0
5079 is then true, and A == 0 is false. */
5080
5081 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5082 && integer_zerop (arg01) && integer_zerop (arg2))
5083 {
5084 if (comp_code == NE_EXPR)
5085 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5086 else if (comp_code == EQ_EXPR)
5087 return build_zero_cst (type);
5088 }
5089
5090 /* Try some transformations of A op B ? A : B.
5091
5092 A == B? A : B same as B
5093 A != B? A : B same as A
5094 A >= B? A : B same as max (A, B)
5095 A > B? A : B same as max (B, A)
5096 A <= B? A : B same as min (A, B)
5097 A < B? A : B same as min (B, A)
5098
5099 As above, these transformations don't work in the presence
5100 of signed zeros. For example, if A and B are zeros of
5101 opposite sign, the first two transformations will change
5102 the sign of the result. In the last four, the original
5103 expressions give different results for (A=+0, B=-0) and
5104 (A=-0, B=+0), but the transformed expressions do not.
5105
5106 The first two transformations are correct if either A or B
5107 is a NaN. In the first transformation, the condition will
5108 be false, and B will indeed be chosen. In the case of the
5109 second transformation, the condition A != B will be true,
5110 and A will be chosen.
5111
5112 The conversions to max() and min() are not correct if B is
5113 a number and A is not. The conditions in the original
5114 expressions will be false, so all four give B. The min()
5115 and max() versions would give a NaN instead. */
5116 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5117 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5118 /* Avoid these transformations if the COND_EXPR may be used
5119 as an lvalue in the C++ front-end. PR c++/19199. */
5120 && (in_gimple_form
5121 || VECTOR_TYPE_P (type)
5122 || (! lang_GNU_CXX ()
5123 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5124 || ! maybe_lvalue_p (arg1)
5125 || ! maybe_lvalue_p (arg2)))
5126 {
5127 tree comp_op0 = arg00;
5128 tree comp_op1 = arg01;
5129 tree comp_type = TREE_TYPE (comp_op0);
5130
5131 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5132 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5133 {
5134 comp_type = type;
5135 comp_op0 = arg1;
5136 comp_op1 = arg2;
5137 }
5138
5139 switch (comp_code)
5140 {
5141 case EQ_EXPR:
5142 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5143 case NE_EXPR:
5144 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5145 case LE_EXPR:
5146 case LT_EXPR:
5147 case UNLE_EXPR:
5148 case UNLT_EXPR:
5149 /* In C++ a ?: expression can be an lvalue, so put the
5150 operand which will be used if they are equal first
5151 so that we can convert this back to the
5152 corresponding COND_EXPR. */
5153 if (!HONOR_NANS (arg1))
5154 {
5155 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5156 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5157 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5158 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5159 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5160 comp_op1, comp_op0);
5161 return pedantic_non_lvalue_loc (loc,
5162 fold_convert_loc (loc, type, tem));
5163 }
5164 break;
5165 case GE_EXPR:
5166 case GT_EXPR:
5167 case UNGE_EXPR:
5168 case UNGT_EXPR:
5169 if (!HONOR_NANS (arg1))
5170 {
5171 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5172 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5173 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5174 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5175 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5176 comp_op1, comp_op0);
5177 return pedantic_non_lvalue_loc (loc,
5178 fold_convert_loc (loc, type, tem));
5179 }
5180 break;
5181 case UNEQ_EXPR:
5182 if (!HONOR_NANS (arg1))
5183 return pedantic_non_lvalue_loc (loc,
5184 fold_convert_loc (loc, type, arg2));
5185 break;
5186 case LTGT_EXPR:
5187 if (!HONOR_NANS (arg1))
5188 return pedantic_non_lvalue_loc (loc,
5189 fold_convert_loc (loc, type, arg1));
5190 break;
5191 default:
5192 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5193 break;
5194 }
5195 }
5196
5197 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5198 we might still be able to simplify this. For example,
5199 if C1 is one less or one more than C2, this might have started
5200 out as a MIN or MAX and been transformed by this function.
5201 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5202
5203 if (INTEGRAL_TYPE_P (type)
5204 && TREE_CODE (arg01) == INTEGER_CST
5205 && TREE_CODE (arg2) == INTEGER_CST)
5206 switch (comp_code)
5207 {
5208 case EQ_EXPR:
5209 if (TREE_CODE (arg1) == INTEGER_CST)
5210 break;
5211 /* We can replace A with C1 in this case. */
5212 arg1 = fold_convert_loc (loc, type, arg01);
5213 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5214
5215 case LT_EXPR:
5216 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5217 MIN_EXPR, to preserve the signedness of the comparison. */
5218 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5219 OEP_ONLY_CONST)
5220 && operand_equal_p (arg01,
5221 const_binop (PLUS_EXPR, arg2,
5222 build_int_cst (type, 1)),
5223 OEP_ONLY_CONST))
5224 {
5225 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5226 fold_convert_loc (loc, TREE_TYPE (arg00),
5227 arg2));
5228 return pedantic_non_lvalue_loc (loc,
5229 fold_convert_loc (loc, type, tem));
5230 }
5231 break;
5232
5233 case LE_EXPR:
5234 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5235 as above. */
5236 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5237 OEP_ONLY_CONST)
5238 && operand_equal_p (arg01,
5239 const_binop (MINUS_EXPR, arg2,
5240 build_int_cst (type, 1)),
5241 OEP_ONLY_CONST))
5242 {
5243 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5244 fold_convert_loc (loc, TREE_TYPE (arg00),
5245 arg2));
5246 return pedantic_non_lvalue_loc (loc,
5247 fold_convert_loc (loc, type, tem));
5248 }
5249 break;
5250
5251 case GT_EXPR:
5252 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5253 MAX_EXPR, to preserve the signedness of the comparison. */
5254 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5255 OEP_ONLY_CONST)
5256 && operand_equal_p (arg01,
5257 const_binop (MINUS_EXPR, arg2,
5258 build_int_cst (type, 1)),
5259 OEP_ONLY_CONST))
5260 {
5261 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5262 fold_convert_loc (loc, TREE_TYPE (arg00),
5263 arg2));
5264 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5265 }
5266 break;
5267
5268 case GE_EXPR:
5269 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5270 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5271 OEP_ONLY_CONST)
5272 && operand_equal_p (arg01,
5273 const_binop (PLUS_EXPR, arg2,
5274 build_int_cst (type, 1)),
5275 OEP_ONLY_CONST))
5276 {
5277 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5278 fold_convert_loc (loc, TREE_TYPE (arg00),
5279 arg2));
5280 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5281 }
5282 break;
5283 case NE_EXPR:
5284 break;
5285 default:
5286 gcc_unreachable ();
5287 }
5288
5289 return NULL_TREE;
5290 }
5291
5292
5293 \f
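/* Nonzero means it is preferable to evaluate both operands of a logical
   operation unconditionally (TRUTH_AND_EXPR/TRUTH_OR_EXPR) instead of
   emitting short-circuit branches, i.e. when the target's branch cost is
   high enough. Targets may provide their own definition. */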
5294 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5295 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5296 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5297 false) >= 2)
5298 #endif
5299
5300 /* EXP is some logical combination of boolean tests. See if we can
5301 merge it into some range test. Return the new tree if so. */
5302
5303 static tree
5304 fold_range_test (location_t loc, enum tree_code code, tree type,
5305 tree op0, tree op1)
5306 {
5307 int or_op = (code == TRUTH_ORIF_EXPR
5308 || code == TRUTH_OR_EXPR);
5309 int in0_p, in1_p, in_p;
5310 tree low0, low1, low, high0, high1, high;
5311 bool strict_overflow_p = false;
5312 tree tem, lhs, rhs;
5313 const char * const warnmsg = G_("assuming signed overflow does not occur "
5314 "when simplifying range test");
5315
5316 if (!INTEGRAL_TYPE_P (type))
5317 return 0;
5318
5319 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5320 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5321
5322 /* If this is an OR operation, invert both sides; we will invert
5323 again at the end. */
5324 if (or_op)
5325 in0_p = ! in0_p, in1_p = ! in1_p;
5326
5327 /* If both expressions are the same, if we can merge the ranges, and we
5328 can build the range test, return it or it inverted. If one of the
5329 ranges is always true or always false, consider it to be the same
5330 expression as the other. */
5331 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5332 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5333 in1_p, low1, high1)
5334 && 0 != (tem = (build_range_check (loc, type,
5335 lhs != 0 ? lhs
5336 : rhs != 0 ? rhs : integer_zero_node,
5337 in_p, low, high))))
5338 {
5339 if (strict_overflow_p)
5340 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5341 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5342 }
5343
5344 /* On machines where the branch cost is expensive, if this is a
5345 short-circuited branch and the underlying object on both sides
5346 is the same, make a non-short-circuit operation. */
5347 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5348 && lhs != 0 && rhs != 0
5349 && (code == TRUTH_ANDIF_EXPR
5350 || code == TRUTH_ORIF_EXPR)
5351 && operand_equal_p (lhs, rhs, 0))
5352 {
5353 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5354 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5355 which cases we can't do this. */
5356 if (simple_operand_p (lhs))
5357 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5358 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5359 type, op0, op1);
5360
5361 else if (!lang_hooks.decls.global_bindings_p ()
5362 && !CONTAINS_PLACEHOLDER_P (lhs))
5363 {
5364 tree common = save_expr (lhs);
5365
5366 if (0 != (lhs = build_range_check (loc, type, common,
5367 or_op ? ! in0_p : in0_p,
5368 low0, high0))
5369 && (0 != (rhs = build_range_check (loc, type, common,
5370 or_op ? ! in1_p : in1_p,
5371 low1, high1))))
5372 {
5373 if (strict_overflow_p)
5374 fold_overflow_warning (warnmsg,
5375 WARN_STRICT_OVERFLOW_COMPARISON);
5376 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5377 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5378 type, lhs, rhs);
5379 }
5380 }
5381 }
5382
5383 return 0;
5384 }
5385 \f
5386 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5387 bit value. Arrange things so the extra bits will be set to zero if and
5388 only if C is sign-extended to its full width.  If MASK is nonzero,
5389 it is an INTEGER_CST that should be AND'ed with the extra bits. */
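/* Worked example with hypothetical values: for P == 4 in an 8-bit mode
   with MASK == 0, the shifts below produce 0xf0, which is bit P-1 of C
   replicated through the extra bits.  XORing with C then maps
   C == 0x0c (sign bit set but not sign-extended) to 0xfc, whose extra
   bits are nonzero, and C == 0xfc (already sign-extended) to 0x0c,
   whose extra bits are zero, as the comment above requires.  */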
5390
5391 static tree
5392 unextend (tree c, int p, int unsignedp, tree mask)
5393 {
5394 tree type = TREE_TYPE (c);
5395 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5396 tree temp;
5397
5398 if (p == modesize || unsignedp)
5399 return c;
5400
5401 /* We work by getting just the sign bit into the low-order bit, then
5402 into the high-order bit, then sign-extend. We then XOR that value
5403 with C. */
5404 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5405
5406 /* We must use a signed type in order to get an arithmetic right shift.
5407 However, we must also avoid introducing accidental overflows, so that
5408 a subsequent call to integer_zerop will work. Hence we must
5409 do the type conversion here. At this point, the constant is either
5410 zero or one, and the conversion to a signed type can never overflow.
5411 We could get an overflow if this conversion is done anywhere else. */
5412 if (TYPE_UNSIGNED (type))
5413 temp = fold_convert (signed_type_for (type), temp);
5414
5415 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5416 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5417 if (mask != 0)
5418 temp = const_binop (BIT_AND_EXPR, temp,
5419 fold_convert (TREE_TYPE (c), mask));
5420 /* If necessary, convert the type back to match the type of C. */
5421 if (TYPE_UNSIGNED (type))
5422 temp = fold_convert (type, temp);
5423
5424 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5425 }
5426 \f
5427 /* For an expression that has the form
5428 (A && B) || ~B
5429 or
5430 (A || B) && ~B,
5431 we can drop one of the inner expressions and simplify to
5432 A || ~B
5433 or
5434 A && ~B
5435 LOC is the location of the resulting expression.  OP is the inner
5436 logical operation (the left-hand side in the examples above), while
5437 CMPOP is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
5438 removing a condition that guards another, as in
5439 (A != NULL && A->...) || A == NULL
5440 which we must not transform. If RHS_ONLY is true, only eliminate the
5441 right-most operand of the inner logical operation. */
5442
5443 static tree
5444 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5445 bool rhs_only)
5446 {
5447 tree type = TREE_TYPE (cmpop);
5448 enum tree_code code = TREE_CODE (cmpop);
5449 enum tree_code truthop_code = TREE_CODE (op);
5450 tree lhs = TREE_OPERAND (op, 0);
5451 tree rhs = TREE_OPERAND (op, 1);
5452 tree orig_lhs = lhs, orig_rhs = rhs;
5453 enum tree_code rhs_code = TREE_CODE (rhs);
5454 enum tree_code lhs_code = TREE_CODE (lhs);
5455 enum tree_code inv_code;
5456
5457 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5458 return NULL_TREE;
5459
5460 if (TREE_CODE_CLASS (code) != tcc_comparison)
5461 return NULL_TREE;
5462
5463 if (rhs_code == truthop_code)
5464 {
5465 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5466 if (newrhs != NULL_TREE)
5467 {
5468 rhs = newrhs;
5469 rhs_code = TREE_CODE (rhs);
5470 }
5471 }
5472 if (lhs_code == truthop_code && !rhs_only)
5473 {
5474 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5475 if (newlhs != NULL_TREE)
5476 {
5477 lhs = newlhs;
5478 lhs_code = TREE_CODE (lhs);
5479 }
5480 }
5481
5482 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5483 if (inv_code == rhs_code
5484 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5485 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5486 return lhs;
5487 if (!rhs_only && inv_code == lhs_code
5488 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5489 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5490 return rhs;
5491 if (rhs != orig_rhs || lhs != orig_lhs)
5492 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5493 lhs, rhs);
5494 return NULL_TREE;
5495 }
5496
5497 /* Find ways of folding logical expressions of LHS and RHS:
5498 Try to merge two comparisons to the same innermost item.
5499 Look for range tests like "ch >= '0' && ch <= '9'".
5500 Look for combinations of simple terms on machines with expensive branches
5501 and evaluate the RHS unconditionally.
5502
5503 For example, if we have p->a == 2 && p->b == 4 and we can make an
5504 object large enough to span both A and B, we can do this with a comparison
5505 against the object ANDed with the a mask.
5506
5507 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5508 operations to do this with one comparison.
5509
5510 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5511 function and the one above.
5512
5513 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5514 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5515
5516 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5517 two operands.
5518
5519 We return the simplified tree or 0 if no optimization is possible. */
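/* As an illustrative sketch (the exact layout is target-dependent):
   given
	struct S { unsigned char a; unsigned char b; } *p;
   the test p->a == 2 && p->b == 4 can be turned into one 16-bit load
   covering both fields and a single comparison against the merged
   constant, e.g. the equivalent of
	*(unsigned short *) p == 0x0402
   on a little-endian target.  */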
5520
5521 static tree
5522 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5523 tree lhs, tree rhs)
5524 {
5525 /* If this is the "or" of two comparisons, we can do something if
5526 the comparisons are NE_EXPR. If this is the "and", we can do something
5527 if the comparisons are EQ_EXPR. I.e.,
5528 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5529
5530 WANTED_CODE is the comparison code we require of both operands.  For single bit fields, we can
5531 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5532 comparison for one-bit fields. */
5533
5534 enum tree_code wanted_code;
5535 enum tree_code lcode, rcode;
5536 tree ll_arg, lr_arg, rl_arg, rr_arg;
5537 tree ll_inner, lr_inner, rl_inner, rr_inner;
5538 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5539 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5540 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5541 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5542 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5543 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5544 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5545 machine_mode lnmode, rnmode;
5546 tree ll_mask, lr_mask, rl_mask, rr_mask;
5547 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5548 tree l_const, r_const;
5549 tree lntype, rntype, result;
5550 HOST_WIDE_INT first_bit, end_bit;
5551 int volatilep;
5552
5553 /* Start by getting the comparison codes. Fail if anything is volatile.
5554 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5555 it were wrapped in a NE_EXPR against zero.
5556
5557 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5558 return 0;
5559
5560 lcode = TREE_CODE (lhs);
5561 rcode = TREE_CODE (rhs);
5562
5563 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5564 {
5565 lhs = build2 (NE_EXPR, truth_type, lhs,
5566 build_int_cst (TREE_TYPE (lhs), 0));
5567 lcode = NE_EXPR;
5568 }
5569
5570 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5571 {
5572 rhs = build2 (NE_EXPR, truth_type, rhs,
5573 build_int_cst (TREE_TYPE (rhs), 0));
5574 rcode = NE_EXPR;
5575 }
5576
5577 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5578 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5579 return 0;
5580
5581 ll_arg = TREE_OPERAND (lhs, 0);
5582 lr_arg = TREE_OPERAND (lhs, 1);
5583 rl_arg = TREE_OPERAND (rhs, 0);
5584 rr_arg = TREE_OPERAND (rhs, 1);
5585
5586 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5587 if (simple_operand_p (ll_arg)
5588 && simple_operand_p (lr_arg))
5589 {
5590 if (operand_equal_p (ll_arg, rl_arg, 0)
5591 && operand_equal_p (lr_arg, rr_arg, 0))
5592 {
5593 result = combine_comparisons (loc, code, lcode, rcode,
5594 truth_type, ll_arg, lr_arg);
5595 if (result)
5596 return result;
5597 }
5598 else if (operand_equal_p (ll_arg, rr_arg, 0)
5599 && operand_equal_p (lr_arg, rl_arg, 0))
5600 {
5601 result = combine_comparisons (loc, code, lcode,
5602 swap_tree_comparison (rcode),
5603 truth_type, ll_arg, lr_arg);
5604 if (result)
5605 return result;
5606 }
5607 }
5608
5609 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5610 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5611
5612 /* If the RHS can be evaluated unconditionally and its operands are
5613 simple, it wins to evaluate the RHS unconditionally on machines
5614 with expensive branches. In this case, this isn't a comparison
5615 that can be merged. */
5616
5617 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5618 false) >= 2
5619 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5620 && simple_operand_p (rl_arg)
5621 && simple_operand_p (rr_arg))
5622 {
5623 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5624 if (code == TRUTH_OR_EXPR
5625 && lcode == NE_EXPR && integer_zerop (lr_arg)
5626 && rcode == NE_EXPR && integer_zerop (rr_arg)
5627 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5628 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5629 return build2_loc (loc, NE_EXPR, truth_type,
5630 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5631 ll_arg, rl_arg),
5632 build_int_cst (TREE_TYPE (ll_arg), 0));
5633
5634 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5635 if (code == TRUTH_AND_EXPR
5636 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5637 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5638 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5639 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5640 return build2_loc (loc, EQ_EXPR, truth_type,
5641 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5642 ll_arg, rl_arg),
5643 build_int_cst (TREE_TYPE (ll_arg), 0));
5644 }
5645
5646 /* See if the comparisons can be merged. Then get all the parameters for
5647 each side. */
5648
5649 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5650 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5651 return 0;
5652
5653 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5654 volatilep = 0;
5655 ll_inner = decode_field_reference (loc, ll_arg,
5656 &ll_bitsize, &ll_bitpos, &ll_mode,
5657 &ll_unsignedp, &ll_reversep, &volatilep,
5658 &ll_mask, &ll_and_mask);
5659 lr_inner = decode_field_reference (loc, lr_arg,
5660 &lr_bitsize, &lr_bitpos, &lr_mode,
5661 &lr_unsignedp, &lr_reversep, &volatilep,
5662 &lr_mask, &lr_and_mask);
5663 rl_inner = decode_field_reference (loc, rl_arg,
5664 &rl_bitsize, &rl_bitpos, &rl_mode,
5665 &rl_unsignedp, &rl_reversep, &volatilep,
5666 &rl_mask, &rl_and_mask);
5667 rr_inner = decode_field_reference (loc, rr_arg,
5668 &rr_bitsize, &rr_bitpos, &rr_mode,
5669 &rr_unsignedp, &rr_reversep, &volatilep,
5670 &rr_mask, &rr_and_mask);
5671
5672 /* The inner operation on the lhs of each comparison must be the
5673 same if we are to be able to do anything.
5674 Then see if we have constants. If not, the same must be true for
5675 the rhs's. */
5676 if (volatilep
5677 || ll_reversep != rl_reversep
5678 || ll_inner == 0 || rl_inner == 0
5679 || ! operand_equal_p (ll_inner, rl_inner, 0))
5680 return 0;
5681
5682 if (TREE_CODE (lr_arg) == INTEGER_CST
5683 && TREE_CODE (rr_arg) == INTEGER_CST)
5684 {
5685 l_const = lr_arg, r_const = rr_arg;
5686 lr_reversep = ll_reversep;
5687 }
5688 else if (lr_reversep != rr_reversep
5689 || lr_inner == 0 || rr_inner == 0
5690 || ! operand_equal_p (lr_inner, rr_inner, 0))
5691 return 0;
5692 else
5693 l_const = r_const = 0;
5694
5695 /* If either comparison code is not correct for our logical operation,
5696 fail. However, we can convert a one-bit comparison against zero into
5697 the opposite comparison against that bit being set in the field. */
5698
5699 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5700 if (lcode != wanted_code)
5701 {
5702 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5703 {
5704 /* Make the left operand unsigned, since we are only interested
5705 in the value of one bit. Otherwise we are doing the wrong
5706 thing below. */
5707 ll_unsignedp = 1;
5708 l_const = ll_mask;
5709 }
5710 else
5711 return 0;
5712 }
5713
5714 /* This is analogous to the code for l_const above. */
5715 if (rcode != wanted_code)
5716 {
5717 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5718 {
5719 rl_unsignedp = 1;
5720 r_const = rl_mask;
5721 }
5722 else
5723 return 0;
5724 }
5725
5726 /* See if we can find a mode that contains both fields being compared on
5727 the left. If we can't, fail. Otherwise, update all constants and masks
5728 to be relative to a field of that size. */
5729 first_bit = MIN (ll_bitpos, rl_bitpos);
5730 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5731 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5732 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5733 volatilep);
5734 if (lnmode == VOIDmode)
5735 return 0;
5736
5737 lnbitsize = GET_MODE_BITSIZE (lnmode);
5738 lnbitpos = first_bit & ~ (lnbitsize - 1);
5739 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5740 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5741
5742 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5743 {
5744 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5745 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5746 }
5747
5748 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5749 size_int (xll_bitpos));
5750 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5751 size_int (xrl_bitpos));
5752
5753 if (l_const)
5754 {
5755 l_const = fold_convert_loc (loc, lntype, l_const);
5756 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5757 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5758 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5759 fold_build1_loc (loc, BIT_NOT_EXPR,
5760 lntype, ll_mask))))
5761 {
5762 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5763
5764 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5765 }
5766 }
5767 if (r_const)
5768 {
5769 r_const = fold_convert_loc (loc, lntype, r_const);
5770 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5771 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5772 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5773 fold_build1_loc (loc, BIT_NOT_EXPR,
5774 lntype, rl_mask))))
5775 {
5776 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5777
5778 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5779 }
5780 }
5781
5782 /* If the right sides are not constant, do the same for them.  Also,
5783 disallow this optimization if a size or signedness mismatch occurs
5784 between the left and right sides. */
5785 if (l_const == 0)
5786 {
5787 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5788 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5789 /* Make sure the two fields on the right
5790 correspond to the left without being swapped. */
5791 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5792 return 0;
5793
5794 first_bit = MIN (lr_bitpos, rr_bitpos);
5795 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5796 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5797 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5798 volatilep);
5799 if (rnmode == VOIDmode)
5800 return 0;
5801
5802 rnbitsize = GET_MODE_BITSIZE (rnmode);
5803 rnbitpos = first_bit & ~ (rnbitsize - 1);
5804 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5805 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5806
5807 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5808 {
5809 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5810 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5811 }
5812
5813 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5814 rntype, lr_mask),
5815 size_int (xlr_bitpos));
5816 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5817 rntype, rr_mask),
5818 size_int (xrr_bitpos));
5819
5820 /* Make a mask that corresponds to both fields being compared.
5821 Do this for both items being compared. If the operands are the
5822 same size and the bits being compared are in the same position
5823 then we can do this by masking both and comparing the masked
5824 results. */
5825 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5826 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5827 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5828 {
5829 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5830 ll_unsignedp || rl_unsignedp, ll_reversep);
5831 if (! all_ones_mask_p (ll_mask, lnbitsize))
5832 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5833
5834 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5835 lr_unsignedp || rr_unsignedp, lr_reversep);
5836 if (! all_ones_mask_p (lr_mask, rnbitsize))
5837 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5838
5839 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5840 }
5841
5842 /* There is still another way we can do something: If both pairs of
5843 fields being compared are adjacent, we may be able to make a wider
5844 field containing them both.
5845
5846 Note that we still must mask the lhs/rhs expressions. Furthermore,
5847 the mask must be shifted to account for the shift done by
5848 make_bit_field_ref. */
5849 if ((ll_bitsize + ll_bitpos == rl_bitpos
5850 && lr_bitsize + lr_bitpos == rr_bitpos)
5851 || (ll_bitpos == rl_bitpos + rl_bitsize
5852 && lr_bitpos == rr_bitpos + rr_bitsize))
5853 {
5854 tree type;
5855
5856 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5857 ll_bitsize + rl_bitsize,
5858 MIN (ll_bitpos, rl_bitpos),
5859 ll_unsignedp, ll_reversep);
5860 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5861 lr_bitsize + rr_bitsize,
5862 MIN (lr_bitpos, rr_bitpos),
5863 lr_unsignedp, lr_reversep);
5864
5865 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5866 size_int (MIN (xll_bitpos, xrl_bitpos)));
5867 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5868 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5869
5870 /* Convert to the smaller type before masking out unwanted bits. */
5871 type = lntype;
5872 if (lntype != rntype)
5873 {
5874 if (lnbitsize > rnbitsize)
5875 {
5876 lhs = fold_convert_loc (loc, rntype, lhs);
5877 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5878 type = rntype;
5879 }
5880 else if (lnbitsize < rnbitsize)
5881 {
5882 rhs = fold_convert_loc (loc, lntype, rhs);
5883 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5884 type = lntype;
5885 }
5886 }
5887
5888 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5889 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5890
5891 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5892 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5893
5894 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5895 }
5896
5897 return 0;
5898 }
5899
5900 /* Handle the case of comparisons with constants. If there is something in
5901 common between the masks, those bits of the constants must be the same.
5902 If not, the condition is always false. Test for this to avoid generating
5903 incorrect code below. */
5904 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5905 if (! integer_zerop (result)
5906 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5907 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5908 {
5909 if (wanted_code == NE_EXPR)
5910 {
5911 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5912 return constant_boolean_node (true, truth_type);
5913 }
5914 else
5915 {
5916 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5917 return constant_boolean_node (false, truth_type);
5918 }
5919 }
5920
5921 /* Construct the expression we will return. First get the component
5922 reference we will make.  Unless the mask is all ones for the width of
5923 that field, perform the mask operation. Then compare with the
5924 merged constant. */
5925 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5926 ll_unsignedp || rl_unsignedp, ll_reversep);
5927
5928 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5929 if (! all_ones_mask_p (ll_mask, lnbitsize))
5930 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5931
5932 return build2_loc (loc, wanted_code, truth_type, result,
5933 const_binop (BIT_IOR_EXPR, l_const, r_const));
5934 }
5935 \f
5936 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5937 constant. */
5938
5939 static tree
5940 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5941 tree op0, tree op1)
5942 {
5943 tree arg0 = op0;
5944 enum tree_code op_code;
5945 tree comp_const;
5946 tree minmax_const;
5947 int consts_equal, consts_lt;
5948 tree inner;
5949
5950 STRIP_SIGN_NOPS (arg0);
5951
5952 op_code = TREE_CODE (arg0);
5953 minmax_const = TREE_OPERAND (arg0, 1);
5954 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5955 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5956 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5957 inner = TREE_OPERAND (arg0, 0);
5958
5959 /* If something does not permit us to optimize, return NULL_TREE.  */
5960 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5961 || TREE_CODE (comp_const) != INTEGER_CST
5962 || TREE_OVERFLOW (comp_const)
5963 || TREE_CODE (minmax_const) != INTEGER_CST
5964 || TREE_OVERFLOW (minmax_const))
5965 return NULL_TREE;
5966
5967 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5968 and GT_EXPR, doing the rest with recursive calls using logical
5969 simplifications. */
5970 switch (code)
5971 {
5972 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5973 {
5974 tree tem
5975 = optimize_minmax_comparison (loc,
5976 invert_tree_comparison (code, false),
5977 type, op0, op1);
5978 if (tem)
5979 return invert_truthvalue_loc (loc, tem);
5980 return NULL_TREE;
5981 }
5982
5983 case GE_EXPR:
5984 return
5985 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5986 optimize_minmax_comparison
5987 (loc, EQ_EXPR, type, arg0, comp_const),
5988 optimize_minmax_comparison
5989 (loc, GT_EXPR, type, arg0, comp_const));
5990
5991 case EQ_EXPR:
5992 if (op_code == MAX_EXPR && consts_equal)
5993 /* MAX (X, 0) == 0 -> X <= 0 */
5994 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5995
5996 else if (op_code == MAX_EXPR && consts_lt)
5997 /* MAX (X, 0) == 5 -> X == 5 */
5998 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5999
6000 else if (op_code == MAX_EXPR)
6001 /* MAX (X, 0) == -1 -> false */
6002 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6003
6004 else if (consts_equal)
6005 /* MIN (X, 0) == 0 -> X >= 0 */
6006 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6007
6008 else if (consts_lt)
6009 /* MIN (X, 0) == 5 -> false */
6010 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6011
6012 else
6013 /* MIN (X, 0) == -1 -> X == -1 */
6014 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6015
6016 case GT_EXPR:
6017 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6018 /* MAX (X, 0) > 0 -> X > 0
6019 MAX (X, 0) > 5 -> X > 5 */
6020 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6021
6022 else if (op_code == MAX_EXPR)
6023 /* MAX (X, 0) > -1 -> true */
6024 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6025
6026 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6027 /* MIN (X, 0) > 0 -> false
6028 MIN (X, 0) > 5 -> false */
6029 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6030
6031 else
6032 /* MIN (X, 0) > -1 -> X > -1 */
6033 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6034
6035 default:
6036 return NULL_TREE;
6037 }
6038 }
6039 \f
6040 /* T is an integer expression that is being multiplied or divided by a
6041 constant C, or reduced modulo C (CODE says which operation and what
6042 kind of divide or modulus).  See if we can eliminate that operation by folding it with
6043 other operations already in T. WIDE_TYPE, if non-null, is a type that
6044 should be used for the computation if wider than our type.
6045
6046 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6047 (X * 2) + (Y * 4). We must, however, be assured that either the original
6048 expression would not overflow or that overflow is undefined for the type
6049 in the language in question.
6050
6051 If we return a non-null expression, it is an equivalent form of the
6052 original computation, but need not be in the original type.
6053
6054 We set *STRICT_OVERFLOW_P to true if the return value depends on
6055 signed overflow being undefined. Otherwise we do not change
6056 *STRICT_OVERFLOW_P. */
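/* Worked example (illustrative): for T == X * 8 + Y * 16, C == 4 and
   CODE == TRUNC_DIV_EXPR, the recursion below yields X * 2 + Y * 4.
   The overflow caveat above is essential: with 32-bit wrapping
   arithmetic, X == 1 << 29 and Y == 0, X * 8 wraps to 0, so the
   original expression divided by 4 is 0 while X * 2 == 1 << 30, and
   the two forms would differ.  */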
6057
6058 static tree
6059 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6060 bool *strict_overflow_p)
6061 {
6062 /* To avoid exponential search depth, refuse to allow recursion past
6063 three levels. Beyond that (1) it's highly unlikely that we'll find
6064 something interesting and (2) we've probably processed it before
6065 when we built the inner expression. */
6066
6067 static int depth;
6068 tree ret;
6069
6070 if (depth > 3)
6071 return NULL;
6072
6073 depth++;
6074 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6075 depth--;
6076
6077 return ret;
6078 }
6079
6080 static tree
6081 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6082 bool *strict_overflow_p)
6083 {
6084 tree type = TREE_TYPE (t);
6085 enum tree_code tcode = TREE_CODE (t);
6086 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6087 > GET_MODE_SIZE (TYPE_MODE (type)))
6088 ? wide_type : type);
6089 tree t1, t2;
6090 int same_p = tcode == code;
6091 tree op0 = NULL_TREE, op1 = NULL_TREE;
6092 bool sub_strict_overflow_p;
6093
6094 /* Don't deal with constants of zero here; they confuse the code below. */
6095 if (integer_zerop (c))
6096 return NULL_TREE;
6097
6098 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6099 op0 = TREE_OPERAND (t, 0);
6100
6101 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6102 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6103
6104 /* Note that we need not handle conditional operations here since fold
6105 already handles those cases. So just do arithmetic here. */
6106 switch (tcode)
6107 {
6108 case INTEGER_CST:
6109 /* For a constant, we can always simplify if we are a multiply
6110 or (for divide and modulus) if it is a multiple of our constant. */
6111 if (code == MULT_EXPR
6112 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6113 {
6114 tree tem = const_binop (code, fold_convert (ctype, t),
6115 fold_convert (ctype, c));
6116 /* If the multiplication overflowed to INT_MIN then we lost sign
6117 information on it and a subsequent multiplication might
6118 spuriously overflow. See PR68142. */
6119 if (TREE_OVERFLOW (tem)
6120 && wi::eq_p (tem, wi::min_value (TYPE_PRECISION (ctype), SIGNED)))
6121 return NULL_TREE;
6122 return tem;
6123 }
6124 break;
6125
6126 CASE_CONVERT: case NON_LVALUE_EXPR:
6127 /* If op0 is an expression ... */
6128 if ((COMPARISON_CLASS_P (op0)
6129 || UNARY_CLASS_P (op0)
6130 || BINARY_CLASS_P (op0)
6131 || VL_EXP_CLASS_P (op0)
6132 || EXPRESSION_CLASS_P (op0))
6133 /* ... and has wrapping overflow, and its type is smaller
6134 than ctype, then we cannot pass through as widening. */
6135 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6136 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6137 && (TYPE_PRECISION (ctype)
6138 > TYPE_PRECISION (TREE_TYPE (op0))))
6139 /* ... or this is a truncation (t is narrower than op0),
6140 then we cannot pass through this narrowing. */
6141 || (TYPE_PRECISION (type)
6142 < TYPE_PRECISION (TREE_TYPE (op0)))
6143 /* ... or signedness changes for division or modulus,
6144 then we cannot pass through this conversion. */
6145 || (code != MULT_EXPR
6146 && (TYPE_UNSIGNED (ctype)
6147 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6148 /* ... or has undefined overflow while the converted to
6149 type has not, we cannot do the operation in the inner type
6150 as that would introduce undefined overflow. */
6151 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6152 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6153 && !TYPE_OVERFLOW_UNDEFINED (type))))
6154 break;
6155
6156 /* Pass the constant down and see if we can make a simplification. If
6157 we can, replace this expression with the inner simplification for
6158 possible later conversion to our or some other type. */
6159 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6160 && TREE_CODE (t2) == INTEGER_CST
6161 && !TREE_OVERFLOW (t2)
6162 && (0 != (t1 = extract_muldiv (op0, t2, code,
6163 code == MULT_EXPR
6164 ? ctype : NULL_TREE,
6165 strict_overflow_p))))
6166 return t1;
6167 break;
6168
6169 case ABS_EXPR:
6170 /* If widening the type changes it from signed to unsigned, then we
6171 must avoid building ABS_EXPR itself as unsigned. */
6172 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6173 {
6174 tree cstype = (*signed_type_for) (ctype);
6175 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6176 != 0)
6177 {
6178 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6179 return fold_convert (ctype, t1);
6180 }
6181 break;
6182 }
6183 /* If the constant is negative, we cannot simplify this. */
6184 if (tree_int_cst_sgn (c) == -1)
6185 break;
6186 /* FALLTHROUGH */
6187 case NEGATE_EXPR:
6188 /* For division and modulus, type can't be unsigned, as e.g.
6189 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6190 For signed types, even with wrapping overflow, this is fine. */
6191 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6192 break;
6193 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6194 != 0)
6195 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6196 break;
6197
6198 case MIN_EXPR: case MAX_EXPR:
6199 /* If widening the type changes the signedness, then we can't perform
6200 this optimization as that changes the result. */
6201 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6202 break;
6203
6204 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6205 sub_strict_overflow_p = false;
6206 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6207 &sub_strict_overflow_p)) != 0
6208 && (t2 = extract_muldiv (op1, c, code, wide_type,
6209 &sub_strict_overflow_p)) != 0)
6210 {
6211 if (tree_int_cst_sgn (c) < 0)
6212 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6213 if (sub_strict_overflow_p)
6214 *strict_overflow_p = true;
6215 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6216 fold_convert (ctype, t2));
6217 }
6218 break;
6219
6220 case LSHIFT_EXPR: case RSHIFT_EXPR:
6221 /* If the second operand is constant, this is a multiplication
6222 or floor division, by a power of two, so we can treat it that
6223 way unless the multiplier or divisor overflows. Signed
6224 left-shift overflow is implementation-defined rather than
6225 undefined in C90, so do not convert signed left shift into
6226 multiplication. */
6227 if (TREE_CODE (op1) == INTEGER_CST
6228 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6229 /* const_binop may not detect overflow correctly,
6230 so check for it explicitly here. */
6231 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6232 && 0 != (t1 = fold_convert (ctype,
6233 const_binop (LSHIFT_EXPR,
6234 size_one_node,
6235 op1)))
6236 && !TREE_OVERFLOW (t1))
6237 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6238 ? MULT_EXPR : FLOOR_DIV_EXPR,
6239 ctype,
6240 fold_convert (ctype, op0),
6241 t1),
6242 c, code, wide_type, strict_overflow_p);
6243 break;
6244
6245 case PLUS_EXPR: case MINUS_EXPR:
6246 /* See if we can eliminate the operation on both sides. If we can, we
6247 can return a new PLUS or MINUS. If we can't, the only remaining
6248 cases where we can do anything are if the second operand is a
6249 constant. */
6250 sub_strict_overflow_p = false;
6251 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6252 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6253 if (t1 != 0 && t2 != 0
6254 && (code == MULT_EXPR
6255 /* If not multiplication, we can only do this if both operands
6256 are divisible by c. */
6257 || (multiple_of_p (ctype, op0, c)
6258 && multiple_of_p (ctype, op1, c))))
6259 {
6260 if (sub_strict_overflow_p)
6261 *strict_overflow_p = true;
6262 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6263 fold_convert (ctype, t2));
6264 }
6265
6266 /* If this was a subtraction, negate OP1 and set it to be an addition.
6267 This simplifies the logic below. */
6268 if (tcode == MINUS_EXPR)
6269 {
6270 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6271 /* If OP1 was not easily negatable, the constant may be OP0. */
6272 if (TREE_CODE (op0) == INTEGER_CST)
6273 {
6274 std::swap (op0, op1);
6275 std::swap (t1, t2);
6276 }
6277 }
6278
6279 if (TREE_CODE (op1) != INTEGER_CST)
6280 break;
6281
6282 /* If either OP1 or C is negative, this optimization is not safe for
6283 some of the division and remainder types while for others we need
6284 to change the code. */
6285 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6286 {
6287 if (code == CEIL_DIV_EXPR)
6288 code = FLOOR_DIV_EXPR;
6289 else if (code == FLOOR_DIV_EXPR)
6290 code = CEIL_DIV_EXPR;
6291 else if (code != MULT_EXPR
6292 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6293 break;
6294 }
6295
6296 /* If it's a multiply or a division/modulus operation of a multiple
6297 of our constant, do the operation and verify it doesn't overflow. */
6298 if (code == MULT_EXPR
6299 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6300 {
6301 op1 = const_binop (code, fold_convert (ctype, op1),
6302 fold_convert (ctype, c));
6303 /* We allow the constant to overflow with wrapping semantics. */
6304 if (op1 == 0
6305 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6306 break;
6307 }
6308 else
6309 break;
6310
6311 /* If we have an unsigned type, we cannot widen the operation since it
6312 will change the result if the original computation overflowed. */
6313 if (TYPE_UNSIGNED (ctype) && ctype != type)
6314 break;
6315
6316 /* If we were able to eliminate our operation from the first side,
6317 apply our operation to the second side and reform the PLUS. */
6318 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6319 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6320
6321 /* The last case is if we are a multiply. In that case, we can
6322 apply the distributive law to commute the multiply and addition
6323 if the multiplication of the constants doesn't overflow
6324 and overflow is defined. With undefined overflow
6325 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6326 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6327 return fold_build2 (tcode, ctype,
6328 fold_build2 (code, ctype,
6329 fold_convert (ctype, op0),
6330 fold_convert (ctype, c)),
6331 op1);
6332
6333 break;
6334
6335 case MULT_EXPR:
6336 /* We have a special case here if we are doing something like
6337 (C * 8) % 4 since we know that's zero. */
6338 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6339 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6340 /* If the multiplication can overflow we cannot optimize this. */
6341 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6342 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6343 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6344 {
6345 *strict_overflow_p = true;
6346 return omit_one_operand (type, integer_zero_node, op0);
6347 }
6348
6349 /* ... fall through ... */
6350
6351 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6352 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6353 /* If we can extract our operation from the LHS, do so and return a
6354 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6355 do something only if the second operand is a constant. */
6356 if (same_p
6357 && (t1 = extract_muldiv (op0, c, code, wide_type,
6358 strict_overflow_p)) != 0)
6359 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6360 fold_convert (ctype, op1));
6361 else if (tcode == MULT_EXPR && code == MULT_EXPR
6362 && (t1 = extract_muldiv (op1, c, code, wide_type,
6363 strict_overflow_p)) != 0)
6364 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6365 fold_convert (ctype, t1));
6366 else if (TREE_CODE (op1) != INTEGER_CST)
6367 return 0;
6368
6369 /* If these are the same operation types, we can associate them
6370 assuming no overflow. */
6371 if (tcode == code)
6372 {
6373 bool overflow_p = false;
6374 bool overflow_mul_p;
6375 signop sign = TYPE_SIGN (ctype);
6376 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6377 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6378 if (overflow_mul_p
6379 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6380 overflow_p = true;
6381 if (!overflow_p)
6382 {
6383 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6384 TYPE_SIGN (TREE_TYPE (op1)));
6385 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6386 wide_int_to_tree (ctype, mul));
6387 }
6388 }
6389
6390 /* If these operations "cancel" each other, we have the main
6391 optimizations of this pass, which occur when either constant is a
6392 multiple of the other, in which case we replace this with an
6393 operation of either CODE or TCODE.
6394
6395 If we have an unsigned type, we cannot do this since it will change
6396 the result if the original computation overflowed. */
6397 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6398 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6399 || (tcode == MULT_EXPR
6400 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6401 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6402 && code != MULT_EXPR)))
6403 {
6404 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6405 {
6406 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6407 *strict_overflow_p = true;
6408 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6409 fold_convert (ctype,
6410 const_binop (TRUNC_DIV_EXPR,
6411 op1, c)));
6412 }
6413 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6414 {
6415 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6416 *strict_overflow_p = true;
6417 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6418 fold_convert (ctype,
6419 const_binop (TRUNC_DIV_EXPR,
6420 c, op1)));
6421 }
6422 }
6423 break;
6424
6425 default:
6426 break;
6427 }
6428
6429 return 0;
6430 }
6431 \f
6432 /* Return a node which has the indicated constant VALUE (either 0 or
6433 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6434 and is of the indicated TYPE. */
6435
6436 tree
6437 constant_boolean_node (bool value, tree type)
6438 {
6439 if (type == integer_type_node)
6440 return value ? integer_one_node : integer_zero_node;
6441 else if (type == boolean_type_node)
6442 return value ? boolean_true_node : boolean_false_node;
6443 else if (TREE_CODE (type) == VECTOR_TYPE)
6444 return build_vector_from_val (type,
6445 build_int_cst (TREE_TYPE (type),
6446 value ? -1 : 0));
6447 else
6448 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6449 }
6450
6451
6452 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6453 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6454 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6455 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6456 COND is the first argument to CODE; otherwise (as in the example
6457 given here), it is the second argument. TYPE is the type of the
6458 original expression. Return NULL_TREE if no simplification is
6459 possible. */
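/* For example (illustrative), with CODE == PLUS_EXPR this rewrites
	a + (b ? x : y)  into  b ? (a + x) : (a + y),
   which pays off when at least one arm then folds, e.g.
	1 + (b ? 0 : y)  becomes  b ? 1 : 1 + y.  */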
6460
6461 static tree
6462 fold_binary_op_with_conditional_arg (location_t loc,
6463 enum tree_code code,
6464 tree type, tree op0, tree op1,
6465 tree cond, tree arg, int cond_first_p)
6466 {
6467 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6468 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6469 tree test, true_value, false_value;
6470 tree lhs = NULL_TREE;
6471 tree rhs = NULL_TREE;
6472 enum tree_code cond_code = COND_EXPR;
6473
6474 if (TREE_CODE (cond) == COND_EXPR
6475 || TREE_CODE (cond) == VEC_COND_EXPR)
6476 {
6477 test = TREE_OPERAND (cond, 0);
6478 true_value = TREE_OPERAND (cond, 1);
6479 false_value = TREE_OPERAND (cond, 2);
6480 /* If this operand is an expression that throws, then it does not make
6481 sense to try to perform a logical or arithmetic operation
6482 involving it. */
6483 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6484 lhs = true_value;
6485 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6486 rhs = false_value;
6487 }
6488 else if (!(TREE_CODE (type) != VECTOR_TYPE
6489 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6490 {
6491 tree testtype = TREE_TYPE (cond);
6492 test = cond;
6493 true_value = constant_boolean_node (true, testtype);
6494 false_value = constant_boolean_node (false, testtype);
6495 }
6496 else
6497 /* Detect the case of mixing vector and scalar types - bail out. */
6498 return NULL_TREE;
6499
6500 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6501 cond_code = VEC_COND_EXPR;
6502
6503 /* This transformation is only worthwhile if we don't have to wrap ARG
6504 in a SAVE_EXPR and the operation can be simplified without recursing
6505 on at least one of the branches once it is pushed inside the COND_EXPR.  */
6506 if (!TREE_CONSTANT (arg)
6507 && (TREE_SIDE_EFFECTS (arg)
6508 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6509 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6510 return NULL_TREE;
6511
6512 arg = fold_convert_loc (loc, arg_type, arg);
6513 if (lhs == 0)
6514 {
6515 true_value = fold_convert_loc (loc, cond_type, true_value);
6516 if (cond_first_p)
6517 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6518 else
6519 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6520 }
6521 if (rhs == 0)
6522 {
6523 false_value = fold_convert_loc (loc, cond_type, false_value);
6524 if (cond_first_p)
6525 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6526 else
6527 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6528 }
6529
6530 /* Check that we have simplified at least one of the branches. */
6531 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6532 return NULL_TREE;
6533
6534 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6535 }
6536
6537 \f
6538 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6539
6540 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6541 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6542 ADDEND is the same as X.
6543
6544 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6545 and finite. The problematic cases are when X is zero, and its mode
6546 has signed zeros. In the case of rounding towards -infinity,
6547 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6548 modes, X + 0 is not the same as X because -0 + 0 is 0. */
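/* Concretely, under IEEE semantics: with X == -0.0, X + 0.0 evaluates
   to +0.0 in the default rounding mode, so the sign of X is lost; and
   under rounding towards -infinity, +0.0 - 0.0 evaluates to -0.0, so
   even X - 0 is unsafe when sign-dependent rounding must be honored.
   Hence the checks below.  */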
6549
6550 bool
6551 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6552 {
6553 if (!real_zerop (addend))
6554 return false;
6555
6556 /* Don't allow the fold with -fsignaling-nans. */
6557 if (HONOR_SNANS (element_mode (type)))
6558 return false;
6559
6560 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6561 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6562 return true;
6563
6564 /* In a vector or complex, we would need to check the sign of all zeros. */
6565 if (TREE_CODE (addend) != REAL_CST)
6566 return false;
6567
6568 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6569 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6570 negate = !negate;
6571
6572 /* The mode has signed zeros, and we have to honor their sign.
6573 In this situation, there is only one case we can return true for.
6574 X - 0 is the same as X unless rounding towards -infinity is
6575 supported. */
6576 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6577 }
6578
6579 /* Subroutine of fold() that optimizes comparisons of a division by
6580 a nonzero integer constant against an integer constant, i.e.
6581 X/C1 op C2.
6582
6583 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6584 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6585 are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6586
6587 The function returns the constant folded tree if a simplification
6588 can be made, and NULL_TREE otherwise. */
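/* Worked example (unsigned, illustrative): for X/3 == 2 the code below
   computes prod == 6, tmp == 2, lo == 6 and hi == 8, and the comparison
   folds to the range check 6 <= X && X <= 8, i.e. exactly the values of
   X whose truncated quotient by 3 is 2.  */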
6589
6590 static tree
6591 fold_div_compare (location_t loc,
6592 enum tree_code code, tree type, tree arg0, tree arg1)
6593 {
6594 tree prod, tmp, hi, lo;
6595 tree arg00 = TREE_OPERAND (arg0, 0);
6596 tree arg01 = TREE_OPERAND (arg0, 1);
6597 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6598 bool neg_overflow = false;
6599 bool overflow;
6600
6601 /* We have to do this the hard way to detect unsigned overflow.
6602 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6603 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6604 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6605 neg_overflow = false;
6606
6607 if (sign == UNSIGNED)
6608 {
6609 tmp = int_const_binop (MINUS_EXPR, arg01,
6610 build_int_cst (TREE_TYPE (arg01), 1));
6611 lo = prod;
6612
6613 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6614 val = wi::add (prod, tmp, sign, &overflow);
6615 hi = force_fit_type (TREE_TYPE (arg00), val,
6616 -1, overflow | TREE_OVERFLOW (prod));
6617 }
6618 else if (tree_int_cst_sgn (arg01) >= 0)
6619 {
6620 tmp = int_const_binop (MINUS_EXPR, arg01,
6621 build_int_cst (TREE_TYPE (arg01), 1));
6622 switch (tree_int_cst_sgn (arg1))
6623 {
6624 case -1:
6625 neg_overflow = true;
6626 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6627 hi = prod;
6628 break;
6629
6630 case 0:
6631 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6632 hi = tmp;
6633 break;
6634
6635 case 1:
6636 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6637 lo = prod;
6638 break;
6639
6640 default:
6641 gcc_unreachable ();
6642 }
6643 }
6644 else
6645 {
6646 /* A negative divisor reverses the relational operators. */
6647 code = swap_tree_comparison (code);
6648
6649 tmp = int_const_binop (PLUS_EXPR, arg01,
6650 build_int_cst (TREE_TYPE (arg01), 1));
6651 switch (tree_int_cst_sgn (arg1))
6652 {
6653 case -1:
6654 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6655 lo = prod;
6656 break;
6657
6658 case 0:
6659 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6660 lo = tmp;
6661 break;
6662
6663 case 1:
6664 neg_overflow = true;
6665 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6666 hi = prod;
6667 break;
6668
6669 default:
6670 gcc_unreachable ();
6671 }
6672 }
6673
6674 switch (code)
6675 {
6676 case EQ_EXPR:
6677 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6678 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6679 if (TREE_OVERFLOW (hi))
6680 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6681 if (TREE_OVERFLOW (lo))
6682 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6683 return build_range_check (loc, type, arg00, 1, lo, hi);
6684
6685 case NE_EXPR:
6686 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6687 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6688 if (TREE_OVERFLOW (hi))
6689 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6690 if (TREE_OVERFLOW (lo))
6691 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6692 return build_range_check (loc, type, arg00, 0, lo, hi);
6693
6694 case LT_EXPR:
6695 if (TREE_OVERFLOW (lo))
6696 {
6697 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6698 return omit_one_operand_loc (loc, type, tmp, arg00);
6699 }
6700 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6701
6702 case LE_EXPR:
6703 if (TREE_OVERFLOW (hi))
6704 {
6705 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6706 return omit_one_operand_loc (loc, type, tmp, arg00);
6707 }
6708 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6709
6710 case GT_EXPR:
6711 if (TREE_OVERFLOW (hi))
6712 {
6713 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6714 return omit_one_operand_loc (loc, type, tmp, arg00);
6715 }
6716 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6717
6718 case GE_EXPR:
6719 if (TREE_OVERFLOW (lo))
6720 {
6721 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6722 return omit_one_operand_loc (loc, type, tmp, arg00);
6723 }
6724 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6725
6726 default:
6727 break;
6728 }
6729
6730 return NULL_TREE;
6731 }
6732
6733
6734 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6735 equality/inequality test, then return a simplified form of the test
6736 using a sign test.  Otherwise return NULL.  TYPE is the desired
6737 result type. */
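/* For example (illustrative): if A is a 32-bit int, then
	(A & 0x80000000) != 0  becomes  A < 0
   and
	(A & 0x80000000) == 0  becomes  A >= 0,
   since the mask is exactly the sign bit of A's type.  */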
6738
6739 static tree
6740 fold_single_bit_test_into_sign_test (location_t loc,
6741 enum tree_code code, tree arg0, tree arg1,
6742 tree result_type)
6743 {
6744 /* If this is testing a single bit, we can optimize the test. */
6745 if ((code == NE_EXPR || code == EQ_EXPR)
6746 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6747 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6748 {
6749 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6750 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6751 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6752
6753 if (arg00 != NULL_TREE
6754 /* This is only a win if casting to a signed type is cheap,
6755 i.e. when arg00's type is not a partial mode. */
6756 && TYPE_PRECISION (TREE_TYPE (arg00))
6757 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6758 {
6759 tree stype = signed_type_for (TREE_TYPE (arg00));
6760 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6761 result_type,
6762 fold_convert_loc (loc, stype, arg00),
6763 build_int_cst (stype, 0));
6764 }
6765 }
6766
6767 return NULL_TREE;
6768 }
6769
6770 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6771 equality/inequality test, then return a simplified form of
6772 the test using shifts and logical operations. Otherwise return
6773 NULL. TYPE is the desired result type. */
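/* For example (illustrative), when the sign-bit form above does not
   apply: testing bit 4,
	(flags & 0x10) != 0  becomes  (flags >> 4) & 1
   and
	(flags & 0x10) == 0  becomes  ((flags >> 4) ^ 1) & 1.  */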
6774
6775 tree
6776 fold_single_bit_test (location_t loc, enum tree_code code,
6777 tree arg0, tree arg1, tree result_type)
6778 {
6779 /* If this is testing a single bit, we can optimize the test. */
6780 if ((code == NE_EXPR || code == EQ_EXPR)
6781 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6782 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6783 {
6784 tree inner = TREE_OPERAND (arg0, 0);
6785 tree type = TREE_TYPE (arg0);
6786 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6787 machine_mode operand_mode = TYPE_MODE (type);
6788 int ops_unsigned;
6789 tree signed_type, unsigned_type, intermediate_type;
6790 tree tem, one;
6791
6792 /* First, see if we can fold the single bit test into a sign-bit
6793 test. */
6794 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6795 result_type);
6796 if (tem)
6797 return tem;
6798
6799 /* Otherwise we have (A & C) != 0 where C is a single bit,
6800 convert that into ((A >> C2) & 1), where C2 = log2(C).
6801 Similarly for (A & C) == 0. */
6802
6803 /* If INNER is a right shift of a constant and it plus BITNUM does
6804 not overflow, adjust BITNUM and INNER. */
6805 if (TREE_CODE (inner) == RSHIFT_EXPR
6806 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6807 && bitnum < TYPE_PRECISION (type)
6808 && wi::ltu_p (TREE_OPERAND (inner, 1),
6809 TYPE_PRECISION (type) - bitnum))
6810 {
6811 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6812 inner = TREE_OPERAND (inner, 0);
6813 }
6814
6815 /* If we are going to be able to omit the AND below, we must do our
6816 operations as unsigned. If we must use the AND, we have a choice.
6817 Normally unsigned is faster, but for some machines signed is. */
6818 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6819 && !flag_syntax_only) ? 0 : 1;
6820
6821 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6822 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6823 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6824 inner = fold_convert_loc (loc, intermediate_type, inner);
6825
6826 if (bitnum != 0)
6827 inner = build2 (RSHIFT_EXPR, intermediate_type,
6828 inner, size_int (bitnum));
6829
6830 one = build_int_cst (intermediate_type, 1);
6831
6832 if (code == EQ_EXPR)
6833 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6834
6835 /* Put the AND last so it can combine with more things. */
6836 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6837
6838 /* Make sure to return the proper type. */
6839 inner = fold_convert_loc (loc, result_type, inner);
6840
6841 return inner;
6842 }
6843 return NULL_TREE;
6844 }
6845
6846 /* Check whether we are allowed to reorder operands arg0 and arg1,
6847 such that the evaluation of arg1 occurs before arg0. */
6848
6849 static bool
6850 reorder_operands_p (const_tree arg0, const_tree arg1)
6851 {
6852 if (! flag_evaluation_order)
6853 return true;
6854 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6855 return true;
6856 return ! TREE_SIDE_EFFECTS (arg0)
6857 && ! TREE_SIDE_EFFECTS (arg1);
6858 }
6859
6860 /* Test whether it is preferable to swap two operands, ARG0 and
6861 ARG1, for example because ARG0 is an integer constant and ARG1
6862 isn't. If REORDER is true, only recommend swapping if we can
6863 evaluate the operands in reverse order. */
6864
6865 bool
6866 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6867 {
6868 if (CONSTANT_CLASS_P (arg1))
6869 return 0;
6870 if (CONSTANT_CLASS_P (arg0))
6871 return 1;
6872
6873 STRIP_NOPS (arg0);
6874 STRIP_NOPS (arg1);
6875
6876 if (TREE_CONSTANT (arg1))
6877 return 0;
6878 if (TREE_CONSTANT (arg0))
6879 return 1;
6880
6881 if (reorder && flag_evaluation_order
6882 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6883 return 0;
6884
6885 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6886 for commutative and comparison operators. Ensuring a canonical
6887 form allows the optimizers to find additional redundancies without
6888 having to explicitly check for both orderings. */
6889 if (TREE_CODE (arg0) == SSA_NAME
6890 && TREE_CODE (arg1) == SSA_NAME
6891 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6892 return 1;
6893
6894 /* Put SSA_NAMEs last. */
6895 if (TREE_CODE (arg1) == SSA_NAME)
6896 return 0;
6897 if (TREE_CODE (arg0) == SSA_NAME)
6898 return 1;
6899
6900 /* Put variables last. */
6901 if (DECL_P (arg1))
6902 return 0;
6903 if (DECL_P (arg0))
6904 return 1;
6905
6906 return 0;
6907 }
6908
6909
6910 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6911 means A >= Y && A != MAX, but in this case we know that
6912 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
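/* For example (illustrative): in  i < n && i + 1 > j  the bound i < n
   guarantees that i is not the maximum value of its type, so i + 1
   cannot overflow and the second test can safely become i >= j.  */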
6913
6914 static tree
6915 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6916 {
6917 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6918
6919 if (TREE_CODE (bound) == LT_EXPR)
6920 a = TREE_OPERAND (bound, 0);
6921 else if (TREE_CODE (bound) == GT_EXPR)
6922 a = TREE_OPERAND (bound, 1);
6923 else
6924 return NULL_TREE;
6925
6926 typea = TREE_TYPE (a);
6927 if (!INTEGRAL_TYPE_P (typea)
6928 && !POINTER_TYPE_P (typea))
6929 return NULL_TREE;
6930
6931 if (TREE_CODE (ineq) == LT_EXPR)
6932 {
6933 a1 = TREE_OPERAND (ineq, 1);
6934 y = TREE_OPERAND (ineq, 0);
6935 }
6936 else if (TREE_CODE (ineq) == GT_EXPR)
6937 {
6938 a1 = TREE_OPERAND (ineq, 0);
6939 y = TREE_OPERAND (ineq, 1);
6940 }
6941 else
6942 return NULL_TREE;
6943
6944 if (TREE_TYPE (a1) != typea)
6945 return NULL_TREE;
6946
6947 if (POINTER_TYPE_P (typea))
6948 {
6949 /* Convert the pointers to a signed integer type before taking the difference. */
6950 tree ta = fold_convert_loc (loc, ssizetype, a);
6951 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6952 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6953 }
6954 else
6955 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6956
6957 if (!diff || !integer_onep (diff))
6958 return NULL_TREE;
6959
6960 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6961 }
6962
6963 /* Fold a sum or difference of at least one multiplication.
6964 Returns the folded tree or NULL if no simplification could be made. */
6965
6966 static tree
6967 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6968 tree arg0, tree arg1)
6969 {
6970 tree arg00, arg01, arg10, arg11;
6971 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6972
6973 /* (A * C) +- (B * C) -> (A+-B) * C.
6974 (A * C) +- A -> A * (C+-1).
6975 We are most concerned about the case where C is a constant,
6976 but other combinations show up during loop reduction. Since
6977 it is not difficult, try all four possibilities. */
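/* For example, (a * 4) + (b * 4) shares the multiplicand 4 and folds
   to (a + b) * 4, while (a * c) - a is treated as
   (a * c) - (a * 1) and folds to a * (c - 1).  */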
6978
6979 if (TREE_CODE (arg0) == MULT_EXPR)
6980 {
6981 arg00 = TREE_OPERAND (arg0, 0);
6982 arg01 = TREE_OPERAND (arg0, 1);
6983 }
6984 else if (TREE_CODE (arg0) == INTEGER_CST)
6985 {
6986 arg00 = build_one_cst (type);
6987 arg01 = arg0;
6988 }
6989 else
6990 {
6991 /* We cannot generate constant 1 for fract. */
6992 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6993 return NULL_TREE;
6994 arg00 = arg0;
6995 arg01 = build_one_cst (type);
6996 }
6997 if (TREE_CODE (arg1) == MULT_EXPR)
6998 {
6999 arg10 = TREE_OPERAND (arg1, 0);
7000 arg11 = TREE_OPERAND (arg1, 1);
7001 }
7002 else if (TREE_CODE (arg1) == INTEGER_CST)
7003 {
7004 arg10 = build_one_cst (type);
7005 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7006 the purpose of this canonicalization. */
7007 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7008 && negate_expr_p (arg1)
7009 && code == PLUS_EXPR)
7010 {
7011 arg11 = negate_expr (arg1);
7012 code = MINUS_EXPR;
7013 }
7014 else
7015 arg11 = arg1;
7016 }
7017 else
7018 {
7019 /* We cannot generate constant 1 for fract. */
7020 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7021 return NULL_TREE;
7022 arg10 = arg1;
7023 arg11 = build_one_cst (type);
7024 }
7025 same = NULL_TREE;
7026
7027 if (operand_equal_p (arg01, arg11, 0))
7028 same = arg01, alt0 = arg00, alt1 = arg10;
7029 else if (operand_equal_p (arg00, arg10, 0))
7030 same = arg00, alt0 = arg01, alt1 = arg11;
7031 else if (operand_equal_p (arg00, arg11, 0))
7032 same = arg00, alt0 = arg01, alt1 = arg10;
7033 else if (operand_equal_p (arg01, arg10, 0))
7034 same = arg01, alt0 = arg00, alt1 = arg11;
7035
7036 /* No identical multiplicands; see if we can find a common
7037 power-of-two factor in non-power-of-two multiplies. This
7038 can help in multi-dimensional array access. */
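/* For example, in i * 12 + j * 4 the constant 4 is a power of two
   dividing 12, so the sum is rewritten as (i * 3) * 4 + j * 4 and
   folded to (i * 3 + j) * 4, exposing the common factor.  */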
7039 else if (tree_fits_shwi_p (arg01)
7040 && tree_fits_shwi_p (arg11))
7041 {
7042 HOST_WIDE_INT int01, int11, tmp;
7043 bool swap = false;
7044 tree maybe_same;
7045 int01 = tree_to_shwi (arg01);
7046 int11 = tree_to_shwi (arg11);
7047
7048 /* Move min of absolute values to int11. */
7049 if (absu_hwi (int01) < absu_hwi (int11))
7050 {
7051 tmp = int01, int01 = int11, int11 = tmp;
7052 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7053 maybe_same = arg01;
7054 swap = true;
7055 }
7056 else
7057 maybe_same = arg11;
7058
7059 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7060 /* The remainder should not be a constant, otherwise we
7061 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7062 increase the number of multiplications necessary. */
7063 && TREE_CODE (arg10) != INTEGER_CST)
7064 {
7065 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7066 build_int_cst (TREE_TYPE (arg00),
7067 int01 / int11));
7068 alt1 = arg10;
7069 same = maybe_same;
7070 if (swap)
7071 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7072 }
7073 }
7074
7075 if (same)
7076 return fold_build2_loc (loc, MULT_EXPR, type,
7077 fold_build2_loc (loc, code, type,
7078 fold_convert_loc (loc, type, alt0),
7079 fold_convert_loc (loc, type, alt1)),
7080 fold_convert_loc (loc, type, same));
7081
7082 return NULL_TREE;
7083 }
7084
7085 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7086 specified by EXPR into the buffer PTR of length LEN bytes.
7087 Return the number of bytes placed in the buffer, or zero
7088 upon failure. */
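/* Illustration (assuming 8-bit units and a little-endian target with
   UNITS_PER_WORD >= 4): a 32-bit INTEGER_CST with value 0x12345678 is
   encoded as the bytes { 0x78, 0x56, 0x34, 0x12 }, and with OFF == -1
   and LEN >= 4 the function returns 4.  */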
7089
7090 static int
7091 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7092 {
7093 tree type = TREE_TYPE (expr);
7094 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7095 int byte, offset, word, words;
7096 unsigned char value;
7097
7098 if ((off == -1 && total_bytes > len)
7099 || off >= total_bytes)
7100 return 0;
7101 if (off == -1)
7102 off = 0;
7103 words = total_bytes / UNITS_PER_WORD;
7104
7105 for (byte = 0; byte < total_bytes; byte++)
7106 {
7107 int bitpos = byte * BITS_PER_UNIT;
7108 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7109 number of bytes. */
7110 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7111
7112 if (total_bytes > UNITS_PER_WORD)
7113 {
7114 word = byte / UNITS_PER_WORD;
7115 if (WORDS_BIG_ENDIAN)
7116 word = (words - 1) - word;
7117 offset = word * UNITS_PER_WORD;
7118 if (BYTES_BIG_ENDIAN)
7119 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7120 else
7121 offset += byte % UNITS_PER_WORD;
7122 }
7123 else
7124 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7125 if (offset >= off
7126 && offset - off < len)
7127 ptr[offset - off] = value;
7128 }
7129 return MIN (len, total_bytes - off);
7130 }
7131
7132
7133 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7134 specified by EXPR into the buffer PTR of length LEN bytes.
7135 Return the number of bytes placed in the buffer, or zero
7136 upon failure. */
7137
7138 static int
7139 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7140 {
7141 tree type = TREE_TYPE (expr);
7142 machine_mode mode = TYPE_MODE (type);
7143 int total_bytes = GET_MODE_SIZE (mode);
7144 FIXED_VALUE_TYPE value;
7145 tree i_value, i_type;
7146
7147 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7148 return 0;
7149
7150 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7151
7152 if (NULL_TREE == i_type
7153     || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7154 return 0;
7155
7156 value = TREE_FIXED_CST (expr);
7157 i_value = double_int_to_tree (i_type, value.data);
7158
7159 return native_encode_int (i_value, ptr, len, off);
7160 }
7161
7162
7163 /* Subroutine of native_encode_expr. Encode the REAL_CST
7164 specified by EXPR into the buffer PTR of length LEN bytes.
7165 Return the number of bytes placed in the buffer, or zero
7166 upon failure. */
7167
7168 static int
7169 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7170 {
7171 tree type = TREE_TYPE (expr);
7172 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7173 int byte, offset, word, words, bitpos;
7174 unsigned char value;
7175
7176 /* There are always 32 bits in each long, no matter the size of
7177 the host's long. We handle floating point representations with
7178 up to 192 bits. */
7179 long tmp[6];
7180
7181 if ((off == -1 && total_bytes > len)
7182 || off >= total_bytes)
7183 return 0;
7184 if (off == -1)
7185 off = 0;
7186 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7187
7188 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7189
7190 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7191 bitpos += BITS_PER_UNIT)
7192 {
7193 byte = (bitpos / BITS_PER_UNIT) & 3;
7194 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7195
7196 if (UNITS_PER_WORD < 4)
7197 {
7198 word = byte / UNITS_PER_WORD;
7199 if (WORDS_BIG_ENDIAN)
7200 word = (words - 1) - word;
7201 offset = word * UNITS_PER_WORD;
7202 if (BYTES_BIG_ENDIAN)
7203 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7204 else
7205 offset += byte % UNITS_PER_WORD;
7206 }
7207 else
7208 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7209 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7210 if (offset >= off
7211 && offset - off < len)
7212 ptr[offset - off] = value;
7213 }
7214 return MIN (len, total_bytes - off);
7215 }
7216
7217 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7218 specified by EXPR into the buffer PTR of length LEN bytes.
7219 Return the number of bytes placed in the buffer, or zero
7220 upon failure. */
7221
7222 static int
7223 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7224 {
7225 int rsize, isize;
7226 tree part;
7227
7228 part = TREE_REALPART (expr);
7229 rsize = native_encode_expr (part, ptr, len, off);
7230 if (off == -1
7231 && rsize == 0)
7232 return 0;
7233 part = TREE_IMAGPART (expr);
7234 if (off != -1)
7235 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7236 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7237 if (off == -1
7238 && isize != rsize)
7239 return 0;
7240 return rsize + isize;
7241 }
7242
7243
7244 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7245 specified by EXPR into the buffer PTR of length LEN bytes.
7246 Return the number of bytes placed in the buffer, or zero
7247 upon failure. */
7248
7249 static int
7250 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7251 {
7252 unsigned i, count;
7253 int size, offset;
7254 tree itype, elem;
7255
7256 offset = 0;
7257 count = VECTOR_CST_NELTS (expr);
7258 itype = TREE_TYPE (TREE_TYPE (expr));
7259 size = GET_MODE_SIZE (TYPE_MODE (itype));
7260 for (i = 0; i < count; i++)
7261 {
7262 if (off >= size)
7263 {
7264 off -= size;
7265 continue;
7266 }
7267 elem = VECTOR_CST_ELT (expr, i);
7268 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7269 if ((off == -1 && res != size)
7270 || res == 0)
7271 return 0;
7272 offset += res;
7273 if (offset >= len)
7274 return offset;
7275 if (off != -1)
7276 off = 0;
7277 }
7278 return offset;
7279 }
7280
7281
7282 /* Subroutine of native_encode_expr. Encode the STRING_CST
7283 specified by EXPR into the buffer PTR of length LEN bytes.
7284 Return the number of bytes placed in the buffer, or zero
7285 upon failure. */
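/* Illustration: encoding the STRING_CST "ab" (TREE_STRING_LENGTH 3,
   including the terminating NUL) for an 8-byte array type copies the
   three string bytes and zero-fills the remaining five, matching the
   constant's layout in target memory.  */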
7286
7287 static int
7288 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7289 {
7290 tree type = TREE_TYPE (expr);
7291 HOST_WIDE_INT total_bytes;
7292
7293 if (TREE_CODE (type) != ARRAY_TYPE
7294 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7295 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7296 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7297 return 0;
7298 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7299 if ((off == -1 && total_bytes > len)
7300 || off >= total_bytes)
7301 return 0;
7302 if (off == -1)
7303 off = 0;
7304 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7305 {
7306 int written = 0;
7307 if (off < TREE_STRING_LENGTH (expr))
7308 {
7309 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7310 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7311 }
7312 memset (ptr + written, 0,
7313 MIN (total_bytes - written, len - written));
7314 }
7315 else
7316 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7317 return MIN (total_bytes - off, len);
7318 }
7319
7320
7321 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7322 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7323 buffer PTR of length LEN bytes. If OFF is not -1 then start
7324 the encoding at byte offset OFF and encode at most LEN bytes.
7325 Return the number of bytes placed in the buffer, or zero upon failure. */
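/* A caller-side sketch, modelled on fold_view_convert_expr below
   (the buffer size is the caller's choice):

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf), -1);
     if (len != 0)
       ... buf[0 .. len-1] now holds the target representation ...  */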
7326
7327 int
7328 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7329 {
7330 /* We don't support starting at a negative offset, and -1 is special. */
7331 if (off < -1)
7332 return 0;
7333
7334 switch (TREE_CODE (expr))
7335 {
7336 case INTEGER_CST:
7337 return native_encode_int (expr, ptr, len, off);
7338
7339 case REAL_CST:
7340 return native_encode_real (expr, ptr, len, off);
7341
7342 case FIXED_CST:
7343 return native_encode_fixed (expr, ptr, len, off);
7344
7345 case COMPLEX_CST:
7346 return native_encode_complex (expr, ptr, len, off);
7347
7348 case VECTOR_CST:
7349 return native_encode_vector (expr, ptr, len, off);
7350
7351 case STRING_CST:
7352 return native_encode_string (expr, ptr, len, off);
7353
7354 default:
7355 return 0;
7356 }
7357 }
7358
7359
7360 /* Subroutine of native_interpret_expr. Interpret the contents of
7361 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7362 If the buffer cannot be interpreted, return NULL_TREE. */
7363
7364 static tree
7365 native_interpret_int (tree type, const unsigned char *ptr, int len)
7366 {
7367 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7368
7369 if (total_bytes > len
7370 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7371 return NULL_TREE;
7372
7373 wide_int result = wi::from_buffer (ptr, total_bytes);
7374
7375 return wide_int_to_tree (type, result);
7376 }
7377
7378
7379 /* Subroutine of native_interpret_expr. Interpret the contents of
7380 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7381 If the buffer cannot be interpreted, return NULL_TREE. */
7382
7383 static tree
7384 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7385 {
7386 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7387 double_int result;
7388 FIXED_VALUE_TYPE fixed_value;
7389
7390 if (total_bytes > len
7391 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7392 return NULL_TREE;
7393
7394 result = double_int::from_buffer (ptr, total_bytes);
7395 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7396
7397 return build_fixed (type, fixed_value);
7398 }
7399
7400
7401 /* Subroutine of native_interpret_expr. Interpret the contents of
7402 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7403 If the buffer cannot be interpreted, return NULL_TREE. */
7404
7405 static tree
7406 native_interpret_real (tree type, const unsigned char *ptr, int len)
7407 {
7408 machine_mode mode = TYPE_MODE (type);
7409 int total_bytes = GET_MODE_SIZE (mode);
7410 unsigned char value;
7411 /* There are always 32 bits in each long, no matter the size of
7412 the host's long. We handle floating point representations with
7413 up to 192 bits. */
7414 REAL_VALUE_TYPE r;
7415 long tmp[6];
7416
7418 if (total_bytes > len || total_bytes > 24)
7419 return NULL_TREE;
7420 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7421
7422 memset (tmp, 0, sizeof (tmp));
7423 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7424 bitpos += BITS_PER_UNIT)
7425 {
7426 /* Both OFFSET and BYTE index within a long;
7427 bitpos indexes the whole float. */
7428 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7429 if (UNITS_PER_WORD < 4)
7430 {
7431 int word = byte / UNITS_PER_WORD;
7432 if (WORDS_BIG_ENDIAN)
7433 word = (words - 1) - word;
7434 offset = word * UNITS_PER_WORD;
7435 if (BYTES_BIG_ENDIAN)
7436 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7437 else
7438 offset += byte % UNITS_PER_WORD;
7439 }
7440 else
7441 {
7442 offset = byte;
7443 if (BYTES_BIG_ENDIAN)
7444 {
7445 /* Reverse bytes within each long, or within the entire float
7446 if it's smaller than a long (for HFmode). */
7447 offset = MIN (3, total_bytes - 1) - offset;
7448 gcc_assert (offset >= 0);
7449 }
7450 }
7451 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7452
7453 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7454 }
7455
7456 real_from_target (&r, tmp, mode);
7457 return build_real (type, r);
7458 }
7459
7460
7461 /* Subroutine of native_interpret_expr. Interpret the contents of
7462 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7463 If the buffer cannot be interpreted, return NULL_TREE. */
7464
7465 static tree
7466 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7467 {
7468 tree etype, rpart, ipart;
7469 int size;
7470
7471 etype = TREE_TYPE (type);
7472 size = GET_MODE_SIZE (TYPE_MODE (etype));
7473 if (size * 2 > len)
7474 return NULL_TREE;
7475 rpart = native_interpret_expr (etype, ptr, size);
7476 if (!rpart)
7477 return NULL_TREE;
7478 ipart = native_interpret_expr (etype, ptr+size, size);
7479 if (!ipart)
7480 return NULL_TREE;
7481 return build_complex (type, rpart, ipart);
7482 }
7483
7484
7485 /* Subroutine of native_interpret_expr. Interpret the contents of
7486 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7487 If the buffer cannot be interpreted, return NULL_TREE. */
7488
7489 static tree
7490 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7491 {
7492 tree etype, elem;
7493 int i, size, count;
7494 tree *elements;
7495
7496 etype = TREE_TYPE (type);
7497 size = GET_MODE_SIZE (TYPE_MODE (etype));
7498 count = TYPE_VECTOR_SUBPARTS (type);
7499 if (size * count > len)
7500 return NULL_TREE;
7501
7502 elements = XALLOCAVEC (tree, count);
7503 for (i = count - 1; i >= 0; i--)
7504 {
7505 elem = native_interpret_expr (etype, ptr+(i*size), size);
7506 if (!elem)
7507 return NULL_TREE;
7508 elements[i] = elem;
7509 }
7510 return build_vector (type, elements);
7511 }
7512
7513
7514 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7515 the buffer PTR of length LEN as a constant of type TYPE. For
7516 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7517 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7518 return NULL_TREE. */
7519
7520 tree
7521 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7522 {
7523 switch (TREE_CODE (type))
7524 {
7525 case INTEGER_TYPE:
7526 case ENUMERAL_TYPE:
7527 case BOOLEAN_TYPE:
7528 case POINTER_TYPE:
7529 case REFERENCE_TYPE:
7530 return native_interpret_int (type, ptr, len);
7531
7532 case REAL_TYPE:
7533 return native_interpret_real (type, ptr, len);
7534
7535 case FIXED_POINT_TYPE:
7536 return native_interpret_fixed (type, ptr, len);
7537
7538 case COMPLEX_TYPE:
7539 return native_interpret_complex (type, ptr, len);
7540
7541 case VECTOR_TYPE:
7542 return native_interpret_vector (type, ptr, len);
7543
7544 default:
7545 return NULL_TREE;
7546 }
7547 }
7548
7549 /* Returns true if we can interpret the contents of a native encoding
7550 as TYPE. */
7551
7552 static bool
7553 can_native_interpret_type_p (tree type)
7554 {
7555 switch (TREE_CODE (type))
7556 {
7557 case INTEGER_TYPE:
7558 case ENUMERAL_TYPE:
7559 case BOOLEAN_TYPE:
7560 case POINTER_TYPE:
7561 case REFERENCE_TYPE:
7562 case FIXED_POINT_TYPE:
7563 case REAL_TYPE:
7564 case COMPLEX_TYPE:
7565 case VECTOR_TYPE:
7566 return true;
7567 default:
7568 return false;
7569 }
7570 }
7571
7572 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7573 TYPE at compile-time. If we're unable to perform the conversion,
7574 return NULL_TREE. */
7575
7576 static tree
7577 fold_view_convert_expr (tree type, tree expr)
7578 {
7579 /* We support up to 512-bit values (for V8DFmode). */
7580 unsigned char buffer[64];
7581 int len;
7582
7583 /* Check that the host and target are sane. */
7584 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7585 return NULL_TREE;
7586
7587 len = native_encode_expr (expr, buffer, sizeof (buffer));
7588 if (len == 0)
7589 return NULL_TREE;
7590
7591 return native_interpret_expr (type, buffer, len);
7592 }
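/* For example, on a target whose floats are 32-bit IEEE values,
   VIEW_CONVERT_EXPR<int>(1.0f) folds to the INTEGER_CST 0x3f800000:
   the REAL_CST is encoded into its target bytes and reinterpreted
   as an integer.  */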
7593
7594 /* Build an expression for the address of T. Folds away INDIRECT_REF
7595 to avoid confusing the gimplify process. */
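/* For instance, taking the address of *p yields p itself (with a
   no-op cast to PTRTYPE if the types differ), and &MEM_REF[p, 0]
   likewise folds back to p.  */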
7596
7597 tree
7598 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7599 {
7600 /* The size of the object is not relevant when talking about its address. */
7601 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7602 t = TREE_OPERAND (t, 0);
7603
7604 if (TREE_CODE (t) == INDIRECT_REF)
7605 {
7606 t = TREE_OPERAND (t, 0);
7607
7608 if (TREE_TYPE (t) != ptrtype)
7609 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7610 }
7611 else if (TREE_CODE (t) == MEM_REF
7612 && integer_zerop (TREE_OPERAND (t, 1)))
7613 return TREE_OPERAND (t, 0);
7614 else if (TREE_CODE (t) == MEM_REF
7615 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7616 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7617 TREE_OPERAND (t, 0),
7618 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7619 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7620 {
7621 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7622
7623 if (TREE_TYPE (t) != ptrtype)
7624 t = fold_convert_loc (loc, ptrtype, t);
7625 }
7626 else
7627 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7628
7629 return t;
7630 }
7631
7632 /* Build an expression for the address of T. */
7633
7634 tree
7635 build_fold_addr_expr_loc (location_t loc, tree t)
7636 {
7637 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7638
7639 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7640 }
7641
7642 /* Fold a unary expression of code CODE and type TYPE with operand
7643 OP0. Return the folded expression if folding is successful.
7644 Otherwise, return NULL_TREE. */
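/* For example, unary codes distribute over conditionals:
   -(p ? a : b) becomes p ? -a : -b, subject to the conversion
   heuristics in the COND_EXPR handling below.  */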
7645
7646 tree
7647 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7648 {
7649 tree tem;
7650 tree arg0;
7651 enum tree_code_class kind = TREE_CODE_CLASS (code);
7652
7653 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7654 && TREE_CODE_LENGTH (code) == 1);
7655
7656 arg0 = op0;
7657 if (arg0)
7658 {
7659 if (CONVERT_EXPR_CODE_P (code)
7660 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7661 {
7662 /* Don't use STRIP_NOPS, because signedness of argument type
7663 matters. */
7664 STRIP_SIGN_NOPS (arg0);
7665 }
7666 else
7667 {
7668 /* Strip any conversions that don't change the mode. This
7669 is safe for every expression, except for a comparison
7670 expression because its signedness is derived from its
7671 operands.
7672
7673 Note that this is done as an internal manipulation within
7674 the constant folder, in order to find the simplest
7675 representation of the arguments so that their form can be
7676 studied. In any case, the appropriate type conversions
7677 should be put back in the tree that will get out of the
7678 constant folder. */
7679 STRIP_NOPS (arg0);
7680 }
7681
7682 if (CONSTANT_CLASS_P (arg0))
7683 {
7684 tree tem = const_unop (code, type, arg0);
7685 if (tem)
7686 {
7687 if (TREE_TYPE (tem) != type)
7688 tem = fold_convert_loc (loc, type, tem);
7689 return tem;
7690 }
7691 }
7692 }
7693
7694 tem = generic_simplify (loc, code, type, op0);
7695 if (tem)
7696 return tem;
7697
7698 if (TREE_CODE_CLASS (code) == tcc_unary)
7699 {
7700 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7701 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7702 fold_build1_loc (loc, code, type,
7703 fold_convert_loc (loc, TREE_TYPE (op0),
7704 TREE_OPERAND (arg0, 1))));
7705 else if (TREE_CODE (arg0) == COND_EXPR)
7706 {
7707 tree arg01 = TREE_OPERAND (arg0, 1);
7708 tree arg02 = TREE_OPERAND (arg0, 2);
7709 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7710 arg01 = fold_build1_loc (loc, code, type,
7711 fold_convert_loc (loc,
7712 TREE_TYPE (op0), arg01));
7713 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7714 arg02 = fold_build1_loc (loc, code, type,
7715 fold_convert_loc (loc,
7716 TREE_TYPE (op0), arg02));
7717 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7718 arg01, arg02);
7719
7720 /* If this was a conversion, and all we did was to move it
7721 inside the COND_EXPR, bring it back out. But leave it if
7722 it is a conversion from integer to integer and the
7723 result precision is no wider than a word since such a
7724 conversion is cheap and may be optimized away by combine,
7725 while it couldn't if it were outside the COND_EXPR. Then return
7726 so we don't get into an infinite recursion loop taking the
7727 conversion out and then back in. */
7728
7729 if ((CONVERT_EXPR_CODE_P (code)
7730 || code == NON_LVALUE_EXPR)
7731 && TREE_CODE (tem) == COND_EXPR
7732 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7733 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7734 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7735 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7736 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7737 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7738 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7739 && (INTEGRAL_TYPE_P
7740 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7741 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7742 || flag_syntax_only))
7743 tem = build1_loc (loc, code, type,
7744 build3 (COND_EXPR,
7745 TREE_TYPE (TREE_OPERAND
7746 (TREE_OPERAND (tem, 1), 0)),
7747 TREE_OPERAND (tem, 0),
7748 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7749 TREE_OPERAND (TREE_OPERAND (tem, 2),
7750 0)));
7751 return tem;
7752 }
7753 }
7754
7755 switch (code)
7756 {
7757 case NON_LVALUE_EXPR:
7758 if (!maybe_lvalue_p (op0))
7759 return fold_convert_loc (loc, type, op0);
7760 return NULL_TREE;
7761
7762 CASE_CONVERT:
7763 case FLOAT_EXPR:
7764 case FIX_TRUNC_EXPR:
7765 if (COMPARISON_CLASS_P (op0))
7766 {
7767 /* If we have (type) (a CMP b) and type is an integral type, return
7768 new expression involving the new type. Canonicalize
7769 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7770 non-integral type.
7771 Do not fold the result, as that would not simplify further and
7772 folding it again would lead to recursion. */
7773 if (TREE_CODE (type) == BOOLEAN_TYPE)
7774 return build2_loc (loc, TREE_CODE (op0), type,
7775 TREE_OPERAND (op0, 0),
7776 TREE_OPERAND (op0, 1));
7777 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7778 && TREE_CODE (type) != VECTOR_TYPE)
7779 return build3_loc (loc, COND_EXPR, type, op0,
7780 constant_boolean_node (true, type),
7781 constant_boolean_node (false, type));
7782 }
7783
7784 /* Handle (T *)&A.B.C for A being of type T and B and C
7785 living at offset zero. This occurs frequently in
7786 C++ upcasting and then accessing the base. */
7787 if (TREE_CODE (op0) == ADDR_EXPR
7788 && POINTER_TYPE_P (type)
7789 && handled_component_p (TREE_OPERAND (op0, 0)))
7790 {
7791 HOST_WIDE_INT bitsize, bitpos;
7792 tree offset;
7793 machine_mode mode;
7794 int unsignedp, reversep, volatilep;
7795 tree base
7796 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7797 &offset, &mode, &unsignedp, &reversep,
7798 &volatilep, false);
7799 /* If the reference was to a (constant) zero offset, we can use
7800 the address of the base if it has the same base type
7801 as the result type and the pointer type is unqualified. */
7802 if (! offset && bitpos == 0
7803 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7804 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7805 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7806 return fold_convert_loc (loc, type,
7807 build_fold_addr_expr_loc (loc, base));
7808 }
7809
7810 if (TREE_CODE (op0) == MODIFY_EXPR
7811 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7812 /* Detect assigning a bitfield. */
7813 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7814 && DECL_BIT_FIELD
7815 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7816 {
7817 /* Don't leave an assignment inside a conversion
7818 unless assigning a bitfield. */
7819 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7820 /* First do the assignment, then return converted constant. */
7821 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7822 TREE_NO_WARNING (tem) = 1;
7823 TREE_USED (tem) = 1;
7824 return tem;
7825 }
7826
7827 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7828 constants (if x has signed type, the sign bit cannot be set
7829 in c). This folds extension into the BIT_AND_EXPR.
7830 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7831 very likely don't have maximal range for their precision and this
7832 transformation effectively doesn't preserve non-maximal ranges. */
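/* For instance, with int x, (unsigned long) (x & 0x7f) becomes
   (unsigned long) x & 0x7f here: the mask leaves the sign bit of
   the narrower type clear, so extending before masking is safe.  */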
7833 if (TREE_CODE (type) == INTEGER_TYPE
7834 && TREE_CODE (op0) == BIT_AND_EXPR
7835 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7836 {
7837 tree and_expr = op0;
7838 tree and0 = TREE_OPERAND (and_expr, 0);
7839 tree and1 = TREE_OPERAND (and_expr, 1);
7840 int change = 0;
7841
7842 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7843 || (TYPE_PRECISION (type)
7844 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7845 change = 1;
7846 else if (TYPE_PRECISION (TREE_TYPE (and1))
7847 <= HOST_BITS_PER_WIDE_INT
7848 && tree_fits_uhwi_p (and1))
7849 {
7850 unsigned HOST_WIDE_INT cst;
7851
7852 cst = tree_to_uhwi (and1);
7853 cst &= HOST_WIDE_INT_M1U
7854 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7855 change = (cst == 0);
7856 if (change
7857 && !flag_syntax_only
7858 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7859 == ZERO_EXTEND))
7860 {
7861 tree uns = unsigned_type_for (TREE_TYPE (and0));
7862 and0 = fold_convert_loc (loc, uns, and0);
7863 and1 = fold_convert_loc (loc, uns, and1);
7864 }
7865 }
7866 if (change)
7867 {
7868 tem = force_fit_type (type, wi::to_widest (and1), 0,
7869 TREE_OVERFLOW (and1));
7870 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7871 fold_convert_loc (loc, type, and0), tem);
7872 }
7873 }
7874
7875 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7876 cast (T1)X will fold away. We assume that this happens when X itself
7877 is a cast. */
7878 if (POINTER_TYPE_P (type)
7879 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7880 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7881 {
7882 tree arg00 = TREE_OPERAND (arg0, 0);
7883 tree arg01 = TREE_OPERAND (arg0, 1);
7884
7885 return fold_build_pointer_plus_loc
7886 (loc, fold_convert_loc (loc, type, arg00), arg01);
7887 }
7888
7889 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7890 of the same precision, and X is an integer type not narrower than
7891 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7892 if (INTEGRAL_TYPE_P (type)
7893 && TREE_CODE (op0) == BIT_NOT_EXPR
7894 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7895 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7896 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7897 {
7898 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7899 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7900 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7901 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7902 fold_convert_loc (loc, type, tem));
7903 }
7904
7905 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7906 type of X and Y (integer types only). */
7907 if (INTEGRAL_TYPE_P (type)
7908 && TREE_CODE (op0) == MULT_EXPR
7909 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7910 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7911 {
7912 /* Be careful not to introduce new overflows. */
7913 tree mult_type;
7914 if (TYPE_OVERFLOW_WRAPS (type))
7915 mult_type = type;
7916 else
7917 mult_type = unsigned_type_for (type);
7918
7919 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7920 {
7921 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7922 fold_convert_loc (loc, mult_type,
7923 TREE_OPERAND (op0, 0)),
7924 fold_convert_loc (loc, mult_type,
7925 TREE_OPERAND (op0, 1)));
7926 return fold_convert_loc (loc, type, tem);
7927 }
7928 }
7929
7930 return NULL_TREE;
7931
7932 case VIEW_CONVERT_EXPR:
7933 if (TREE_CODE (op0) == MEM_REF)
7934 {
7935 tem = fold_build2_loc (loc, MEM_REF, type,
7936 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7937 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7938 return tem;
7939 }
7940
7941 return NULL_TREE;
7942
7943 case NEGATE_EXPR:
7944 tem = fold_negate_expr (loc, arg0);
7945 if (tem)
7946 return fold_convert_loc (loc, type, tem);
7947 return NULL_TREE;
7948
7949 case ABS_EXPR:
7950 /* Convert fabs((double)float) into (double)fabsf(float). */
7951 if (TREE_CODE (arg0) == NOP_EXPR
7952 && TREE_CODE (type) == REAL_TYPE)
7953 {
7954 tree targ0 = strip_float_extensions (arg0);
7955 if (targ0 != arg0)
7956 return fold_convert_loc (loc, type,
7957 fold_build1_loc (loc, ABS_EXPR,
7958 TREE_TYPE (targ0),
7959 targ0));
7960 }
7961 return NULL_TREE;
7962
7963 case BIT_NOT_EXPR:
7964 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7965 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7966 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7967 fold_convert_loc (loc, type,
7968 TREE_OPERAND (arg0, 0)))))
7969 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7970 fold_convert_loc (loc, type,
7971 TREE_OPERAND (arg0, 1)));
7972 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7973 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7974 fold_convert_loc (loc, type,
7975 TREE_OPERAND (arg0, 1)))))
7976 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7977 fold_convert_loc (loc, type,
7978 TREE_OPERAND (arg0, 0)), tem);
7979
7980 return NULL_TREE;
7981
7982 case TRUTH_NOT_EXPR:
7983 /* Note that the operand of this must be an int
7984 and its values must be 0 or 1.
7985 ("true" is a fixed value perhaps depending on the language,
7986 but we don't handle values other than 1 correctly yet.) */
7987 tem = fold_truth_not_expr (loc, arg0);
7988 if (!tem)
7989 return NULL_TREE;
7990 return fold_convert_loc (loc, type, tem);
7991
7992 case INDIRECT_REF:
7993 /* Fold *&X to X if X is an lvalue. */
7994 if (TREE_CODE (op0) == ADDR_EXPR)
7995 {
7996 tree op00 = TREE_OPERAND (op0, 0);
7997 if ((TREE_CODE (op00) == VAR_DECL
7998 || TREE_CODE (op00) == PARM_DECL
7999 || TREE_CODE (op00) == RESULT_DECL)
8000 && !TREE_READONLY (op00))
8001 return op00;
8002 }
8003 return NULL_TREE;
8004
8005 default:
8006 return NULL_TREE;
8007 } /* switch (code) */
8008 }
8009
8010
8011 /* If the operation was a conversion do _not_ mark a resulting constant
8012 with TREE_OVERFLOW if the original constant was not. These conversions
8013 have implementation defined behavior and retaining the TREE_OVERFLOW
8014 flag here would confuse later passes such as VRP. */
8015 tree
8016 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8017 tree type, tree op0)
8018 {
8019 tree res = fold_unary_loc (loc, code, type, op0);
8020 if (res
8021 && TREE_CODE (res) == INTEGER_CST
8022 && TREE_CODE (op0) == INTEGER_CST
8023 && CONVERT_EXPR_CODE_P (code))
8024 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8025
8026 return res;
8027 }
8028
8029 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8030 operands OP0 and OP1. LOC is the location of the resulting expression.
8031 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8032 Return the folded expression if folding is successful. Otherwise,
8033 return NULL_TREE. */
8034 static tree
8035 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8036 tree arg0, tree arg1, tree op0, tree op1)
8037 {
8038 tree tem;
8039
8040 /* We only do these simplifications if we are optimizing. */
8041 if (!optimize)
8042 return NULL_TREE;
8043
8044 /* Check for things like (A || B) && (A || C). We can convert this
8045 to A || (B && C). Note that either operator can be any of the four
8046 truth and/or operations and the transformation will still be
8047 valid. Also note that we only care about order for the
8048 ANDIF and ORIF operators. If B contains side effects, this
8049 might change the truth-value of A. */
8050 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8051 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8052 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8053 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8054 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8055 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8056 {
8057 tree a00 = TREE_OPERAND (arg0, 0);
8058 tree a01 = TREE_OPERAND (arg0, 1);
8059 tree a10 = TREE_OPERAND (arg1, 0);
8060 tree a11 = TREE_OPERAND (arg1, 1);
8061 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8062 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8063 && (code == TRUTH_AND_EXPR
8064 || code == TRUTH_OR_EXPR));
8065
8066 if (operand_equal_p (a00, a10, 0))
8067 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8068 fold_build2_loc (loc, code, type, a01, a11));
8069 else if (commutative && operand_equal_p (a00, a11, 0))
8070 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8071 fold_build2_loc (loc, code, type, a01, a10));
8072 else if (commutative && operand_equal_p (a01, a10, 0))
8073 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8074 fold_build2_loc (loc, code, type, a00, a11));
8075
8076 /* This case is tricky because we must either have commutative
8077 operators or else A10 must not have side-effects. */
8078
8079 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8080 && operand_equal_p (a01, a11, 0))
8081 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8082 fold_build2_loc (loc, code, type, a00, a10),
8083 a01);
8084 }
8085
8086 /* See if we can build a range comparison. */
8087 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8088 return tem;
8089
8090 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8091 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8092 {
8093 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8094 if (tem)
8095 return fold_build2_loc (loc, code, type, tem, arg1);
8096 }
8097
8098 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8099 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8100 {
8101 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8102 if (tem)
8103 return fold_build2_loc (loc, code, type, arg0, tem);
8104 }
8105
8106 /* Check for the possibility of merging component references. If our
8107 lhs is another similar operation, try to merge its rhs with our
8108 rhs. Then try to merge our lhs and rhs. */
8109 if (TREE_CODE (arg0) == code
8110 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8111 TREE_OPERAND (arg0, 1), arg1)))
8112 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8113
8114 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8115 return tem;
8116
8117 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8118 && (code == TRUTH_AND_EXPR
8119 || code == TRUTH_ANDIF_EXPR
8120 || code == TRUTH_OR_EXPR
8121 || code == TRUTH_ORIF_EXPR))
8122 {
8123 enum tree_code ncode, icode;
8124
8125 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8126 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8127 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8128
8129 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8130 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8131 We don't want to pack more than two leaves into a non-IF AND/OR
8132 expression.
8133 If the tree code of the left-hand operand isn't an AND/OR-IF code
8134 and isn't equal to IF-CODE, then we don't want to add the
8135 right-hand operand. If the inner right-hand side of the
8136 left-hand operand has side effects, or isn't simple, then we
8137 can't add to it, as otherwise we might destroy the if-sequence. */
8138 if (TREE_CODE (arg0) == icode
8139 && simple_operand_p_2 (arg1)
8140 /* Needed for sequence points, to handle trapping operations and
8141 side effects. */
8142 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8143 {
8144 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8145 arg1);
8146 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8147 tem);
8148 }
8149 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8150 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8151 else if (TREE_CODE (arg1) == icode
8152 && simple_operand_p_2 (arg0)
8153 /* Needed for sequence points, to handle trapping operations and
8154 side effects. */
8155 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8156 {
8157 tem = fold_build2_loc (loc, ncode, type,
8158 arg0, TREE_OPERAND (arg1, 0));
8159 return fold_build2_loc (loc, icode, type, tem,
8160 TREE_OPERAND (arg1, 1));
8161 }
8162 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8163 into (A OR B).
8164 For sequence point consistency, we need to check for trapping,
8165 and side-effects. */
8166 else if (code == icode && simple_operand_p_2 (arg0)
8167 && simple_operand_p_2 (arg1))
8168 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8169 }
8170
8171 return NULL_TREE;
8172 }
8173
8174 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8175 by changing CODE to reduce the magnitude of constants involved in
8176 ARG0 of the comparison.
8177 Returns a canonicalized comparison tree if a simplification was
8178 possible, otherwise returns NULL_TREE.
8179 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8180 valid if signed overflow is undefined. */
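/* For example, x - 3 < y canonicalizes to x - 2 <= y: the comparison
   code absorbs one unit of the constant's magnitude, which is valid
   only because signed overflow in x - 3 is assumed not to occur.  */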
8181
8182 static tree
8183 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8184 tree arg0, tree arg1,
8185 bool *strict_overflow_p)
8186 {
8187 enum tree_code code0 = TREE_CODE (arg0);
8188 tree t, cst0 = NULL_TREE;
8189 int sgn0;
8190
8191 /* Match A +- CST code arg1. We can change this only if overflow
8192 is undefined. */
8193 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8194 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8195 /* In principle pointers also have undefined overflow behavior,
8196 but that causes problems elsewhere. */
8197 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8198 && (code0 == MINUS_EXPR
8199 || code0 == PLUS_EXPR)
8200 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8201 return NULL_TREE;
8202
8203 /* Identify the constant in arg0 and its sign. */
8204 cst0 = TREE_OPERAND (arg0, 1);
8205 sgn0 = tree_int_cst_sgn (cst0);
8206
8207 /* Overflowed constants and zero will cause problems. */
8208 if (integer_zerop (cst0)
8209 || TREE_OVERFLOW (cst0))
8210 return NULL_TREE;
8211
8212 /* See if we can reduce the magnitude of the constant in
8213 arg0 by changing the comparison code. */
8214 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8215 if (code == LT_EXPR
8216 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8217 code = LE_EXPR;
8218 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8219 else if (code == GT_EXPR
8220 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8221 code = GE_EXPR;
8222 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8223 else if (code == LE_EXPR
8224 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8225 code = LT_EXPR;
8226 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8227 else if (code == GE_EXPR
8228 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8229 code = GT_EXPR;
8230 else
8231 return NULL_TREE;
8232 *strict_overflow_p = true;
8233
8234 /* Now build the constant reduced in magnitude. But not if that
8235 would produce one outside of its type's range. */
8236 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8237 && ((sgn0 == 1
8238 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8239 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8240 || (sgn0 == -1
8241 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8242 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8243 return NULL_TREE;
8244
8245 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8246 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8247 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8248 t = fold_convert (TREE_TYPE (arg1), t);
8249
8250 return fold_build2_loc (loc, code, type, t, arg1);
8251 }
8252
8253 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8254 overflow further. Try to decrease the magnitude of constants involved
8255 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8256 and put sole constants at the second argument position.
8257 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8258
8259 static tree
8260 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8261 tree arg0, tree arg1)
8262 {
8263 tree t;
8264 bool strict_overflow_p;
8265 const char * const warnmsg = G_("assuming signed overflow does not occur "
8266 "when reducing constant in comparison");
8267
8268 /* Try canonicalization by simplifying arg0. */
8269 strict_overflow_p = false;
8270 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8271 &strict_overflow_p);
8272 if (t)
8273 {
8274 if (strict_overflow_p)
8275 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8276 return t;
8277 }
8278
8279 /* Try canonicalization by simplifying arg1 using the swapped
8280 comparison. */
8281 code = swap_tree_comparison (code);
8282 strict_overflow_p = false;
8283 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8284 &strict_overflow_p);
8285 if (t && strict_overflow_p)
8286 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8287 return t;
8288 }
8289
8290 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8291 space. This is used to avoid issuing overflow warnings for
8292 expressions like &p->x which cannot wrap. */
8293
8294 static bool
8295 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8296 {
8297 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8298 return true;
8299
8300 if (bitpos < 0)
8301 return true;
8302
8303 wide_int wi_offset;
8304 int precision = TYPE_PRECISION (TREE_TYPE (base));
8305 if (offset == NULL_TREE)
8306 wi_offset = wi::zero (precision);
8307 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8308 return true;
8309 else
8310 wi_offset = offset;
8311
8312 bool overflow;
8313 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8314 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8315 if (overflow)
8316 return true;
8317
8318 if (!wi::fits_uhwi_p (total))
8319 return true;
8320
8321 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8322 if (size <= 0)
8323 return true;
8324
8325 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8326 array. */
8327 if (TREE_CODE (base) == ADDR_EXPR)
8328 {
8329 HOST_WIDE_INT base_size;
8330
8331 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8332 if (base_size > 0 && size < base_size)
8333 size = base_size;
8334 }
8335
8336 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8337 }
8338
8339 /* Subroutine of fold_binary. This routine performs all of the
8340 transformations that are common to the equality/inequality
8341 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8342 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8343 fold_binary should call fold_binary instead. Fold a comparison with
8344 tree code CODE and type TYPE with operands OP0 and OP1. Return
8345 the folded comparison or NULL_TREE. */
8346
8347 static tree
8348 fold_comparison (location_t loc, enum tree_code code, tree type,
8349 tree op0, tree op1)
8350 {
8351 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8352 tree arg0, arg1, tem;
8353
8354 arg0 = op0;
8355 arg1 = op1;
8356
8357 STRIP_SIGN_NOPS (arg0);
8358 STRIP_SIGN_NOPS (arg1);
8359
8360 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
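/* For example, x + 2 < 5 becomes x < 3; if the new constant would
   overflow, as in x + 1 < INT_MIN for signed x, the comparison
   instead folds to a constant false below.  */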
8361 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8362 && (equality_code
8363 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8364 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8365 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8366 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8367 && TREE_CODE (arg1) == INTEGER_CST
8368 && !TREE_OVERFLOW (arg1))
8369 {
8370 const enum tree_code
8371 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8372 tree const1 = TREE_OPERAND (arg0, 1);
8373 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8374 tree variable = TREE_OPERAND (arg0, 0);
8375 tree new_const = int_const_binop (reverse_op, const2, const1);
8376
8377 /* If the constant operation overflowed this can be
8378 simplified as a comparison against INT_MAX/INT_MIN. */
8379 if (TREE_OVERFLOW (new_const)
8380 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8381 {
8382 int const1_sgn = tree_int_cst_sgn (const1);
8383 enum tree_code code2 = code;
8384
8385 /* Get the sign of the constant on the lhs if the
8386 operation were VARIABLE + CONST1. */
8387 if (TREE_CODE (arg0) == MINUS_EXPR)
8388 const1_sgn = -const1_sgn;
8389
8390 /* The sign of the constant determines if we overflowed
8391 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8392 Canonicalize to the INT_MIN overflow by swapping the comparison
8393 if necessary. */
8394 if (const1_sgn == -1)
8395 code2 = swap_tree_comparison (code);
8396
8397 /* We now can look at the canonicalized case
8398 VARIABLE + 1 CODE2 INT_MIN
8399 and decide on the result. */
8400 switch (code2)
8401 {
8402 case EQ_EXPR:
8403 case LT_EXPR:
8404 case LE_EXPR:
8405 return
8406 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8407
8408 case NE_EXPR:
8409 case GE_EXPR:
8410 case GT_EXPR:
8411 return
8412 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8413
8414 default:
8415 gcc_unreachable ();
8416 }
8417 }
8418 else
8419 {
8420 if (!equality_code)
8421 fold_overflow_warning ("assuming signed overflow does not occur "
8422 "when changing X +- C1 cmp C2 to "
8423 "X cmp C2 -+ C1",
8424 WARN_STRICT_OVERFLOW_COMPARISON);
8425 return fold_build2_loc (loc, code, type, variable, new_const);
8426 }
8427 }
8428
8429 /* For comparisons of pointers we can decompose them into a compile-time
8430 comparison of the base objects and the offsets into the object.
8431 This requires at least one operand being an ADDR_EXPR or a
8432 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
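/* For example, with 8-bit units, &s.f0 < &s.f4 for fields at byte
   offsets 0 and 4 of the same decl decomposes to equal bases with
   bit positions 0 and 32, and folds to constant true below.  */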
8433 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8434 && (TREE_CODE (arg0) == ADDR_EXPR
8435 || TREE_CODE (arg1) == ADDR_EXPR
8436 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8437 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8438 {
8439 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8440 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8441 machine_mode mode;
8442 int volatilep, reversep, unsignedp;
8443 bool indirect_base0 = false, indirect_base1 = false;
8444
8445 /* Get base and offset for the access. Strip ADDR_EXPR for
8446 get_inner_reference, but put it back by stripping INDIRECT_REF
8447 off the base object if possible. indirect_baseN will be true
8448 if baseN is not an address but refers to the object itself. */
8449 base0 = arg0;
8450 if (TREE_CODE (arg0) == ADDR_EXPR)
8451 {
8452 base0
8453 = get_inner_reference (TREE_OPERAND (arg0, 0),
8454 &bitsize, &bitpos0, &offset0, &mode,
8455 &unsignedp, &reversep, &volatilep, false);
8456 if (TREE_CODE (base0) == INDIRECT_REF)
8457 base0 = TREE_OPERAND (base0, 0);
8458 else
8459 indirect_base0 = true;
8460 }
8461 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8462 {
8463 base0 = TREE_OPERAND (arg0, 0);
8464 STRIP_SIGN_NOPS (base0);
8465 if (TREE_CODE (base0) == ADDR_EXPR)
8466 {
8467 base0
8468 = get_inner_reference (TREE_OPERAND (base0, 0),
8469 &bitsize, &bitpos0, &offset0, &mode,
8470 &unsignedp, &reversep, &volatilep,
8471 false);
8472 if (TREE_CODE (base0) == INDIRECT_REF)
8473 base0 = TREE_OPERAND (base0, 0);
8474 else
8475 indirect_base0 = true;
8476 }
8477 if (offset0 == NULL_TREE || integer_zerop (offset0))
8478 offset0 = TREE_OPERAND (arg0, 1);
8479 else
8480 offset0 = size_binop (PLUS_EXPR, offset0,
8481 TREE_OPERAND (arg0, 1));
8482 if (TREE_CODE (offset0) == INTEGER_CST)
8483 {
8484 offset_int tem = wi::sext (wi::to_offset (offset0),
8485 TYPE_PRECISION (sizetype));
8486 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8487 tem += bitpos0;
8488 if (wi::fits_shwi_p (tem))
8489 {
8490 bitpos0 = tem.to_shwi ();
8491 offset0 = NULL_TREE;
8492 }
8493 }
8494 }
8495
8496 base1 = arg1;
8497 if (TREE_CODE (arg1) == ADDR_EXPR)
8498 {
8499 base1
8500 = get_inner_reference (TREE_OPERAND (arg1, 0),
8501 &bitsize, &bitpos1, &offset1, &mode,
8502 &unsignedp, &reversep, &volatilep, false);
8503 if (TREE_CODE (base1) == INDIRECT_REF)
8504 base1 = TREE_OPERAND (base1, 0);
8505 else
8506 indirect_base1 = true;
8507 }
8508 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8509 {
8510 base1 = TREE_OPERAND (arg1, 0);
8511 STRIP_SIGN_NOPS (base1);
8512 if (TREE_CODE (base1) == ADDR_EXPR)
8513 {
8514 base1
8515 = get_inner_reference (TREE_OPERAND (base1, 0),
8516 &bitsize, &bitpos1, &offset1, &mode,
8517 &unsignedp, &reversep, &volatilep,
8518 false);
8519 if (TREE_CODE (base1) == INDIRECT_REF)
8520 base1 = TREE_OPERAND (base1, 0);
8521 else
8522 indirect_base1 = true;
8523 }
8524 if (offset1 == NULL_TREE || integer_zerop (offset1))
8525 offset1 = TREE_OPERAND (arg1, 1);
8526 else
8527 offset1 = size_binop (PLUS_EXPR, offset1,
8528 TREE_OPERAND (arg1, 1));
8529 if (TREE_CODE (offset1) == INTEGER_CST)
8530 {
8531 offset_int tem = wi::sext (wi::to_offset (offset1),
8532 TYPE_PRECISION (sizetype));
8533 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8534 tem += bitpos1;
8535 if (wi::fits_shwi_p (tem))
8536 {
8537 bitpos1 = tem.to_shwi ();
8538 offset1 = NULL_TREE;
8539 }
8540 }
8541 }
8542
8543 /* If we have equivalent bases we might be able to simplify. */
8544 if (indirect_base0 == indirect_base1
8545 && operand_equal_p (base0, base1,
8546 indirect_base0 ? OEP_ADDRESS_OF : 0))
8547 {
8548 /* We can fold this expression to a constant if the non-constant
8549 offset parts are equal. */
8550 if ((offset0 == offset1
8551 || (offset0 && offset1
8552 && operand_equal_p (offset0, offset1, 0)))
8553 && (code == EQ_EXPR
8554 || code == NE_EXPR
8555 || (indirect_base0 && DECL_P (base0))
8556 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8558 {
8559 if (!equality_code
8560 && bitpos0 != bitpos1
8561 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8562 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8563 fold_overflow_warning (("assuming pointer wraparound does not "
8564 "occur when comparing P +- C1 with "
8565 "P +- C2"),
8566 WARN_STRICT_OVERFLOW_CONDITIONAL);
8567
8568 switch (code)
8569 {
8570 case EQ_EXPR:
8571 return constant_boolean_node (bitpos0 == bitpos1, type);
8572 case NE_EXPR:
8573 return constant_boolean_node (bitpos0 != bitpos1, type);
8574 case LT_EXPR:
8575 return constant_boolean_node (bitpos0 < bitpos1, type);
8576 case LE_EXPR:
8577 return constant_boolean_node (bitpos0 <= bitpos1, type);
8578 case GE_EXPR:
8579 return constant_boolean_node (bitpos0 >= bitpos1, type);
8580 case GT_EXPR:
8581 return constant_boolean_node (bitpos0 > bitpos1, type);
8582 default:;
8583 }
8584 }
8585 /* We can simplify the comparison to a comparison of the variable
8586 offset parts if the constant offset parts are equal.
8587 Be careful to use signed sizetype here because otherwise we
8588 mess with array offsets in the wrong way. This is possible
8589 because pointer arithmetic is restricted to remain within an
8590 object and overflow on pointer differences is undefined as of
8591 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8592 else if (bitpos0 == bitpos1
8593 && (equality_code
8594 || (indirect_base0 && DECL_P (base0))
8595 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8596 {
8597 /* By converting to signed sizetype we cover middle-end pointer
8598 arithmetic, which operates on unsigned pointer types of sizetype
8599 size, and ARRAY_REF offsets, which are properly sign- or
8600 zero-extended from their type in case it is narrower than
8601 sizetype. */
8602 if (offset0 == NULL_TREE)
8603 offset0 = build_int_cst (ssizetype, 0);
8604 else
8605 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8606 if (offset1 == NULL_TREE)
8607 offset1 = build_int_cst (ssizetype, 0);
8608 else
8609 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8610
8611 if (!equality_code
8612 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8613 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8614 fold_overflow_warning (("assuming pointer wraparound does not "
8615 "occur when comparing P +- C1 with "
8616 "P +- C2"),
8617 WARN_STRICT_OVERFLOW_COMPARISON);
8618
8619 return fold_build2_loc (loc, code, type, offset0, offset1);
8620 }
8621 }
8622 /* For equal offsets we can simplify to a comparison of the
8623 base addresses. */
8624 else if (bitpos0 == bitpos1
8625 && (indirect_base0
8626 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8627 && (indirect_base1
8628 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8629 && ((offset0 == offset1)
8630 || (offset0 && offset1
8631 && operand_equal_p (offset0, offset1, 0))))
8632 {
8633 if (indirect_base0)
8634 base0 = build_fold_addr_expr_loc (loc, base0);
8635 if (indirect_base1)
8636 base1 = build_fold_addr_expr_loc (loc, base1);
8637 return fold_build2_loc (loc, code, type, base0, base1);
8638 }
8639 }
8640
8641 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8642 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8643 the resulting offset is smaller in absolute value than the
8644 original one and has the same sign. */
8645 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8646 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8647 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8648 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8649 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8650 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8651 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8652 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8653 {
8654 tree const1 = TREE_OPERAND (arg0, 1);
8655 tree const2 = TREE_OPERAND (arg1, 1);
8656 tree variable1 = TREE_OPERAND (arg0, 0);
8657 tree variable2 = TREE_OPERAND (arg1, 0);
8658 tree cst;
8659 const char * const warnmsg = G_("assuming signed overflow does not "
8660 "occur when combining constants around "
8661 "a comparison");
8662
8663 /* Put the constant on the side where it doesn't overflow and is
8664 of lower absolute value than before and of the same sign. */
8665 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8666 ? MINUS_EXPR : PLUS_EXPR,
8667 const2, const1);
8668 if (!TREE_OVERFLOW (cst)
8669 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8670 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8671 {
8672 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8673 return fold_build2_loc (loc, code, type,
8674 variable1,
8675 fold_build2_loc (loc, TREE_CODE (arg1),
8676 TREE_TYPE (arg1),
8677 variable2, cst));
8678 }
8679
8680 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8681 ? MINUS_EXPR : PLUS_EXPR,
8682 const1, const2);
8683 if (!TREE_OVERFLOW (cst)
8684 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8685 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8686 {
8687 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8688 return fold_build2_loc (loc, code, type,
8689 fold_build2_loc (loc, TREE_CODE (arg0),
8690 TREE_TYPE (arg0),
8691 variable1, cst),
8692 variable2);
8693 }
8694 }
8695
8696 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8697 if (tem)
8698 return tem;
8699
8700 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8701 constant, we can simplify it. */
8702 if (TREE_CODE (arg1) == INTEGER_CST
8703 && (TREE_CODE (arg0) == MIN_EXPR
8704 || TREE_CODE (arg0) == MAX_EXPR)
8705 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8706 {
8707 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8708 if (tem)
8709 return tem;
8710 }
8711
8712 /* If we are comparing an expression that just has comparisons
8713 of two integer values, arithmetic expressions of those comparisons,
8714 and constants, we can simplify it. There are only three cases
8715 to check: the two values can either be equal, the first can be
8716 greater, or the second can be greater. Fold the expression for
8717 those three values. Since each value must be 0 or 1, we have
8718 eight possibilities, each of which corresponds to the constant 0
8719 or 1 or one of the six possible comparisons.
8720
8721 This handles common cases like (a > b) == 0 but also handles
8722 expressions like ((x > y) - (y > x)) > 0, which supposedly
8723 occur in macroized code. */
8724
8725 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8726 {
8727 tree cval1 = 0, cval2 = 0;
8728 int save_p = 0;
8729
8730 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8731 /* Don't handle degenerate cases here; they should already
8732 have been handled anyway. */
8733 && cval1 != 0 && cval2 != 0
8734 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8735 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8736 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8737 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8738 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8739 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8740 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8741 {
8742 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8743 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8744
8745 /* We can't just pass T to eval_subst in case cval1 or cval2
8746 was the same as ARG1. */
8747
8748 tree high_result
8749 = fold_build2_loc (loc, code, type,
8750 eval_subst (loc, arg0, cval1, maxval,
8751 cval2, minval),
8752 arg1);
8753 tree equal_result
8754 = fold_build2_loc (loc, code, type,
8755 eval_subst (loc, arg0, cval1, maxval,
8756 cval2, maxval),
8757 arg1);
8758 tree low_result
8759 = fold_build2_loc (loc, code, type,
8760 eval_subst (loc, arg0, cval1, minval,
8761 cval2, maxval),
8762 arg1);
8763
8764 /* All three of these results should be 0 or 1. Confirm they are.
8765 Then use those values to select the proper code to use. */
8766
8767 if (TREE_CODE (high_result) == INTEGER_CST
8768 && TREE_CODE (equal_result) == INTEGER_CST
8769 && TREE_CODE (low_result) == INTEGER_CST)
8770 {
8771 /* Make a 3-bit mask with the high-order bit being the
8772 value for `>', the next for '=', and the low for '<'. */
8773 switch ((integer_onep (high_result) * 4)
8774 + (integer_onep (equal_result) * 2)
8775 + integer_onep (low_result))
8776 {
8777 case 0:
8778 /* Always false. */
8779 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8780 case 1:
8781 code = LT_EXPR;
8782 break;
8783 case 2:
8784 code = EQ_EXPR;
8785 break;
8786 case 3:
8787 code = LE_EXPR;
8788 break;
8789 case 4:
8790 code = GT_EXPR;
8791 break;
8792 case 5:
8793 code = NE_EXPR;
8794 break;
8795 case 6:
8796 code = GE_EXPR;
8797 break;
8798 case 7:
8799 /* Always true. */
8800 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8801 }
8802
8803 if (save_p)
8804 {
8805 tem = save_expr (build2 (code, type, cval1, cval2));
8806 SET_EXPR_LOCATION (tem, loc);
8807 return tem;
8808 }
8809 return fold_build2_loc (loc, code, type, cval1, cval2);
8810 }
8811 }
8812 }
8813
8814 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8815 into a single range test. */
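/* E.g. for unsigned X, X/4 == 2 holds exactly when 8 <= X && X <= 11,
   which fold_div_compare can express as a single range test.  */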
8816 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8817 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8818 && TREE_CODE (arg1) == INTEGER_CST
8819 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8820 && !integer_zerop (TREE_OPERAND (arg0, 1))
8821 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8822 && !TREE_OVERFLOW (arg1))
8823 {
8824 tem = fold_div_compare (loc, code, type, arg0, arg1);
8825 if (tem != NULL_TREE)
8826 return tem;
8827 }
8828
8829 return NULL_TREE;
8830 }
8831
8832
8833 /* Subroutine of fold_binary. Optimize complex multiplications of the
8834 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8835 argument EXPR represents the expression "z" of type TYPE. */
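/* For example, z = 3 + 4i gives z * conj(z) = (3 + 4i) * (3 - 4i)
   = 9 + 16 = 25 + 0i, i.e. |z|^2 with a zero imaginary part.  */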
8836
8837 static tree
8838 fold_mult_zconjz (location_t loc, tree type, tree expr)
8839 {
8840 tree itype = TREE_TYPE (type);
8841 tree rpart, ipart, tem;
8842
8843 if (TREE_CODE (expr) == COMPLEX_EXPR)
8844 {
8845 rpart = TREE_OPERAND (expr, 0);
8846 ipart = TREE_OPERAND (expr, 1);
8847 }
8848 else if (TREE_CODE (expr) == COMPLEX_CST)
8849 {
8850 rpart = TREE_REALPART (expr);
8851 ipart = TREE_IMAGPART (expr);
8852 }
8853 else
8854 {
8855 expr = save_expr (expr);
8856 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8857 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8858 }
8859
8860 rpart = save_expr (rpart);
8861 ipart = save_expr (ipart);
8862 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8863 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8864 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8865 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8866 build_zero_cst (itype));
8867 }
8868
8869
8870 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8871 CONSTRUCTOR ARG into array ELTS and return true if successful. */
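/* Trailing elements missing from a CONSTRUCTOR are padded with zero,
   so { 1, 2 } stored for a 4-element vector yields { 1, 2, 0, 0 }.  */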
8872
8873 static bool
8874 vec_cst_ctor_to_array (tree arg, tree *elts)
8875 {
8876 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8877
8878 if (TREE_CODE (arg) == VECTOR_CST)
8879 {
8880 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8881 elts[i] = VECTOR_CST_ELT (arg, i);
8882 }
8883 else if (TREE_CODE (arg) == CONSTRUCTOR)
8884 {
8885 constructor_elt *elt;
8886
8887 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8888 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8889 return false;
8890 else
8891 elts[i] = elt->value;
8892 }
8893 else
8894 return false;
8895 for (; i < nelts; i++)
8896 elts[i]
8897 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8898 return true;
8899 }
8900
8901 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8902 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8903 NULL_TREE otherwise. */
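/* SEL indexes into the concatenation of ARG0 and ARG1, so with
   nelts == 4 a selector of { 0, 4, 1, 5 } interleaves the low
   halves of the two input vectors.  */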
8904
8905 static tree
8906 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8907 {
8908 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8909 tree *elts;
8910 bool need_ctor = false;
8911
8912 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8913 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8914 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8915 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8916 return NULL_TREE;
8917
8918 elts = XALLOCAVEC (tree, nelts * 3);
8919 if (!vec_cst_ctor_to_array (arg0, elts)
8920 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8921 return NULL_TREE;
8922
8923 for (i = 0; i < nelts; i++)
8924 {
8925 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8926 need_ctor = true;
8927 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8928 }
8929
8930 if (need_ctor)
8931 {
8932 vec<constructor_elt, va_gc> *v;
8933 vec_alloc (v, nelts);
8934 for (i = 0; i < nelts; i++)
8935 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8936 return build_constructor (type, v);
8937 }
8938 else
8939 return build_vector (type, &elts[2 * nelts]);
8940 }
8941
8942 /* Try to fold a pointer difference of type TYPE between two address expressions
8943 array references AREF0 and AREF1 using location LOC. Return a
8944 simplified expression for the difference or NULL_TREE. */
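/* E.g. &a[i] - &a[j] becomes (i - j) * sizeof (a[0]), plus the
   recursively computed difference of the bases when these are
   nested ARRAY_REFs or pointer indirections.  */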
8945
8946 static tree
8947 fold_addr_of_array_ref_difference (location_t loc, tree type,
8948 tree aref0, tree aref1)
8949 {
8950 tree base0 = TREE_OPERAND (aref0, 0);
8951 tree base1 = TREE_OPERAND (aref1, 0);
8952 tree base_offset = build_int_cst (type, 0);
8953
8954 /* If the bases are array references as well, recurse. If the bases
8955 are pointer indirections compute the difference of the pointers.
8956 If the bases are equal, we are set. */
8957 if ((TREE_CODE (base0) == ARRAY_REF
8958 && TREE_CODE (base1) == ARRAY_REF
8959 && (base_offset
8960 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8961 || (INDIRECT_REF_P (base0)
8962 && INDIRECT_REF_P (base1)
8963 && (base_offset
8964 = fold_binary_loc (loc, MINUS_EXPR, type,
8965 fold_convert (type, TREE_OPERAND (base0, 0)),
8966 fold_convert (type,
8967 TREE_OPERAND (base1, 0)))))
8968 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8969 {
8970 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8971 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8972 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8973 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8974 return fold_build2_loc (loc, PLUS_EXPR, type,
8975 base_offset,
8976 fold_build2_loc (loc, MULT_EXPR, type,
8977 diff, esz));
8978 }
8979 return NULL_TREE;
8980 }
8981
8982 /* If the real or vector real constant CST of type TYPE has an exact
8983 inverse, return it, else return NULL. */
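/* E.g. 1/4.0 is exactly 0.25, so 4.0 has an exact inverse, while
   1/3.0 is not representable in binary floating point and 3.0
   does not.  */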
8984
8985 tree
8986 exact_inverse (tree type, tree cst)
8987 {
8988 REAL_VALUE_TYPE r;
8989 tree unit_type, *elts;
8990 machine_mode mode;
8991 unsigned vec_nelts, i;
8992
8993 switch (TREE_CODE (cst))
8994 {
8995 case REAL_CST:
8996 r = TREE_REAL_CST (cst);
8997
8998 if (exact_real_inverse (TYPE_MODE (type), &r))
8999 return build_real (type, r);
9000
9001 return NULL_TREE;
9002
9003 case VECTOR_CST:
9004 vec_nelts = VECTOR_CST_NELTS (cst);
9005 elts = XALLOCAVEC (tree, vec_nelts);
9006 unit_type = TREE_TYPE (type);
9007 mode = TYPE_MODE (unit_type);
9008
9009 for (i = 0; i < vec_nelts; i++)
9010 {
9011 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9012 if (!exact_real_inverse (mode, &r))
9013 return NULL_TREE;
9014 elts[i] = build_real (unit_type, r);
9015 }
9016
9017 return build_vector (type, elts);
9018
9019 default:
9020 return NULL_TREE;
9021 }
9022 }
9023
9024 /* Mask out the tz least significant bits of X of type TYPE where
9025 tz is the number of trailing zeroes in Y. */
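/* E.g. if Y is 24 (three trailing zero bits), the result is
   X & ~7, i.e. X with its low three bits cleared.  */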
9026 static wide_int
9027 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9028 {
9029 int tz = wi::ctz (y);
9030 if (tz > 0)
9031 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9032 return x;
9033 }
9034
9035 /* Return true when T is an address and is known to be nonzero.
9036 For floating point we further ensure that T is not denormal.
9037 Similar logic is present in nonzero_address_p in rtlanal.c.
9038
9039 If the return value is based on the assumption that signed overflow
9040 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9041 change *STRICT_OVERFLOW_P. */
9042
9043 static bool
9044 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9045 {
9046 tree type = TREE_TYPE (t);
9047 enum tree_code code;
9048
9049 /* Doing something useful for floating point would need more work. */
9050 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9051 return false;
9052
9053 code = TREE_CODE (t);
9054 switch (TREE_CODE_CLASS (code))
9055 {
9056 case tcc_unary:
9057 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9058 strict_overflow_p);
9059 case tcc_binary:
9060 case tcc_comparison:
9061 return tree_binary_nonzero_warnv_p (code, type,
9062 TREE_OPERAND (t, 0),
9063 TREE_OPERAND (t, 1),
9064 strict_overflow_p);
9065 case tcc_constant:
9066 case tcc_declaration:
9067 case tcc_reference:
9068 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9069
9070 default:
9071 break;
9072 }
9073
9074 switch (code)
9075 {
9076 case TRUTH_NOT_EXPR:
9077 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9078 strict_overflow_p);
9079
9080 case TRUTH_AND_EXPR:
9081 case TRUTH_OR_EXPR:
9082 case TRUTH_XOR_EXPR:
9083 return tree_binary_nonzero_warnv_p (code, type,
9084 TREE_OPERAND (t, 0),
9085 TREE_OPERAND (t, 1),
9086 strict_overflow_p);
9087
9088 case COND_EXPR:
9089 case CONSTRUCTOR:
9090 case OBJ_TYPE_REF:
9091 case ASSERT_EXPR:
9092 case ADDR_EXPR:
9093 case WITH_SIZE_EXPR:
9094 case SSA_NAME:
9095 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9096
9097 case COMPOUND_EXPR:
9098 case MODIFY_EXPR:
9099 case BIND_EXPR:
9100 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9101 strict_overflow_p);
9102
9103 case SAVE_EXPR:
9104 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9105 strict_overflow_p);
9106
9107 case CALL_EXPR:
9108 {
9109 tree fndecl = get_callee_fndecl (t);
9110 if (!fndecl) return false;
9111 if (flag_delete_null_pointer_checks && !flag_check_new
9112 && DECL_IS_OPERATOR_NEW (fndecl)
9113 && !TREE_NOTHROW (fndecl))
9114 return true;
9115 if (flag_delete_null_pointer_checks
9116 && lookup_attribute ("returns_nonnull",
9117 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9118 return true;
9119 return alloca_call_p (t);
9120 }
9121
9122 default:
9123 break;
9124 }
9125 return false;
9126 }
9127
9128 /* Return true when T is an address and is known to be nonzero.
9129 Handle warnings about undefined signed overflow. */
9130
9131 static bool
9132 tree_expr_nonzero_p (tree t)
9133 {
9134 bool ret, strict_overflow_p;
9135
9136 strict_overflow_p = false;
9137 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9138 if (strict_overflow_p)
9139 fold_overflow_warning (("assuming signed overflow does not occur when "
9140 "determining that expression is always "
9141 "non-zero"),
9142 WARN_STRICT_OVERFLOW_MISC);
9143 return ret;
9144 }
9145
9146 /* Return true if T is known not to be equal to an integer W. */
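/* E.g. an SSA_NAME whose recorded value range is [1, 5] is known
   not to equal 7, and one whose nonzero-bits mask is 0x6 is known
   not to equal any W with a bit set outside that mask.  */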
9147
9148 bool
9149 expr_not_equal_to (tree t, const wide_int &w)
9150 {
9151 wide_int min, max, nz;
9152 value_range_type rtype;
9153 switch (TREE_CODE (t))
9154 {
9155 case INTEGER_CST:
9156 return wi::ne_p (t, w);
9157
9158 case SSA_NAME:
9159 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9160 return false;
9161 rtype = get_range_info (t, &min, &max);
9162 if (rtype == VR_RANGE)
9163 {
9164 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9165 return true;
9166 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9167 return true;
9168 }
9169 else if (rtype == VR_ANTI_RANGE
9170 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9171 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9172 return true;
9173 /* If T has some known zero bits and W has any of those bits set,
9174 then T is known not to be equal to W. */
9175 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9176 TYPE_PRECISION (TREE_TYPE (t))), 0))
9177 return true;
9178 return false;
9179
9180 default:
9181 return false;
9182 }
9183 }
9184
9185 /* Fold a binary expression of code CODE and type TYPE with operands
9186 OP0 and OP1. LOC is the location of the resulting expression.
9187 Return the folded expression if folding is successful. Otherwise,
9188 return NULL_TREE. */
9189
9190 tree
9191 fold_binary_loc (location_t loc,
9192 enum tree_code code, tree type, tree op0, tree op1)
9193 {
9194 enum tree_code_class kind = TREE_CODE_CLASS (code);
9195 tree arg0, arg1, tem;
9196 tree t1 = NULL_TREE;
9197 bool strict_overflow_p;
9198 unsigned int prec;
9199
9200 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9201 && TREE_CODE_LENGTH (code) == 2
9202 && op0 != NULL_TREE
9203 && op1 != NULL_TREE);
9204
9205 arg0 = op0;
9206 arg1 = op1;
9207
9208 /* Strip any conversions that don't change the mode. This is
9209 safe for every expression, except for a comparison expression
9210 because its signedness is derived from its operands. So, in
9211 the latter case, only strip conversions that don't change the
9212 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9213 preserved.
9214
9215 Note that this is done as an internal manipulation within the
9216 constant folder, in order to find the simplest representation
9217 of the arguments so that their form can be studied. In any
9218 case, the appropriate type conversions should be put back in
9219 the tree that will get out of the constant folder. */
9220
9221 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9222 {
9223 STRIP_SIGN_NOPS (arg0);
9224 STRIP_SIGN_NOPS (arg1);
9225 }
9226 else
9227 {
9228 STRIP_NOPS (arg0);
9229 STRIP_NOPS (arg1);
9230 }
9231
9232 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9233 constant but we can't do arithmetic on them. */
9234 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9235 {
9236 tem = const_binop (code, type, arg0, arg1);
9237 if (tem != NULL_TREE)
9238 {
9239 if (TREE_TYPE (tem) != type)
9240 tem = fold_convert_loc (loc, type, tem);
9241 return tem;
9242 }
9243 }
9244
9245 /* If this is a commutative operation, and ARG0 is a constant, move it
9246 to ARG1 to reduce the number of tests below. */
9247 if (commutative_tree_code (code)
9248 && tree_swap_operands_p (arg0, arg1, true))
9249 return fold_build2_loc (loc, code, type, op1, op0);
9250
9251 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9252 to ARG1 to reduce the number of tests below. */
9253 if (kind == tcc_comparison
9254 && tree_swap_operands_p (arg0, arg1, true))
9255 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9256
9257 tem = generic_simplify (loc, code, type, op0, op1);
9258 if (tem)
9259 return tem;
9260
9261 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9262
9263 First check for cases where an arithmetic operation is applied to a
9264 compound, conditional, or comparison operation. Push the arithmetic
9265 operation inside the compound or conditional to see if any folding
9266 can then be done. Convert comparison to conditional for this purpose.
9267 This also optimizes non-constant cases that used to be done in
9268 expand_expr.
9269
9270 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9271 where one of the operands is a comparison and the other is a comparison, a
9272 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9273 code below would make the expression more complex. Change it to a
9274 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9275 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9276
9277 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9278 || code == EQ_EXPR || code == NE_EXPR)
9279 && TREE_CODE (type) != VECTOR_TYPE
9280 && ((truth_value_p (TREE_CODE (arg0))
9281 && (truth_value_p (TREE_CODE (arg1))
9282 || (TREE_CODE (arg1) == BIT_AND_EXPR
9283 && integer_onep (TREE_OPERAND (arg1, 1)))))
9284 || (truth_value_p (TREE_CODE (arg1))
9285 && (truth_value_p (TREE_CODE (arg0))
9286 || (TREE_CODE (arg0) == BIT_AND_EXPR
9287 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9288 {
9289 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9290 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9291 : TRUTH_XOR_EXPR,
9292 boolean_type_node,
9293 fold_convert_loc (loc, boolean_type_node, arg0),
9294 fold_convert_loc (loc, boolean_type_node, arg1));
9295
9296 if (code == EQ_EXPR)
9297 tem = invert_truthvalue_loc (loc, tem);
9298
9299 return fold_convert_loc (loc, type, tem);
9300 }
9301
9302 if (TREE_CODE_CLASS (code) == tcc_binary
9303 || TREE_CODE_CLASS (code) == tcc_comparison)
9304 {
9305 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9306 {
9307 tem = fold_build2_loc (loc, code, type,
9308 fold_convert_loc (loc, TREE_TYPE (op0),
9309 TREE_OPERAND (arg0, 1)), op1);
9310 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9311 tem);
9312 }
9313 if (TREE_CODE (arg1) == COMPOUND_EXPR
9314 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9315 {
9316 tem = fold_build2_loc (loc, code, type, op0,
9317 fold_convert_loc (loc, TREE_TYPE (op1),
9318 TREE_OPERAND (arg1, 1)));
9319 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9320 tem);
9321 }
9322
9323 if (TREE_CODE (arg0) == COND_EXPR
9324 || TREE_CODE (arg0) == VEC_COND_EXPR
9325 || COMPARISON_CLASS_P (arg0))
9326 {
9327 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9328 arg0, arg1,
9329 /*cond_first_p=*/1);
9330 if (tem != NULL_TREE)
9331 return tem;
9332 }
9333
9334 if (TREE_CODE (arg1) == COND_EXPR
9335 || TREE_CODE (arg1) == VEC_COND_EXPR
9336 || COMPARISON_CLASS_P (arg1))
9337 {
9338 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9339 arg1, arg0,
9340 /*cond_first_p=*/0);
9341 if (tem != NULL_TREE)
9342 return tem;
9343 }
9344 }
9345
9346 switch (code)
9347 {
9348 case MEM_REF:
9349 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9350 if (TREE_CODE (arg0) == ADDR_EXPR
9351 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9352 {
9353 tree iref = TREE_OPERAND (arg0, 0);
9354 return fold_build2 (MEM_REF, type,
9355 TREE_OPERAND (iref, 0),
9356 int_const_binop (PLUS_EXPR, arg1,
9357 TREE_OPERAND (iref, 1)));
9358 }
9359
9360 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9361 if (TREE_CODE (arg0) == ADDR_EXPR
9362 && handled_component_p (TREE_OPERAND (arg0, 0)))
9363 {
9364 tree base;
9365 HOST_WIDE_INT coffset;
9366 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9367 &coffset);
9368 if (!base)
9369 return NULL_TREE;
9370 return fold_build2 (MEM_REF, type,
9371 build_fold_addr_expr (base),
9372 int_const_binop (PLUS_EXPR, arg1,
9373 size_int (coffset)));
9374 }
9375
9376 return NULL_TREE;
9377
9378 case POINTER_PLUS_EXPR:
9379 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9380 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9381 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9382 return fold_convert_loc (loc, type,
9383 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9384 fold_convert_loc (loc, sizetype,
9385 arg1),
9386 fold_convert_loc (loc, sizetype,
9387 arg0)));
9388
9389 return NULL_TREE;
9390
9391 case PLUS_EXPR:
9392 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9393 {
9394 /* X + (X / CST) * -CST is X % CST. */
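/* This follows from the identity X == (X / CST) * CST + X % CST
   for truncating division.  */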
9395 if (TREE_CODE (arg1) == MULT_EXPR
9396 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9397 && operand_equal_p (arg0,
9398 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9399 {
9400 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9401 tree cst1 = TREE_OPERAND (arg1, 1);
9402 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9403 cst1, cst0);
9404 if (sum && integer_zerop (sum))
9405 return fold_convert_loc (loc, type,
9406 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9407 TREE_TYPE (arg0), arg0,
9408 cst0));
9409 }
9410 }
9411
9412 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9413 one. Make sure the type is not saturating and has the signedness of
9414 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9415 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9416 if ((TREE_CODE (arg0) == MULT_EXPR
9417 || TREE_CODE (arg1) == MULT_EXPR)
9418 && !TYPE_SATURATING (type)
9419 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9420 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9421 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9422 {
9423 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9424 if (tem)
9425 return tem;
9426 }
9427
9428 if (! FLOAT_TYPE_P (type))
9429 {
9430 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9431 (plus (plus (mult) (mult)) (foo)) so that we can
9432 take advantage of the factoring cases below. */
9433 if (ANY_INTEGRAL_TYPE_P (type)
9434 && TYPE_OVERFLOW_WRAPS (type)
9435 && (((TREE_CODE (arg0) == PLUS_EXPR
9436 || TREE_CODE (arg0) == MINUS_EXPR)
9437 && TREE_CODE (arg1) == MULT_EXPR)
9438 || ((TREE_CODE (arg1) == PLUS_EXPR
9439 || TREE_CODE (arg1) == MINUS_EXPR)
9440 && TREE_CODE (arg0) == MULT_EXPR)))
9441 {
9442 tree parg0, parg1, parg, marg;
9443 enum tree_code pcode;
9444
9445 if (TREE_CODE (arg1) == MULT_EXPR)
9446 parg = arg0, marg = arg1;
9447 else
9448 parg = arg1, marg = arg0;
9449 pcode = TREE_CODE (parg);
9450 parg0 = TREE_OPERAND (parg, 0);
9451 parg1 = TREE_OPERAND (parg, 1);
9452 STRIP_NOPS (parg0);
9453 STRIP_NOPS (parg1);
9454
9455 if (TREE_CODE (parg0) == MULT_EXPR
9456 && TREE_CODE (parg1) != MULT_EXPR)
9457 return fold_build2_loc (loc, pcode, type,
9458 fold_build2_loc (loc, PLUS_EXPR, type,
9459 fold_convert_loc (loc, type,
9460 parg0),
9461 fold_convert_loc (loc, type,
9462 marg)),
9463 fold_convert_loc (loc, type, parg1));
9464 if (TREE_CODE (parg0) != MULT_EXPR
9465 && TREE_CODE (parg1) == MULT_EXPR)
9466 return
9467 fold_build2_loc (loc, PLUS_EXPR, type,
9468 fold_convert_loc (loc, type, parg0),
9469 fold_build2_loc (loc, pcode, type,
9470 fold_convert_loc (loc, type, marg),
9471 fold_convert_loc (loc, type,
9472 parg1)));
9473 }
9474 }
9475 else
9476 {
9477 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9478 to __complex__ ( x, y ). This is not the same for SNaNs or
9479 if signed zeros are involved. */
9480 if (!HONOR_SNANS (element_mode (arg0))
9481 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9482 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9483 {
9484 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9485 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9486 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9487 bool arg0rz = false, arg0iz = false;
9488 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9489 || (arg0i && (arg0iz = real_zerop (arg0i))))
9490 {
9491 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9492 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9493 if (arg0rz && arg1i && real_zerop (arg1i))
9494 {
9495 tree rp = arg1r ? arg1r
9496 : build1 (REALPART_EXPR, rtype, arg1);
9497 tree ip = arg0i ? arg0i
9498 : build1 (IMAGPART_EXPR, rtype, arg0);
9499 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9500 }
9501 else if (arg0iz && arg1r && real_zerop (arg1r))
9502 {
9503 tree rp = arg0r ? arg0r
9504 : build1 (REALPART_EXPR, rtype, arg0);
9505 tree ip = arg1i ? arg1i
9506 : build1 (IMAGPART_EXPR, rtype, arg1);
9507 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9508 }
9509 }
9510 }
9511
9512 if (flag_unsafe_math_optimizations
9513 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9514 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9515 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9516 return tem;
9517
9518 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9519 We associate floats only if the user has specified
9520 -fassociative-math. */
9521 if (flag_associative_math
9522 && TREE_CODE (arg1) == PLUS_EXPR
9523 && TREE_CODE (arg0) != MULT_EXPR)
9524 {
9525 tree tree10 = TREE_OPERAND (arg1, 0);
9526 tree tree11 = TREE_OPERAND (arg1, 1);
9527 if (TREE_CODE (tree11) == MULT_EXPR
9528 && TREE_CODE (tree10) == MULT_EXPR)
9529 {
9530 tree tree0;
9531 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9532 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9533 }
9534 }
9535 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9536 We associate floats only if the user has specified
9537 -fassociative-math. */
9538 if (flag_associative_math
9539 && TREE_CODE (arg0) == PLUS_EXPR
9540 && TREE_CODE (arg1) != MULT_EXPR)
9541 {
9542 tree tree00 = TREE_OPERAND (arg0, 0);
9543 tree tree01 = TREE_OPERAND (arg0, 1);
9544 if (TREE_CODE (tree01) == MULT_EXPR
9545 && TREE_CODE (tree00) == MULT_EXPR)
9546 {
9547 tree tree0;
9548 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9549 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9550 }
9551 }
9552 }
9553
9554 bit_rotate:
9555 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
9556 is a rotate of A by C1 bits. */
9557 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
9558 is a rotate of A by B bits. */
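/* E.g. for a 32-bit unsigned A, (A << 8) + (A >> 24) is A rotated
   left by 8 bits.  */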
9559 {
9560 enum tree_code code0, code1;
9561 tree rtype;
9562 code0 = TREE_CODE (arg0);
9563 code1 = TREE_CODE (arg1);
9564 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9565 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9566 && operand_equal_p (TREE_OPERAND (arg0, 0),
9567 TREE_OPERAND (arg1, 0), 0)
9568 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9569 TYPE_UNSIGNED (rtype))
9570 /* Only create rotates in complete modes. Other cases are not
9571 expanded properly. */
9572 && (element_precision (rtype)
9573 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9574 {
9575 tree tree01, tree11;
9576 enum tree_code code01, code11;
9577
9578 tree01 = TREE_OPERAND (arg0, 1);
9579 tree11 = TREE_OPERAND (arg1, 1);
9580 STRIP_NOPS (tree01);
9581 STRIP_NOPS (tree11);
9582 code01 = TREE_CODE (tree01);
9583 code11 = TREE_CODE (tree11);
9584 if (code01 == INTEGER_CST
9585 && code11 == INTEGER_CST
9586 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9587 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9588 {
9589 tem = build2_loc (loc, LROTATE_EXPR,
9590 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9591 TREE_OPERAND (arg0, 0),
9592 code0 == LSHIFT_EXPR
9593 ? TREE_OPERAND (arg0, 1)
9594 : TREE_OPERAND (arg1, 1));
9595 return fold_convert_loc (loc, type, tem);
9596 }
9597 else if (code11 == MINUS_EXPR)
9598 {
9599 tree tree110, tree111;
9600 tree110 = TREE_OPERAND (tree11, 0);
9601 tree111 = TREE_OPERAND (tree11, 1);
9602 STRIP_NOPS (tree110);
9603 STRIP_NOPS (tree111);
9604 if (TREE_CODE (tree110) == INTEGER_CST
9605 && 0 == compare_tree_int (tree110,
9606 element_precision
9607 (TREE_TYPE (TREE_OPERAND
9608 (arg0, 0))))
9609 && operand_equal_p (tree01, tree111, 0))
9610 return
9611 fold_convert_loc (loc, type,
9612 build2 ((code0 == LSHIFT_EXPR
9613 ? LROTATE_EXPR
9614 : RROTATE_EXPR),
9615 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9616 TREE_OPERAND (arg0, 0),
9617 TREE_OPERAND (arg0, 1)));
9618 }
9619 else if (code01 == MINUS_EXPR)
9620 {
9621 tree tree010, tree011;
9622 tree010 = TREE_OPERAND (tree01, 0);
9623 tree011 = TREE_OPERAND (tree01, 1);
9624 STRIP_NOPS (tree010);
9625 STRIP_NOPS (tree011);
9626 if (TREE_CODE (tree010) == INTEGER_CST
9627 && 0 == compare_tree_int (tree010,
9628 element_precision
9629 (TREE_TYPE (TREE_OPERAND
9630 (arg0, 0))))
9631 && operand_equal_p (tree11, tree011, 0))
9632 return fold_convert_loc
9633 (loc, type,
9634 build2 ((code0 != LSHIFT_EXPR
9635 ? LROTATE_EXPR
9636 : RROTATE_EXPR),
9637 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9638 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9639 }
9640 }
9641 }
9642
9643 associate:
9644 /* In most languages, we can't associate operations on floats through
9645 parentheses. Rather than remember where the parentheses were, we
9646 don't associate floats at all, unless the user has specified
9647 -fassociative-math.
9648 And, we need to make sure type is not saturating. */
9649
9650 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9651 && !TYPE_SATURATING (type))
9652 {
9653 tree var0, con0, lit0, minus_lit0;
9654 tree var1, con1, lit1, minus_lit1;
9655 tree atype = type;
9656 bool ok = true;
9657
9658 /* Split both trees into variables, constants, and literals. Then
9659 associate each group together, the constants with literals,
9660 then the result with variables. This increases the chances of
9661 literals being recombined later and of generating relocatable
9662 expressions for the sum of a constant and literal. */
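/* For instance, (x + 1) + (y + 2) regroups as (x + y) + 3 once the
   literal parts have been associated.  */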
9663 var0 = split_tree (loc, arg0, type, code,
9664 &con0, &lit0, &minus_lit0, 0);
9665 var1 = split_tree (loc, arg1, type, code,
9666 &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9667
9668 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9669 if (code == MINUS_EXPR)
9670 code = PLUS_EXPR;
9671
9672 /* With undefined overflow prefer doing association in a type
9673 which wraps on overflow, if that is one of the operand types. */
9674 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9675 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9676 {
9677 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9678 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9679 atype = TREE_TYPE (arg0);
9680 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9681 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9682 atype = TREE_TYPE (arg1);
9683 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9684 }
9685
9686 /* With undefined overflow we can only associate constants with one
9687 variable, and constants whose association doesn't overflow. */
9688 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9689 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9690 {
9691 if (var0 && var1)
9692 {
9693 tree tmp0 = var0;
9694 tree tmp1 = var1;
9695 bool one_neg = false;
9696
9697 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9698 {
9699 tmp0 = TREE_OPERAND (tmp0, 0);
9700 one_neg = !one_neg;
9701 }
9702 if (CONVERT_EXPR_P (tmp0)
9703 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9704 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9705 <= TYPE_PRECISION (atype)))
9706 tmp0 = TREE_OPERAND (tmp0, 0);
9707 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9708 {
9709 tmp1 = TREE_OPERAND (tmp1, 0);
9710 one_neg = !one_neg;
9711 }
9712 if (CONVERT_EXPR_P (tmp1)
9713 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9714 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9715 <= TYPE_PRECISION (atype)))
9716 tmp1 = TREE_OPERAND (tmp1, 0);
9717 /* The only case we can still associate with two variables
9718 is if they cancel out. */
9719 if (!one_neg
9720 || !operand_equal_p (tmp0, tmp1, 0))
9721 ok = false;
9722 }
9723 }
9724
9725 /* Only do something if we found more than two objects. Otherwise,
9726 nothing has changed and we risk infinite recursion. */
9727 if (ok
9728 && (2 < ((var0 != 0) + (var1 != 0)
9729 + (con0 != 0) + (con1 != 0)
9730 + (lit0 != 0) + (lit1 != 0)
9731 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9732 {
9733 bool any_overflows = false;
9734 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9735 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9736 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9737 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9738 var0 = associate_trees (loc, var0, var1, code, atype);
9739 con0 = associate_trees (loc, con0, con1, code, atype);
9740 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9741 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9742 code, atype);
9743
9744 /* Preserve the MINUS_EXPR if the negative part of the literal is
9745 greater than the positive part. Otherwise, the multiplicative
9746 folding code (i.e. extract_muldiv) may be fooled in case
9747 unsigned constants are subtracted, like in the following
9748 example: ((X*2 + 4) - 8U)/2. */
9749 if (minus_lit0 && lit0)
9750 {
9751 if (TREE_CODE (lit0) == INTEGER_CST
9752 && TREE_CODE (minus_lit0) == INTEGER_CST
9753 && tree_int_cst_lt (lit0, minus_lit0))
9754 {
9755 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9756 MINUS_EXPR, atype);
9757 lit0 = 0;
9758 }
9759 else
9760 {
9761 lit0 = associate_trees (loc, lit0, minus_lit0,
9762 MINUS_EXPR, atype);
9763 minus_lit0 = 0;
9764 }
9765 }
9766
9767 /* Don't introduce overflows through reassociation. */
9768 if (!any_overflows
9769 && ((lit0 && TREE_OVERFLOW_P (lit0))
9770 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9771 return NULL_TREE;
9772
9773 if (minus_lit0)
9774 {
9775 if (con0 == 0)
9776 return
9777 fold_convert_loc (loc, type,
9778 associate_trees (loc, var0, minus_lit0,
9779 MINUS_EXPR, atype));
9780 else
9781 {
9782 con0 = associate_trees (loc, con0, minus_lit0,
9783 MINUS_EXPR, atype);
9784 return
9785 fold_convert_loc (loc, type,
9786 associate_trees (loc, var0, con0,
9787 PLUS_EXPR, atype));
9788 }
9789 }
9790
9791 con0 = associate_trees (loc, con0, lit0, code, atype);
9792 return
9793 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9794 code, atype));
9795 }
9796 }
9797
9798 return NULL_TREE;
9799
9800 case MINUS_EXPR:
9801 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9802 if (TREE_CODE (arg0) == NEGATE_EXPR
9803 && negate_expr_p (op1)
9804 && reorder_operands_p (arg0, arg1))
9805 return fold_build2_loc (loc, MINUS_EXPR, type,
9806 negate_expr (op1),
9807 fold_convert_loc (loc, type,
9808 TREE_OPERAND (arg0, 0)));
9809
9810 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9811 __complex__ ( x, -y ). This is not the same for SNaNs or if
9812 signed zeros are involved. */
9813 if (!HONOR_SNANS (element_mode (arg0))
9814 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9815 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9816 {
9817 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9818 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9819 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9820 bool arg0rz = false, arg0iz = false;
9821 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9822 || (arg0i && (arg0iz = real_zerop (arg0i))))
9823 {
9824 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9825 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9826 if (arg0rz && arg1i && real_zerop (arg1i))
9827 {
9828 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9829 arg1r ? arg1r
9830 : build1 (REALPART_EXPR, rtype, arg1));
9831 tree ip = arg0i ? arg0i
9832 : build1 (IMAGPART_EXPR, rtype, arg0);
9833 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9834 }
9835 else if (arg0iz && arg1r && real_zerop (arg1r))
9836 {
9837 tree rp = arg0r ? arg0r
9838 : build1 (REALPART_EXPR, rtype, arg0);
9839 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9840 arg1i ? arg1i
9841 : build1 (IMAGPART_EXPR, rtype, arg1));
9842 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9843 }
9844 }
9845 }
9846
9847 /* A - B -> A + (-B) if B is easily negatable. */
9848 if (negate_expr_p (op1)
9849 && ! TYPE_OVERFLOW_SANITIZED (type)
9850 && ((FLOAT_TYPE_P (type)
9851 /* Avoid this transformation if B is a positive REAL_CST. */
9852 && (TREE_CODE (op1) != REAL_CST
9853 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9854 || INTEGRAL_TYPE_P (type)))
9855 return fold_build2_loc (loc, PLUS_EXPR, type,
9856 fold_convert_loc (loc, type, arg0),
9857 negate_expr (op1));
9858
9859 /* Fold &a[i] - &a[j] to i-j. */
9860 if (TREE_CODE (arg0) == ADDR_EXPR
9861 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9862 && TREE_CODE (arg1) == ADDR_EXPR
9863 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9864 {
9865 tree tem = fold_addr_of_array_ref_difference (loc, type,
9866 TREE_OPERAND (arg0, 0),
9867 TREE_OPERAND (arg1, 0));
9868 if (tem)
9869 return tem;
9870 }
9871
9872 if (FLOAT_TYPE_P (type)
9873 && flag_unsafe_math_optimizations
9874 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9875 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9876 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9877 return tem;
9878
9879 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9880 one. Make sure the type is not saturating and has the signedness of
9881 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9882 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9883 if ((TREE_CODE (arg0) == MULT_EXPR
9884 || TREE_CODE (arg1) == MULT_EXPR)
9885 && !TYPE_SATURATING (type)
9886 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9887 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9888 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9889 {
9890 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9891 if (tem)
9892 return tem;
9893 }
9894
9895 goto associate;
9896
9897 case MULT_EXPR:
9898 if (! FLOAT_TYPE_P (type))
9899 {
9900 /* Transform x * -C into -x * C if x is easily negatable. */
9901 if (TREE_CODE (op1) == INTEGER_CST
9902 && tree_int_cst_sgn (op1) == -1
9903 && negate_expr_p (op0)
9904 && (tem = negate_expr (op1)) != op1
9905 && ! TREE_OVERFLOW (tem))
9906 return fold_build2_loc (loc, MULT_EXPR, type,
9907 fold_convert_loc (loc, type,
9908 negate_expr (op0)), tem);
9909
9910 /* (A + A) * C -> A * 2 * C */
9911 if (TREE_CODE (arg0) == PLUS_EXPR
9912 && TREE_CODE (arg1) == INTEGER_CST
9913 && operand_equal_p (TREE_OPERAND (arg0, 0),
9914 TREE_OPERAND (arg0, 1), 0))
9915 return fold_build2_loc (loc, MULT_EXPR, type,
9916 omit_one_operand_loc (loc, type,
9917 TREE_OPERAND (arg0, 0),
9918 TREE_OPERAND (arg0, 1)),
9919 fold_build2_loc (loc, MULT_EXPR, type,
9920 build_int_cst (type, 2), arg1));
9921
9922 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9923 sign-changing only. */
9924 if (TREE_CODE (arg1) == INTEGER_CST
9925 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9926 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9927 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9928
9929 strict_overflow_p = false;
9930 if (TREE_CODE (arg1) == INTEGER_CST
9931 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9932 &strict_overflow_p)))
9933 {
9934 if (strict_overflow_p)
9935 fold_overflow_warning (("assuming signed overflow does not "
9936 "occur when simplifying "
9937 "multiplication"),
9938 WARN_STRICT_OVERFLOW_MISC);
9939 return fold_convert_loc (loc, type, tem);
9940 }
9941
9942 /* Optimize z * conj(z) for integer complex numbers. */
9943 if (TREE_CODE (arg0) == CONJ_EXPR
9944 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9945 return fold_mult_zconjz (loc, type, arg1);
9946 if (TREE_CODE (arg1) == CONJ_EXPR
9947 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9948 return fold_mult_zconjz (loc, type, arg0);
9949 }
9950 else
9951 {
9952 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9953 This is not the same for NaNs or if signed zeros are
9954 involved. */
9955 if (!HONOR_NANS (arg0)
9956 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9957 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9958 && TREE_CODE (arg1) == COMPLEX_CST
9959 && real_zerop (TREE_REALPART (arg1)))
9960 {
9961 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9962 if (real_onep (TREE_IMAGPART (arg1)))
9963 return
9964 fold_build2_loc (loc, COMPLEX_EXPR, type,
9965 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9966 rtype, arg0)),
9967 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9968 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9969 return
9970 fold_build2_loc (loc, COMPLEX_EXPR, type,
9971 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9972 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9973 rtype, arg0)));
9974 }
9975
9976 /* Optimize z * conj(z) for floating point complex numbers.
9977 Guarded by flag_unsafe_math_optimizations as non-finite
9978 imaginary components don't produce scalar results. */
9979 if (flag_unsafe_math_optimizations
9980 && TREE_CODE (arg0) == CONJ_EXPR
9981 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9982 return fold_mult_zconjz (loc, type, arg1);
9983 if (flag_unsafe_math_optimizations
9984 && TREE_CODE (arg1) == CONJ_EXPR
9985 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9986 return fold_mult_zconjz (loc, type, arg0);
9987
9988 if (flag_unsafe_math_optimizations)
9989 {
9991 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9992 if (!in_gimple_form
9993 && optimize
9994 && operand_equal_p (arg0, arg1, 0))
9995 {
9996 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9997
9998 if (powfn)
9999 {
10000 tree arg = build_real (type, dconst2);
10001 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10002 }
10003 }
10004 }
10005 }
10006 goto associate;
10007
10008 case BIT_IOR_EXPR:
10009 /* Canonicalize (X & C1) | C2. */
10010 if (TREE_CODE (arg0) == BIT_AND_EXPR
10011 && TREE_CODE (arg1) == INTEGER_CST
10012 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10013 {
10014 int width = TYPE_PRECISION (type), w;
10015 wide_int c1 = TREE_OPERAND (arg0, 1);
10016 wide_int c2 = arg1;
10017
10018 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10019 if ((c1 & c2) == c1)
10020 return omit_one_operand_loc (loc, type, arg1,
10021 TREE_OPERAND (arg0, 0));
10022
10023 wide_int msk = wi::mask (width, false,
10024 TYPE_PRECISION (TREE_TYPE (arg1)));
10025
10026 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10027 if (msk.and_not (c1 | c2) == 0)
10028 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10029 TREE_OPERAND (arg0, 0), arg1);
10030
10031 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10032 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10033 mode which allows further optimizations. */
10034 c1 &= msk;
10035 c2 &= msk;
10036 wide_int c3 = c1.and_not (c2);
10037 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10038 {
10039 wide_int mask = wi::mask (w, false,
10040 TYPE_PRECISION (type));
10041 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10042 {
10043 c3 = mask;
10044 break;
10045 }
10046 }
10047
10048 if (c3 != c1)
10049 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10050 fold_build2_loc (loc, BIT_AND_EXPR, type,
10051 TREE_OPERAND (arg0, 0),
10052 wide_int_to_tree (type,
10053 c3)),
10054 arg1);
10055 }
10056
10057 /* See if this can be simplified into a rotate first. If that
10058 is unsuccessful continue in the association code. */
10059 goto bit_rotate;
10060
10061 case BIT_XOR_EXPR:
10062 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10063 if (TREE_CODE (arg0) == BIT_AND_EXPR
10064 && INTEGRAL_TYPE_P (type)
10065 && integer_onep (TREE_OPERAND (arg0, 1))
10066 && integer_onep (arg1))
10067 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10068 build_zero_cst (TREE_TYPE (arg0)));
10069
10070 /* See if this can be simplified into a rotate first. If that
10071 is unsuccessful continue in the association code. */
10072 goto bit_rotate;
10073
10074 case BIT_AND_EXPR:
10075 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10076 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10077 && INTEGRAL_TYPE_P (type)
10078 && integer_onep (TREE_OPERAND (arg0, 1))
10079 && integer_onep (arg1))
10080 {
10081 tree tem2;
10082 tem = TREE_OPERAND (arg0, 0);
10083 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10084 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10085 tem, tem2);
10086 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10087 build_zero_cst (TREE_TYPE (tem)));
10088 }
10089 /* Fold ~X & 1 as (X & 1) == 0. */
10090 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10091 && INTEGRAL_TYPE_P (type)
10092 && integer_onep (arg1))
10093 {
10094 tree tem2;
10095 tem = TREE_OPERAND (arg0, 0);
10096 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10097 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10098 tem, tem2);
10099 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10100 build_zero_cst (TREE_TYPE (tem)));
10101 }
10102 /* Fold !X & 1 as X == 0. */
10103 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10104 && integer_onep (arg1))
10105 {
10106 tem = TREE_OPERAND (arg0, 0);
10107 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10108 build_zero_cst (TREE_TYPE (tem)));
10109 }
10110
10111 /* Fold (X ^ Y) & Y as ~X & Y. */
10112 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10113 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10114 {
10115 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10116 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10117 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10118 fold_convert_loc (loc, type, arg1));
10119 }
10120 /* Fold (X ^ Y) & X as ~Y & X. */
10121 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10122 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10123 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10124 {
10125 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10126 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10127 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10128 fold_convert_loc (loc, type, arg1));
10129 }
10130 /* Fold X & (X ^ Y) as X & ~Y. */
10131 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10132 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10133 {
10134 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10135 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10136 fold_convert_loc (loc, type, arg0),
10137 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10138 }
10139 /* Fold X & (Y ^ X) as ~Y & X. */
10140 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10141 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10142 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10143 {
10144 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10145 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10146 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10147 fold_convert_loc (loc, type, arg0));
10148 }
10149
10150 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10151 multiple of 1 << CST. */
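/* E.g. (X * 8) & -8 is X * 8, since the low three bits of X * 8
   are already zero.  */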
10152 if (TREE_CODE (arg1) == INTEGER_CST)
10153 {
10154 wide_int cst1 = arg1;
10155 wide_int ncst1 = -cst1;
10156 if ((cst1 & ncst1) == ncst1
10157 && multiple_of_p (type, arg0,
10158 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10159 return fold_convert_loc (loc, type, arg0);
10160 }
10161
10162 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10163 bits from CST2. */
10164 if (TREE_CODE (arg1) == INTEGER_CST
10165 && TREE_CODE (arg0) == MULT_EXPR
10166 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10167 {
10168 wide_int warg1 = arg1;
10169 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10170
10171 if (masked == 0)
10172 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10173 arg0, arg1);
10174 else if (masked != warg1)
10175 {
10176 /* Avoid the transform if arg1 is a mask of some
10177 mode which allows further optimizations. */
10178 int pop = wi::popcount (warg1);
10179 if (!(pop >= BITS_PER_UNIT
10180 && exact_log2 (pop) != -1
10181 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10182 return fold_build2_loc (loc, code, type, op0,
10183 wide_int_to_tree (type, masked));
10184 }
10185 }
10186
10187 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10188 ((A & N) + B) & M -> (A + B) & M
10189 Similarly if (N & M) == 0,
10190 ((A | N) + B) & M -> (A + B) & M
10191 and for - instead of + (or unary - instead of +)
10192 and/or ^ instead of |.
10193 If B is constant and (B & M) == 0, fold into A & M. */
10194 if (TREE_CODE (arg1) == INTEGER_CST)
10195 {
10196 wide_int cst1 = arg1;
10197 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10198 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10199 && (TREE_CODE (arg0) == PLUS_EXPR
10200 || TREE_CODE (arg0) == MINUS_EXPR
10201 || TREE_CODE (arg0) == NEGATE_EXPR)
10202 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10203 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10204 {
10205 tree pmop[2];
10206 int which = 0;
10207 wide_int cst0;
10208
10209 /* Now we know that arg0 is (C + D) or (C - D) or
10210 -C and arg1 (M) is == (1LL << cst) - 1.
10211 Store C into PMOP[0] and D into PMOP[1]. */
10212 pmop[0] = TREE_OPERAND (arg0, 0);
10213 pmop[1] = NULL;
10214 if (TREE_CODE (arg0) != NEGATE_EXPR)
10215 {
10216 pmop[1] = TREE_OPERAND (arg0, 1);
10217 which = 1;
10218 }
10219
10220 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10221 which = -1;
10222
10223 for (; which >= 0; which--)
10224 switch (TREE_CODE (pmop[which]))
10225 {
10226 case BIT_AND_EXPR:
10227 case BIT_IOR_EXPR:
10228 case BIT_XOR_EXPR:
10229 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10230 != INTEGER_CST)
10231 break;
10232 cst0 = TREE_OPERAND (pmop[which], 1);
10233 cst0 &= cst1;
10234 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10235 {
10236 if (cst0 != cst1)
10237 break;
10238 }
10239 else if (cst0 != 0)
10240 break;
10241 /* If C or D is of the form (A & N) where
10242 (N & M) == M, or of the form (A | N) or
10243 (A ^ N) where (N & M) == 0, replace it with A. */
10244 pmop[which] = TREE_OPERAND (pmop[which], 0);
10245 break;
10246 case INTEGER_CST:
10247 /* If C or D is a constant N where (N & M) == 0, it can be
10248 omitted (assumed 0). */
10249 if ((TREE_CODE (arg0) == PLUS_EXPR
10250 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10251 && (cst1 & pmop[which]) == 0)
10252 pmop[which] = NULL;
10253 break;
10254 default:
10255 break;
10256 }
10257
10258 /* Only build anything new if we optimized one or both arguments
10259 above. */
10260 if (pmop[0] != TREE_OPERAND (arg0, 0)
10261 || (TREE_CODE (arg0) != NEGATE_EXPR
10262 && pmop[1] != TREE_OPERAND (arg0, 1)))
10263 {
10264 tree utype = TREE_TYPE (arg0);
10265 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10266 {
10267 /* Perform the operations in a type that has defined
10268 overflow behavior. */
10269 utype = unsigned_type_for (TREE_TYPE (arg0));
10270 if (pmop[0] != NULL)
10271 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10272 if (pmop[1] != NULL)
10273 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10274 }
10275
10276 if (TREE_CODE (arg0) == NEGATE_EXPR)
10277 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10278 else if (TREE_CODE (arg0) == PLUS_EXPR)
10279 {
10280 if (pmop[0] != NULL && pmop[1] != NULL)
10281 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10282 pmop[0], pmop[1]);
10283 else if (pmop[0] != NULL)
10284 tem = pmop[0];
10285 else if (pmop[1] != NULL)
10286 tem = pmop[1];
10287 else
10288 return build_int_cst (type, 0);
10289 }
10290 else if (pmop[0] == NULL)
10291 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10292 else
10293 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10294 pmop[0], pmop[1]);
10295 /* TEM is now the new binary +, - or unary - replacement. */
10296 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10297 fold_convert_loc (loc, utype, arg1));
10298 return fold_convert_loc (loc, type, tem);
10299 }
10300 }
10301 }
10302
10303 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10304 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10305 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10306 {
10307 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10308
10309 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10310 if (mask == -1)
10311 return
10312 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10313 }
10314
10315 goto associate;
10316
10317 case RDIV_EXPR:
10318 /* Don't touch a floating-point divide by zero unless the mode
10319 of the constant can represent infinity. */
10320 if (TREE_CODE (arg1) == REAL_CST
10321 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10322 && real_zerop (arg1))
10323 return NULL_TREE;
10324
10325 /* (-A) / (-B) -> A / B */
10326 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10327 return fold_build2_loc (loc, RDIV_EXPR, type,
10328 TREE_OPERAND (arg0, 0),
10329 negate_expr (arg1));
10330 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10331 return fold_build2_loc (loc, RDIV_EXPR, type,
10332 negate_expr (arg0),
10333 TREE_OPERAND (arg1, 0));
10334 return NULL_TREE;
10335
10336 case TRUNC_DIV_EXPR:
10337 /* Fall through */
10338
10339 case FLOOR_DIV_EXPR:
10340 /* Simplify A / (B << N) where A and B are positive and B is
10341 a power of 2, to A >> (N + log2(B)). */
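/* For instance, with unsigned A, A / (4 << N) becomes A >> (N + 2),
since log2 (4) == 2. */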
10342 strict_overflow_p = false;
10343 if (TREE_CODE (arg1) == LSHIFT_EXPR
10344 && (TYPE_UNSIGNED (type)
10345 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10346 {
10347 tree sval = TREE_OPERAND (arg1, 0);
10348 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10349 {
10350 tree sh_cnt = TREE_OPERAND (arg1, 1);
10351 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10352 wi::exact_log2 (sval));
10353
10354 if (strict_overflow_p)
10355 fold_overflow_warning (("assuming signed overflow does not "
10356 "occur when simplifying A / (B << N)"),
10357 WARN_STRICT_OVERFLOW_MISC);
10358
10359 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10360 sh_cnt, pow2);
10361 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10362 fold_convert_loc (loc, type, arg0), sh_cnt);
10363 }
10364 }
10365
10366 /* Fall through */
10367
10368 case ROUND_DIV_EXPR:
10369 case CEIL_DIV_EXPR:
10370 case EXACT_DIV_EXPR:
10371 if (integer_zerop (arg1))
10372 return NULL_TREE;
10373
10374 /* Convert -A / -B to A / B when the type is signed and overflow is
10375 undefined. */
10376 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10377 && TREE_CODE (arg0) == NEGATE_EXPR
10378 && negate_expr_p (op1))
10379 {
10380 if (INTEGRAL_TYPE_P (type))
10381 fold_overflow_warning (("assuming signed overflow does not occur "
10382 "when distributing negation across "
10383 "division"),
10384 WARN_STRICT_OVERFLOW_MISC);
10385 return fold_build2_loc (loc, code, type,
10386 fold_convert_loc (loc, type,
10387 TREE_OPERAND (arg0, 0)),
10388 negate_expr (op1));
10389 }
10390 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10391 && TREE_CODE (arg1) == NEGATE_EXPR
10392 && negate_expr_p (op0))
10393 {
10394 if (INTEGRAL_TYPE_P (type))
10395 fold_overflow_warning (("assuming signed overflow does not occur "
10396 "when distributing negation across "
10397 "division"),
10398 WARN_STRICT_OVERFLOW_MISC);
10399 return fold_build2_loc (loc, code, type,
10400 negate_expr (op0),
10401 fold_convert_loc (loc, type,
10402 TREE_OPERAND (arg1, 0)));
10403 }
10404
10405 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10406 operation, EXACT_DIV_EXPR.
10407
10408 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10409 At one time others generated faster code, but it's not clear whether
10410 they still do after the last round of changes to the DIV code in expmed.c. */
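/* E.g. CEIL_DIV_EXPR (X * 8, 8) is known to divide exactly, so it can
be emitted as EXACT_DIV_EXPR (X * 8, 8). */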
10411 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10412 && multiple_of_p (type, arg0, arg1))
10413 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10414 fold_convert (type, arg0),
10415 fold_convert (type, arg1));
10416
10417 strict_overflow_p = false;
10418 if (TREE_CODE (arg1) == INTEGER_CST
10419 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10420 &strict_overflow_p)))
10421 {
10422 if (strict_overflow_p)
10423 fold_overflow_warning (("assuming signed overflow does not occur "
10424 "when simplifying division"),
10425 WARN_STRICT_OVERFLOW_MISC);
10426 return fold_convert_loc (loc, type, tem);
10427 }
10428
10429 return NULL_TREE;
10430
10431 case CEIL_MOD_EXPR:
10432 case FLOOR_MOD_EXPR:
10433 case ROUND_MOD_EXPR:
10434 case TRUNC_MOD_EXPR:
10435 strict_overflow_p = false;
10436 if (TREE_CODE (arg1) == INTEGER_CST
10437 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10438 &strict_overflow_p)))
10439 {
10440 if (strict_overflow_p)
10441 fold_overflow_warning (("assuming signed overflow does not occur "
10442 "when simplifying modulus"),
10443 WARN_STRICT_OVERFLOW_MISC);
10444 return fold_convert_loc (loc, type, tem);
10445 }
10446
10447 return NULL_TREE;
10448
10449 case LROTATE_EXPR:
10450 case RROTATE_EXPR:
10451 case RSHIFT_EXPR:
10452 case LSHIFT_EXPR:
10453 /* Since negative shift count is not well-defined,
10454 don't try to compute it in the compiler. */
10455 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10456 return NULL_TREE;
10457
10458 prec = element_precision (type);
10459
10460 /* If we have a rotate of a bit operation with the rotate count and
10461 the second operand of the bit operation both constant,
10462 permute the two operations. */
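/* For example, (X & C2) rrotate C1 becomes
(X rrotate C1) & (C2 rrotate C1), where the rotated constant folds at
compile time. */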
10463 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10464 && (TREE_CODE (arg0) == BIT_AND_EXPR
10465 || TREE_CODE (arg0) == BIT_IOR_EXPR
10466 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10467 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10468 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10469 fold_build2_loc (loc, code, type,
10470 TREE_OPERAND (arg0, 0), arg1),
10471 fold_build2_loc (loc, code, type,
10472 TREE_OPERAND (arg0, 1), arg1));
10473
10474 /* Two consecutive rotates adding up to some integer
10475 multiple of the precision of the type can be ignored. */
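/* E.g. in a 32-bit type, (X rrotate 5) rrotate 27 rotates by 32 bits
in total and is just X. */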
10476 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10477 && TREE_CODE (arg0) == RROTATE_EXPR
10478 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10479 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10480 prec) == 0)
10481 return TREE_OPERAND (arg0, 0);
10482
10483 return NULL_TREE;
10484
10485 case MIN_EXPR:
10486 case MAX_EXPR:
10487 goto associate;
10488
10489 case TRUTH_ANDIF_EXPR:
10490 /* Note that the operands of this must be ints
10491 and their values must be 0 or 1.
10492 ("true" is a fixed value perhaps depending on the language.) */
10493 /* If first arg is constant zero, return it. */
10494 if (integer_zerop (arg0))
10495 return fold_convert_loc (loc, type, arg0);
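/* Fall through */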
10496 case TRUTH_AND_EXPR:
10497 /* If either arg is constant true, drop it. */
10498 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10499 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10500 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10501 /* Preserve sequence points. */
10502 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10503 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10504 /* If second arg is constant zero, result is zero, but first arg
10505 must be evaluated. */
10506 if (integer_zerop (arg1))
10507 return omit_one_operand_loc (loc, type, arg1, arg0);
10508 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10509 case will be handled here. */
10510 if (integer_zerop (arg0))
10511 return omit_one_operand_loc (loc, type, arg0, arg1);
10512
10513 /* !X && X is always false. */
10514 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10515 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10516 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10517 /* X && !X is always false. */
10518 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10519 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10520 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10521
10522 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10523 means A >= Y && A != MAX, but in this case we know that
10524 A < X <= MAX. */
10525
10526 if (!TREE_SIDE_EFFECTS (arg0)
10527 && !TREE_SIDE_EFFECTS (arg1))
10528 {
10529 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10530 if (tem && !operand_equal_p (tem, arg0, 0))
10531 return fold_build2_loc (loc, code, type, tem, arg1);
10532
10533 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10534 if (tem && !operand_equal_p (tem, arg1, 0))
10535 return fold_build2_loc (loc, code, type, arg0, tem);
10536 }
10537
10538 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10539 != NULL_TREE)
10540 return tem;
10541
10542 return NULL_TREE;
10543
10544 case TRUTH_ORIF_EXPR:
10545 /* Note that the operands of this must be ints
10546 and their values must be 0 or true.
10547 ("true" is a fixed value perhaps depending on the language.) */
10548 /* If first arg is constant true, return it. */
10549 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10550 return fold_convert_loc (loc, type, arg0);
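/* Fall through */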
10551 case TRUTH_OR_EXPR:
10552 /* If either arg is constant zero, drop it. */
10553 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10554 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10555 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10556 /* Preserve sequence points. */
10557 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10558 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10559 /* If second arg is constant true, result is true, but we must
10560 evaluate first arg. */
10561 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10562 return omit_one_operand_loc (loc, type, arg1, arg0);
10563 /* Likewise for first arg, but note this only occurs here for
10564 TRUTH_OR_EXPR. */
10565 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10566 return omit_one_operand_loc (loc, type, arg0, arg1);
10567
10568 /* !X || X is always true. */
10569 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10570 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10571 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10572 /* X || !X is always true. */
10573 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10574 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10575 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10576
10577 /* (X && !Y) || (!X && Y) is X ^ Y */
10578 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10579 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10580 {
10581 tree a0, a1, l0, l1, n0, n1;
10582
10583 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10584 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10585
10586 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10587 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10588
10589 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10590 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10591
10592 if ((operand_equal_p (n0, a0, 0)
10593 && operand_equal_p (n1, a1, 0))
10594 || (operand_equal_p (n0, a1, 0)
10595 && operand_equal_p (n1, a0, 0)))
10596 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10597 }
10598
10599 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10600 != NULL_TREE)
10601 return tem;
10602
10603 return NULL_TREE;
10604
10605 case TRUTH_XOR_EXPR:
10606 /* If the second arg is constant zero, drop it. */
10607 if (integer_zerop (arg1))
10608 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10609 /* If the second arg is constant true, this is a logical inversion. */
10610 if (integer_onep (arg1))
10611 {
10612 tem = invert_truthvalue_loc (loc, arg0);
10613 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10614 }
10615 /* Identical arguments cancel to zero. */
10616 if (operand_equal_p (arg0, arg1, 0))
10617 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10618
10619 /* !X ^ X is always true. */
10620 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10621 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10622 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10623
10624 /* X ^ !X is always true. */
10625 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10626 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10627 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10628
10629 return NULL_TREE;
10630
10631 case EQ_EXPR:
10632 case NE_EXPR:
10633 STRIP_NOPS (arg0);
10634 STRIP_NOPS (arg1);
10635
10636 tem = fold_comparison (loc, code, type, op0, op1);
10637 if (tem != NULL_TREE)
10638 return tem;
10639
10640 /* bool_var != 1 becomes !bool_var. */
10641 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10642 && code == NE_EXPR)
10643 return fold_convert_loc (loc, type,
10644 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10645 TREE_TYPE (arg0), arg0));
10646
10647 /* bool_var == 0 becomes !bool_var. */
10648 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10649 && code == EQ_EXPR)
10650 return fold_convert_loc (loc, type,
10651 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10652 TREE_TYPE (arg0), arg0));
10653
10654 /* !exp != 0 becomes !exp */
10655 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10656 && code == NE_EXPR)
10657 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10658
10659 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10660 if ((TREE_CODE (arg0) == PLUS_EXPR
10661 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10662 || TREE_CODE (arg0) == MINUS_EXPR)
10663 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10664 0)),
10665 arg1, 0)
10666 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10667 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10668 {
10669 tree val = TREE_OPERAND (arg0, 1);
10670 val = fold_build2_loc (loc, code, type, val,
10671 build_int_cst (TREE_TYPE (val), 0));
10672 return omit_two_operands_loc (loc, type, val,
10673 TREE_OPERAND (arg0, 0), arg1);
10674 }
10675
10676 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10677 if ((TREE_CODE (arg1) == PLUS_EXPR
10678 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10679 || TREE_CODE (arg1) == MINUS_EXPR)
10680 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10681 0)),
10682 arg0, 0)
10683 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10684 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10685 {
10686 tree val = TREE_OPERAND (arg1, 1);
10687 val = fold_build2_loc (loc, code, type, val,
10688 build_int_cst (TREE_TYPE (val), 0));
10689 return omit_two_operands_loc (loc, type, val,
10690 TREE_OPERAND (arg1, 0), arg0);
10691 }
10692
10693 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10694 if (TREE_CODE (arg0) == MINUS_EXPR
10695 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10696 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10697 1)),
10698 arg1, 0)
10699 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10700 return omit_two_operands_loc (loc, type,
10701 code == NE_EXPR
10702 ? boolean_true_node : boolean_false_node,
10703 TREE_OPERAND (arg0, 1), arg1);
10704
10705 /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */
10706 if (TREE_CODE (arg1) == MINUS_EXPR
10707 && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
10708 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10709 1)),
10710 arg0, 0)
10711 && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
10712 return omit_two_operands_loc (loc, type,
10713 code == NE_EXPR
10714 ? boolean_true_node : boolean_false_node,
10715 TREE_OPERAND (arg1, 1), arg0);
10716
10717 /* If this is an EQ or NE comparison with zero and ARG0 is
10718 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10719 two operations, but the latter can be done in one less insn
10720 on machines that have only two-operand insns or on which a
10721 constant cannot be the first operand. */
10722 if (TREE_CODE (arg0) == BIT_AND_EXPR
10723 && integer_zerop (arg1))
10724 {
10725 tree arg00 = TREE_OPERAND (arg0, 0);
10726 tree arg01 = TREE_OPERAND (arg0, 1);
10727 if (TREE_CODE (arg00) == LSHIFT_EXPR
10728 && integer_onep (TREE_OPERAND (arg00, 0)))
10729 {
10730 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10731 arg01, TREE_OPERAND (arg00, 1));
10732 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10733 build_int_cst (TREE_TYPE (arg0), 1));
10734 return fold_build2_loc (loc, code, type,
10735 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10736 arg1);
10737 }
10738 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10739 && integer_onep (TREE_OPERAND (arg01, 0)))
10740 {
10741 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10742 arg00, TREE_OPERAND (arg01, 1));
10743 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10744 build_int_cst (TREE_TYPE (arg0), 1));
10745 return fold_build2_loc (loc, code, type,
10746 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10747 arg1);
10748 }
10749 }
10750
10751 /* If this is an NE or EQ comparison of zero against the result of a
10752 signed MOD operation whose second operand is a power of 2, make
10753 the MOD operation unsigned since it is simpler and equivalent. */
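/* E.g. for signed X, X % 16 == 0 holds exactly when
(unsigned) X % 16 == 0, and the unsigned modulus is a simple mask. */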
10754 if (integer_zerop (arg1)
10755 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10756 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10757 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10758 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10759 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10760 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10761 {
10762 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10763 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10764 fold_convert_loc (loc, newtype,
10765 TREE_OPERAND (arg0, 0)),
10766 fold_convert_loc (loc, newtype,
10767 TREE_OPERAND (arg0, 1)));
10768
10769 return fold_build2_loc (loc, code, type, newmod,
10770 fold_convert_loc (loc, newtype, arg1));
10771 }
10772
10773 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10774 C1 is a valid shift constant, and C2 is a power of two, i.e.
10775 a single bit. */
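/* E.g. ((X >> 2) & 4) != 0 becomes (X & 16) != 0, since the shifted
mask (4 << 2) still fits in the type. */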
10776 if (TREE_CODE (arg0) == BIT_AND_EXPR
10777 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10778 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10779 == INTEGER_CST
10780 && integer_pow2p (TREE_OPERAND (arg0, 1))
10781 && integer_zerop (arg1))
10782 {
10783 tree itype = TREE_TYPE (arg0);
10784 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10785 prec = TYPE_PRECISION (itype);
10786
10787 /* Check for a valid shift count. */
10788 if (wi::ltu_p (arg001, prec))
10789 {
10790 tree arg01 = TREE_OPERAND (arg0, 1);
10791 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10792 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10793 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10794 can be rewritten as (X & (C2 << C1)) != 0. */
10795 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10796 {
10797 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10798 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10799 return fold_build2_loc (loc, code, type, tem,
10800 fold_convert_loc (loc, itype, arg1));
10801 }
10802 /* Otherwise, for signed (arithmetic) shifts,
10803 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10804 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10805 else if (!TYPE_UNSIGNED (itype))
10806 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10807 arg000, build_int_cst (itype, 0));
10808 /* Otherwise, for unsigned (logical) shifts,
10809 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10810 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10811 else
10812 return omit_one_operand_loc (loc, type,
10813 code == EQ_EXPR ? integer_one_node
10814 : integer_zero_node,
10815 arg000);
10816 }
10817 }
10818
10819 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10820 Similarly for NE_EXPR. */
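/* E.g. (X & 0xf) == 0x10 can never hold, because 0x10 has a bit set
outside the mask, so the comparison folds to 0 (and the NE form
to 1). */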
10821 if (TREE_CODE (arg0) == BIT_AND_EXPR
10822 && TREE_CODE (arg1) == INTEGER_CST
10823 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10824 {
10825 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10826 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10827 TREE_OPERAND (arg0, 1));
10828 tree dandnotc
10829 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10830 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10831 notc);
10832 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10833 if (integer_nonzerop (dandnotc))
10834 return omit_one_operand_loc (loc, type, rslt, arg0);
10835 }
10836
10837 /* If this is a comparison of a field, we may be able to simplify it. */
10838 if ((TREE_CODE (arg0) == COMPONENT_REF
10839 || TREE_CODE (arg0) == BIT_FIELD_REF)
10840 /* Handle the constant case even without -O
10841 to make sure the warnings are given. */
10842 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10843 {
10844 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10845 if (t1)
10846 return t1;
10847 }
10848
10849 /* Optimize comparisons of strlen vs zero to a compare of the
10850 first character of the string vs zero. To wit,
10851 strlen(ptr) == 0 => *ptr == 0
10852 strlen(ptr) != 0 => *ptr != 0
10853 Other cases should reduce to one of these two (or a constant)
10854 due to the return value of strlen being unsigned. */
10855 if (TREE_CODE (arg0) == CALL_EXPR
10856 && integer_zerop (arg1))
10857 {
10858 tree fndecl = get_callee_fndecl (arg0);
10859
10860 if (fndecl
10861 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10862 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10863 && call_expr_nargs (arg0) == 1
10864 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10865 {
10866 tree iref = build_fold_indirect_ref_loc (loc,
10867 CALL_EXPR_ARG (arg0, 0));
10868 return fold_build2_loc (loc, code, type, iref,
10869 build_int_cst (TREE_TYPE (iref), 0));
10870 }
10871 }
10872
10873 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10874 of X. Similarly fold (X >> C) == 0 into X >= 0. */
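/* E.g. for 32-bit X, (X >> 31) != 0 is the sign-bit test X < 0,
converting X to a signed type first if necessary. */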
10875 if (TREE_CODE (arg0) == RSHIFT_EXPR
10876 && integer_zerop (arg1)
10877 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10878 {
10879 tree arg00 = TREE_OPERAND (arg0, 0);
10880 tree arg01 = TREE_OPERAND (arg0, 1);
10881 tree itype = TREE_TYPE (arg00);
10882 if (wi::eq_p (arg01, element_precision (itype) - 1))
10883 {
10884 if (TYPE_UNSIGNED (itype))
10885 {
10886 itype = signed_type_for (itype);
10887 arg00 = fold_convert_loc (loc, itype, arg00);
10888 }
10889 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10890 type, arg00, build_zero_cst (itype));
10891 }
10892 }
10893
10894 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10895 (X & C) == 0 when C is a single bit. */
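/* E.g. (~X & 8) == 0 holds exactly when bit 3 of X is set, i.e. when
(X & 8) != 0. */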
10896 if (TREE_CODE (arg0) == BIT_AND_EXPR
10897 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10898 && integer_zerop (arg1)
10899 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10900 {
10901 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10902 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10903 TREE_OPERAND (arg0, 1));
10904 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10905 type, tem,
10906 fold_convert_loc (loc, TREE_TYPE (arg0),
10907 arg1));
10908 }
10909
10910 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10911 constant C is a power of two, i.e. a single bit. */
10912 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10913 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10914 && integer_zerop (arg1)
10915 && integer_pow2p (TREE_OPERAND (arg0, 1))
10916 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10917 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10918 {
10919 tree arg00 = TREE_OPERAND (arg0, 0);
10920 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10921 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10922 }
10923
10924 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10925 when C is a power of two, i.e. a single bit. */
10926 if (TREE_CODE (arg0) == BIT_AND_EXPR
10927 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10928 && integer_zerop (arg1)
10929 && integer_pow2p (TREE_OPERAND (arg0, 1))
10930 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10931 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10932 {
10933 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10934 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10935 arg000, TREE_OPERAND (arg0, 1));
10936 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10937 tem, build_int_cst (TREE_TYPE (tem), 0));
10938 }
10939
10940 if (integer_zerop (arg1)
10941 && tree_expr_nonzero_p (arg0))
10942 {
10943 tree res = constant_boolean_node (code == NE_EXPR, type);
10944 return omit_one_operand_loc (loc, type, res, arg0);
10945 }
10946
10947 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
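/* E.g. (X & 7) == (Y & 7) holds iff X and Y agree in their low three
bits, i.e. iff ((X ^ Y) & 7) == 0. */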
10948 if (TREE_CODE (arg0) == BIT_AND_EXPR
10949 && TREE_CODE (arg1) == BIT_AND_EXPR)
10950 {
10951 tree arg00 = TREE_OPERAND (arg0, 0);
10952 tree arg01 = TREE_OPERAND (arg0, 1);
10953 tree arg10 = TREE_OPERAND (arg1, 0);
10954 tree arg11 = TREE_OPERAND (arg1, 1);
10955 tree itype = TREE_TYPE (arg0);
10956
10957 if (operand_equal_p (arg01, arg11, 0))
10958 return fold_build2_loc (loc, code, type,
10959 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10960 fold_build2_loc (loc,
10961 BIT_XOR_EXPR, itype,
10962 arg00, arg10),
10963 arg01),
10964 build_zero_cst (itype));
10965
10966 if (operand_equal_p (arg01, arg10, 0))
10967 return fold_build2_loc (loc, code, type,
10968 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10969 fold_build2_loc (loc,
10970 BIT_XOR_EXPR, itype,
10971 arg00, arg11),
10972 arg01),
10973 build_zero_cst (itype));
10974
10975 if (operand_equal_p (arg00, arg11, 0))
10976 return fold_build2_loc (loc, code, type,
10977 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10978 fold_build2_loc (loc,
10979 BIT_XOR_EXPR, itype,
10980 arg01, arg10),
10981 arg00),
10982 build_zero_cst (itype));
10983
10984 if (operand_equal_p (arg00, arg10, 0))
10985 return fold_build2_loc (loc, code, type,
10986 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10987 fold_build2_loc (loc,
10988 BIT_XOR_EXPR, itype,
10989 arg01, arg11),
10990 arg00),
10991 build_zero_cst (itype));
10992 }
10993
10994 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10995 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10996 {
10997 tree arg00 = TREE_OPERAND (arg0, 0);
10998 tree arg01 = TREE_OPERAND (arg0, 1);
10999 tree arg10 = TREE_OPERAND (arg1, 0);
11000 tree arg11 = TREE_OPERAND (arg1, 1);
11001 tree itype = TREE_TYPE (arg0);
11002
11003 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11004 operand_equal_p guarantees no side-effects so we don't need
11005 to use omit_one_operand on Z. */
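/* E.g. (X ^ Z) == (Y ^ Z) reduces to X == Y, since XOR by a common
term is invertible. */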
11006 if (operand_equal_p (arg01, arg11, 0))
11007 return fold_build2_loc (loc, code, type, arg00,
11008 fold_convert_loc (loc, TREE_TYPE (arg00),
11009 arg10));
11010 if (operand_equal_p (arg01, arg10, 0))
11011 return fold_build2_loc (loc, code, type, arg00,
11012 fold_convert_loc (loc, TREE_TYPE (arg00),
11013 arg11));
11014 if (operand_equal_p (arg00, arg11, 0))
11015 return fold_build2_loc (loc, code, type, arg01,
11016 fold_convert_loc (loc, TREE_TYPE (arg01),
11017 arg10));
11018 if (operand_equal_p (arg00, arg10, 0))
11019 return fold_build2_loc (loc, code, type, arg01,
11020 fold_convert_loc (loc, TREE_TYPE (arg01),
11021 arg11));
11022
11023 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11024 if (TREE_CODE (arg01) == INTEGER_CST
11025 && TREE_CODE (arg11) == INTEGER_CST)
11026 {
11027 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11028 fold_convert_loc (loc, itype, arg11));
11029 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11030 return fold_build2_loc (loc, code, type, tem,
11031 fold_convert_loc (loc, itype, arg10));
11032 }
11033 }
11034
11035 /* Attempt to simplify equality/inequality comparisons of complex
11036 values. Only lower the comparison if the result is known or
11037 can be simplified to a single scalar comparison. */
11038 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11039 || TREE_CODE (arg0) == COMPLEX_CST)
11040 && (TREE_CODE (arg1) == COMPLEX_EXPR
11041 || TREE_CODE (arg1) == COMPLEX_CST))
11042 {
11043 tree real0, imag0, real1, imag1;
11044 tree rcond, icond;
11045
11046 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11047 {
11048 real0 = TREE_OPERAND (arg0, 0);
11049 imag0 = TREE_OPERAND (arg0, 1);
11050 }
11051 else
11052 {
11053 real0 = TREE_REALPART (arg0);
11054 imag0 = TREE_IMAGPART (arg0);
11055 }
11056
11057 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11058 {
11059 real1 = TREE_OPERAND (arg1, 0);
11060 imag1 = TREE_OPERAND (arg1, 1);
11061 }
11062 else
11063 {
11064 real1 = TREE_REALPART (arg1);
11065 imag1 = TREE_IMAGPART (arg1);
11066 }
11067
11068 rcond = fold_binary_loc (loc, code, type, real0, real1);
11069 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11070 {
11071 if (integer_zerop (rcond))
11072 {
11073 if (code == EQ_EXPR)
11074 return omit_two_operands_loc (loc, type, boolean_false_node,
11075 imag0, imag1);
11076 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11077 }
11078 else
11079 {
11080 if (code == NE_EXPR)
11081 return omit_two_operands_loc (loc, type, boolean_true_node,
11082 imag0, imag1);
11083 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11084 }
11085 }
11086
11087 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11088 if (icond && TREE_CODE (icond) == INTEGER_CST)
11089 {
11090 if (integer_zerop (icond))
11091 {
11092 if (code == EQ_EXPR)
11093 return omit_two_operands_loc (loc, type, boolean_false_node,
11094 real0, real1);
11095 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11096 }
11097 else
11098 {
11099 if (code == NE_EXPR)
11100 return omit_two_operands_loc (loc, type, boolean_true_node,
11101 real0, real1);
11102 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11103 }
11104 }
11105 }
11106
11107 return NULL_TREE;
11108
11109 case LT_EXPR:
11110 case GT_EXPR:
11111 case LE_EXPR:
11112 case GE_EXPR:
11113 tem = fold_comparison (loc, code, type, op0, op1);
11114 if (tem != NULL_TREE)
11115 return tem;
11116
11117 /* Transform comparisons of the form X +- C CMP X. */
11118 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11119 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11120 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11121 && !HONOR_SNANS (arg0))
11122 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11123 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11124 {
11125 tree arg01 = TREE_OPERAND (arg0, 1);
11126 enum tree_code code0 = TREE_CODE (arg0);
11127 int is_positive;
11128
11129 if (TREE_CODE (arg01) == REAL_CST)
11130 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11131 else
11132 is_positive = tree_int_cst_sgn (arg01);
11133
11134 /* (X - c) > X becomes false. */
11135 if (code == GT_EXPR
11136 && ((code0 == MINUS_EXPR && is_positive >= 0)
11137 || (code0 == PLUS_EXPR && is_positive <= 0)))
11138 {
11139 if (TREE_CODE (arg01) == INTEGER_CST
11140 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11141 fold_overflow_warning (("assuming signed overflow does not "
11142 "occur when assuming that (X - c) > X "
11143 "is always false"),
11144 WARN_STRICT_OVERFLOW_ALL);
11145 return constant_boolean_node (0, type);
11146 }
11147
11148 /* Likewise (X + c) < X becomes false. */
11149 if (code == LT_EXPR
11150 && ((code0 == PLUS_EXPR && is_positive >= 0)
11151 || (code0 == MINUS_EXPR && is_positive <= 0)))
11152 {
11153 if (TREE_CODE (arg01) == INTEGER_CST
11154 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11155 fold_overflow_warning (("assuming signed overflow does not "
11156 "occur when assuming that "
11157 "(X + c) < X is always false"),
11158 WARN_STRICT_OVERFLOW_ALL);
11159 return constant_boolean_node (0, type);
11160 }
11161
11162 /* Convert (X - c) <= X to true. */
11163 if (!HONOR_NANS (arg1)
11164 && code == LE_EXPR
11165 && ((code0 == MINUS_EXPR && is_positive >= 0)
11166 || (code0 == PLUS_EXPR && is_positive <= 0)))
11167 {
11168 if (TREE_CODE (arg01) == INTEGER_CST
11169 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11170 fold_overflow_warning (("assuming signed overflow does not "
11171 "occur when assuming that "
11172 "(X - c) <= X is always true"),
11173 WARN_STRICT_OVERFLOW_ALL);
11174 return constant_boolean_node (1, type);
11175 }
11176
11177 /* Convert (X + c) >= X to true. */
11178 if (!HONOR_NANS (arg1)
11179 && code == GE_EXPR
11180 && ((code0 == PLUS_EXPR && is_positive >= 0)
11181 || (code0 == MINUS_EXPR && is_positive <= 0)))
11182 {
11183 if (TREE_CODE (arg01) == INTEGER_CST
11184 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11185 fold_overflow_warning (("assuming signed overflow does not "
11186 "occur when assuming that "
11187 "(X + c) >= X is always true"),
11188 WARN_STRICT_OVERFLOW_ALL);
11189 return constant_boolean_node (1, type);
11190 }
11191
11192 if (TREE_CODE (arg01) == INTEGER_CST)
11193 {
11194 /* Convert X + c > X and X - c < X to true for integers. */
11195 if (code == GT_EXPR
11196 && ((code0 == PLUS_EXPR && is_positive > 0)
11197 || (code0 == MINUS_EXPR && is_positive < 0)))
11198 {
11199 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11200 fold_overflow_warning (("assuming signed overflow does "
11201 "not occur when assuming that "
11202 "(X + c) > X is always true"),
11203 WARN_STRICT_OVERFLOW_ALL);
11204 return constant_boolean_node (1, type);
11205 }
11206
11207 if (code == LT_EXPR
11208 && ((code0 == MINUS_EXPR && is_positive > 0)
11209 || (code0 == PLUS_EXPR && is_positive < 0)))
11210 {
11211 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11212 fold_overflow_warning (("assuming signed overflow does "
11213 "not occur when assuming that "
11214 "(X - c) < X is always true"),
11215 WARN_STRICT_OVERFLOW_ALL);
11216 return constant_boolean_node (1, type);
11217 }
11218
11219 /* Convert X + c <= X and X - c >= X to false for integers. */
11220 if (code == LE_EXPR
11221 && ((code0 == PLUS_EXPR && is_positive > 0)
11222 || (code0 == MINUS_EXPR && is_positive < 0)))
11223 {
11224 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11225 fold_overflow_warning (("assuming signed overflow does "
11226 "not occur when assuming that "
11227 "(X + c) <= X is always false"),
11228 WARN_STRICT_OVERFLOW_ALL);
11229 return constant_boolean_node (0, type);
11230 }
11231
11232 if (code == GE_EXPR
11233 && ((code0 == MINUS_EXPR && is_positive > 0)
11234 || (code0 == PLUS_EXPR && is_positive < 0)))
11235 {
11236 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11237 fold_overflow_warning (("assuming signed overflow does "
11238 "not occur when assuming that "
11239 "(X - c) >= X is always false"),
11240 WARN_STRICT_OVERFLOW_ALL);
11241 return constant_boolean_node (0, type);
11242 }
11243 }
11244 }
11245
11246 /* If we are comparing an ABS_EXPR with a constant, we can
11247 convert all the cases into explicit comparisons, but they may
11248 well not be faster than doing the ABS and one comparison.
11249 But ABS (X) <= C is a range comparison, which becomes a subtraction
11250 and a comparison, and is probably faster. */
11251 if (code == LE_EXPR
11252 && TREE_CODE (arg1) == INTEGER_CST
11253 && TREE_CODE (arg0) == ABS_EXPR
11254 && ! TREE_SIDE_EFFECTS (arg0)
11255 && (0 != (tem = negate_expr (arg1)))
11256 && TREE_CODE (tem) == INTEGER_CST
11257 && !TREE_OVERFLOW (tem))
11258 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11259 build2 (GE_EXPR, type,
11260 TREE_OPERAND (arg0, 0), tem),
11261 build2 (LE_EXPR, type,
11262 TREE_OPERAND (arg0, 0), arg1));
11263
11264 /* Convert ABS_EXPR<x> >= 0 to true. */
11265 strict_overflow_p = false;
11266 if (code == GE_EXPR
11267 && (integer_zerop (arg1)
11268 || (! HONOR_NANS (arg0)
11269 && real_zerop (arg1)))
11270 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11271 {
11272 if (strict_overflow_p)
11273 fold_overflow_warning (("assuming signed overflow does not occur "
11274 "when simplifying comparison of "
11275 "absolute value and zero"),
11276 WARN_STRICT_OVERFLOW_CONDITIONAL);
11277 return omit_one_operand_loc (loc, type,
11278 constant_boolean_node (true, type),
11279 arg0);
11280 }
11281
11282 /* Convert ABS_EXPR<x> < 0 to false. */
11283 strict_overflow_p = false;
11284 if (code == LT_EXPR
11285 && (integer_zerop (arg1) || real_zerop (arg1))
11286 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11287 {
11288 if (strict_overflow_p)
11289 fold_overflow_warning (("assuming signed overflow does not occur "
11290 "when simplifying comparison of "
11291 "absolute value and zero"),
11292 WARN_STRICT_OVERFLOW_CONDITIONAL);
11293 return omit_one_operand_loc (loc, type,
11294 constant_boolean_node (false, type),
11295 arg0);
11296 }
11297
11298 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11299 and similarly for >= into !=. */
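/* E.g. for unsigned X, X < (1 << Y) holds iff no bit at position Y or
above is set in X, i.e. iff (X >> Y) == 0. */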
11300 if ((code == LT_EXPR || code == GE_EXPR)
11301 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11302 && TREE_CODE (arg1) == LSHIFT_EXPR
11303 && integer_onep (TREE_OPERAND (arg1, 0)))
11304 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11305 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11306 TREE_OPERAND (arg1, 1)),
11307 build_zero_cst (TREE_TYPE (arg0)));
11308
11309 /* Similarly for X < (cast) (1 << Y). But the cast can't be
11310 narrowing, otherwise Y might be >= # of bits in X's type and thus
11311 e.g. (unsigned char) (1 << Y) for Y == 15 might be 0.
11312 If the cast is widening, then 1 << Y should have unsigned type,
11313 otherwise if Y is the number of bits in the signed shift type
11314 minus 1, we can't optimize this. E.g. (unsigned long long)
11315 (1 << Y) for Y == 31 might be 0xffffffff80000000. */
11316 if ((code == LT_EXPR || code == GE_EXPR)
11317 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11318 && CONVERT_EXPR_P (arg1)
11319 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11320 && (element_precision (TREE_TYPE (arg1))
11321 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11322 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11323 || (element_precision (TREE_TYPE (arg1))
11324 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11325 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11326 {
11327 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11328 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11329 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11330 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11331 build_zero_cst (TREE_TYPE (arg0)));
11332 }
11333
11334 return NULL_TREE;
11335
11336 case UNORDERED_EXPR:
11337 case ORDERED_EXPR:
11338 case UNLT_EXPR:
11339 case UNLE_EXPR:
11340 case UNGT_EXPR:
11341 case UNGE_EXPR:
11342 case UNEQ_EXPR:
11343 case LTGT_EXPR:
11344 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
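/* E.g. with floats f1 and f2, comparing (double) f1 with (double) f2
compares the same values as comparing f1 with f2 directly, because the
widening conversion is exact. */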
11345 {
11346 tree targ0 = strip_float_extensions (arg0);
11347 tree targ1 = strip_float_extensions (arg1);
11348 tree newtype = TREE_TYPE (targ0);
11349
11350 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11351 newtype = TREE_TYPE (targ1);
11352
11353 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11354 return fold_build2_loc (loc, code, type,
11355 fold_convert_loc (loc, newtype, targ0),
11356 fold_convert_loc (loc, newtype, targ1));
11357 }
11358
11359 return NULL_TREE;
11360
11361 case COMPOUND_EXPR:
11362 /* When pedantic, a compound expression can be neither an lvalue
11363 nor an integer constant expression. */
11364 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11365 return NULL_TREE;
11366 /* Don't let (0, 0) be a null pointer constant. */
11367 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11368 : fold_convert_loc (loc, type, arg1);
11369 return pedantic_non_lvalue_loc (loc, tem);
11370
11371 case ASSERT_EXPR:
11372 /* An ASSERT_EXPR should never be passed to fold_binary. */
11373 gcc_unreachable ();
11374
11375 default:
11376 return NULL_TREE;
11377 } /* switch (code) */
11378 }
11379
11380 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11381 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11382 of GOTO_EXPR. */
11383
11384 static tree
11385 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11386 {
11387 switch (TREE_CODE (*tp))
11388 {
11389 case LABEL_EXPR:
11390 return *tp;
11391
11392 case GOTO_EXPR:
11393 *walk_subtrees = 0;
11394
11395 /* ... fall through ... */
11396
11397 default:
11398 return NULL_TREE;
11399 }
11400 }
11401
11402 /* Return whether the sub-tree ST contains a label which is accessible from
11403 outside the sub-tree. */
11404
11405 static bool
11406 contains_label_p (tree st)
11407 {
11408 return
11409 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11410 }
11411
11412 /* Fold a ternary expression of code CODE and type TYPE with operands
11413 OP0, OP1, and OP2. Return the folded expression if folding is
11414 successful. Otherwise, return NULL_TREE. */
11415
11416 tree
11417 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11418 tree op0, tree op1, tree op2)
11419 {
11420 tree tem;
11421 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11422 enum tree_code_class kind = TREE_CODE_CLASS (code);
11423
11424 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11425 && TREE_CODE_LENGTH (code) == 3);
11426
11427 /* If this is a commutative operation, and OP0 is a constant, move it
11428 to OP1 to reduce the number of tests below. */
11429 if (commutative_ternary_tree_code (code)
11430 && tree_swap_operands_p (op0, op1, true))
11431 return fold_build3_loc (loc, code, type, op1, op0, op2);
11432
11433 tem = generic_simplify (loc, code, type, op0, op1, op2);
11434 if (tem)
11435 return tem;
11436
11437 /* Strip any conversions that don't change the mode. This is safe
11438 for every expression, except for a comparison expression because
11439 its signedness is derived from its operands. So, in the latter
11440 case, only strip conversions that don't change the signedness.
11441
11442 Note that this is done as an internal manipulation within the
11443 constant folder, in order to find the simplest representation of
11444 the arguments so that their form can be studied. In any cases,
11445 the appropriate type conversions should be put back in the tree
11446 that will get out of the constant folder. */
11447 if (op0)
11448 {
11449 arg0 = op0;
11450 STRIP_NOPS (arg0);
11451 }
11452
11453 if (op1)
11454 {
11455 arg1 = op1;
11456 STRIP_NOPS (arg1);
11457 }
11458
11459 if (op2)
11460 {
11461 arg2 = op2;
11462 STRIP_NOPS (arg2);
11463 }
11464
11465 switch (code)
11466 {
11467 case COMPONENT_REF:
11468 if (TREE_CODE (arg0) == CONSTRUCTOR
11469 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11470 {
11471 unsigned HOST_WIDE_INT idx;
11472 tree field, value;
11473 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11474 if (field == arg1)
11475 return value;
11476 }
11477 return NULL_TREE;
11478
11479 case COND_EXPR:
11480 case VEC_COND_EXPR:
11481 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11482 so all simple results must be passed through pedantic_non_lvalue. */
11483 if (TREE_CODE (arg0) == INTEGER_CST)
11484 {
11485 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11486 tem = integer_zerop (arg0) ? op2 : op1;
11487 /* Only optimize constant conditions when the selected branch
11488 has the same type as the COND_EXPR. This avoids optimizing
11489 away "c ? x : throw", where the throw has a void type.
11490 Avoid throwing away that operand which contains label. */
11491 if ((!TREE_SIDE_EFFECTS (unused_op)
11492 || !contains_label_p (unused_op))
11493 && (! VOID_TYPE_P (TREE_TYPE (tem))
11494 || VOID_TYPE_P (type)))
11495 return pedantic_non_lvalue_loc (loc, tem);
11496 return NULL_TREE;
11497 }
11498 else if (TREE_CODE (arg0) == VECTOR_CST)
11499 {
11500 if ((TREE_CODE (arg1) == VECTOR_CST
11501 || TREE_CODE (arg1) == CONSTRUCTOR)
11502 && (TREE_CODE (arg2) == VECTOR_CST
11503 || TREE_CODE (arg2) == CONSTRUCTOR))
11504 {
11505 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11506 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11507 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11508 for (i = 0; i < nelts; i++)
11509 {
11510 tree val = VECTOR_CST_ELT (arg0, i);
11511 if (integer_all_onesp (val))
11512 sel[i] = i;
11513 else if (integer_zerop (val))
11514 sel[i] = nelts + i;
11515 else /* Currently unreachable. */
11516 return NULL_TREE;
11517 }
11518 tree t = fold_vec_perm (type, arg1, arg2, sel);
11519 if (t != NULL_TREE)
11520 return t;
11521 }
11522 }
11523
11524 /* If we have A op B ? A : C, we may be able to convert this to a
11525 simpler expression, depending on the operation and the values
11526 of B and C. Signed zeros prevent all of these transformations,
11527 for reasons given above each one.
11528
11529 Also try swapping the arguments and inverting the conditional. */
11530 if (COMPARISON_CLASS_P (arg0)
11531 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11532 arg1, TREE_OPERAND (arg0, 1))
11533 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11534 {
11535 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11536 if (tem)
11537 return tem;
11538 }
11539
11540 if (COMPARISON_CLASS_P (arg0)
11541 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11542 op2,
11543 TREE_OPERAND (arg0, 1))
11544 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11545 {
11546 location_t loc0 = expr_location_or (arg0, loc);
11547 tem = fold_invert_truthvalue (loc0, arg0);
11548 if (tem && COMPARISON_CLASS_P (tem))
11549 {
11550 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11551 if (tem)
11552 return tem;
11553 }
11554 }
11555
11556 /* If the second operand is simpler than the third, swap them
11557 since that produces better jump optimization results. */
11558 if (truth_value_p (TREE_CODE (arg0))
11559 && tree_swap_operands_p (op1, op2, false))
11560 {
11561 location_t loc0 = expr_location_or (arg0, loc);
11562 /* See if this can be inverted. If it can't, possibly because
11563 it was a floating-point inequality comparison, don't do
11564 anything. */
11565 tem = fold_invert_truthvalue (loc0, arg0);
11566 if (tem)
11567 return fold_build3_loc (loc, code, type, tem, op2, op1);
11568 }
11569
11570 /* Convert A ? 1 : 0 to simply A. */
11571 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11572 : (integer_onep (op1)
11573 && !VECTOR_TYPE_P (type)))
11574 && integer_zerop (op2)
11575 /* If we try to convert OP0 to our type, the
11576 call to fold will try to move the conversion inside
11577 a COND, which will recurse. In that case, the COND_EXPR
11578 is probably the best choice, so leave it alone. */
11579 && type == TREE_TYPE (arg0))
11580 return pedantic_non_lvalue_loc (loc, arg0);
11581
11582 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11583 over COND_EXPR in cases such as floating point comparisons. */
11584 if (integer_zerop (op1)
11585 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11586 : (integer_onep (op2)
11587 && !VECTOR_TYPE_P (type)))
11588 && truth_value_p (TREE_CODE (arg0)))
11589 return pedantic_non_lvalue_loc (loc,
11590 fold_convert_loc (loc, type,
11591 invert_truthvalue_loc (loc,
11592 arg0)));
11593
11594 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
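/* E.g. with 32-bit A, A < 0 ? 0x80000000 : 0 folds to
A & 0x80000000. */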
11595 if (TREE_CODE (arg0) == LT_EXPR
11596 && integer_zerop (TREE_OPERAND (arg0, 1))
11597 && integer_zerop (op2)
11598 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11599 {
11600 /* sign_bit_p looks through both zero and sign extensions,
11601 but for this optimization only sign extensions are
11602 usable. */
11603 tree tem2 = TREE_OPERAND (arg0, 0);
11604 while (tem != tem2)
11605 {
11606 if (TREE_CODE (tem2) != NOP_EXPR
11607 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11608 {
11609 tem = NULL_TREE;
11610 break;
11611 }
11612 tem2 = TREE_OPERAND (tem2, 0);
11613 }
11614 /* sign_bit_p only checks ARG1 bits within A's precision.
11615 If <sign bit of A> has wider type than A, bits outside
11616 of A's precision in <sign bit of A> need to be checked.
11617 If they are all 0, this optimization needs to be done
11618 in unsigned A's type; if they are all 1, in signed A's type;
11619 otherwise this can't be done. */
11620 if (tem
11621 && TYPE_PRECISION (TREE_TYPE (tem))
11622 < TYPE_PRECISION (TREE_TYPE (arg1))
11623 && TYPE_PRECISION (TREE_TYPE (tem))
11624 < TYPE_PRECISION (type))
11625 {
11626 int inner_width, outer_width;
11627 tree tem_type;
11628
11629 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11630 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11631 if (outer_width > TYPE_PRECISION (type))
11632 outer_width = TYPE_PRECISION (type);
11633
11634 wide_int mask = wi::shifted_mask
11635 (inner_width, outer_width - inner_width, false,
11636 TYPE_PRECISION (TREE_TYPE (arg1)));
11637
11638 wide_int common = mask & arg1;
11639 if (common == mask)
11640 {
11641 tem_type = signed_type_for (TREE_TYPE (tem));
11642 tem = fold_convert_loc (loc, tem_type, tem);
11643 }
11644 else if (common == 0)
11645 {
11646 tem_type = unsigned_type_for (TREE_TYPE (tem));
11647 tem = fold_convert_loc (loc, tem_type, tem);
11648 }
11649 else
11650 tem = NULL;
11651 }
11652
11653 if (tem)
11654 return
11655 fold_convert_loc (loc, type,
11656 fold_build2_loc (loc, BIT_AND_EXPR,
11657 TREE_TYPE (tem), tem,
11658 fold_convert_loc (loc,
11659 TREE_TYPE (tem),
11660 arg1)));
11661 }
11662
11663 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11664 already handled above. */
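/* E.g. (A >> 3) & 1 ? 8 : 0 selects bit 3 of A, which is just
A & 8. */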
11665 if (TREE_CODE (arg0) == BIT_AND_EXPR
11666 && integer_onep (TREE_OPERAND (arg0, 1))
11667 && integer_zerop (op2)
11668 && integer_pow2p (arg1))
11669 {
11670 tree tem = TREE_OPERAND (arg0, 0);
11671 STRIP_NOPS (tem);
11672 if (TREE_CODE (tem) == RSHIFT_EXPR
11673 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11674 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11675 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11676 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11677 TREE_OPERAND (tem, 0), arg1);
11678 }
11679
11680 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11681 is probably obsolete because the first operand should be a
11682 truth value (that's why we have the two cases above), but let's
11683 leave it in until we can confirm this for all front-ends. */
11684 if (integer_zerop (op2)
11685 && TREE_CODE (arg0) == NE_EXPR
11686 && integer_zerop (TREE_OPERAND (arg0, 1))
11687 && integer_pow2p (arg1)
11688 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11689 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11690 arg1, OEP_ONLY_CONST))
11691 return pedantic_non_lvalue_loc (loc,
11692 fold_convert_loc (loc, type,
11693 TREE_OPERAND (arg0, 0)));
11694
11695 /* Disable the transformations below for vectors, since
11696 fold_binary_op_with_conditional_arg may undo them immediately,
11697 yielding an infinite loop. */
11698 if (code == VEC_COND_EXPR)
11699 return NULL_TREE;
11700
11701 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11702 if (integer_zerop (op2)
11703 && truth_value_p (TREE_CODE (arg0))
11704 && truth_value_p (TREE_CODE (arg1))
11705 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11706 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11707 : TRUTH_ANDIF_EXPR,
11708 type, fold_convert_loc (loc, type, arg0), arg1);
11709
11710 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11711 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11712 && truth_value_p (TREE_CODE (arg0))
11713 && truth_value_p (TREE_CODE (arg1))
11714 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11715 {
11716 location_t loc0 = expr_location_or (arg0, loc);
11717 /* Only perform transformation if ARG0 is easily inverted. */
11718 tem = fold_invert_truthvalue (loc0, arg0);
11719 if (tem)
11720 return fold_build2_loc (loc, code == VEC_COND_EXPR
11721 ? BIT_IOR_EXPR
11722 : TRUTH_ORIF_EXPR,
11723 type, fold_convert_loc (loc, type, tem),
11724 arg1);
11725 }
11726
11727 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11728 if (integer_zerop (arg1)
11729 && truth_value_p (TREE_CODE (arg0))
11730 && truth_value_p (TREE_CODE (op2))
11731 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11732 {
11733 location_t loc0 = expr_location_or (arg0, loc);
11734 /* Only perform transformation if ARG0 is easily inverted. */
11735 tem = fold_invert_truthvalue (loc0, arg0);
11736 if (tem)
11737 return fold_build2_loc (loc, code == VEC_COND_EXPR
11738 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11739 type, fold_convert_loc (loc, type, tem),
11740 op2);
11741 }
11742
11743 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11744 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11745 && truth_value_p (TREE_CODE (arg0))
11746 && truth_value_p (TREE_CODE (op2))
11747 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11748 return fold_build2_loc (loc, code == VEC_COND_EXPR
11749 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11750 type, fold_convert_loc (loc, type, arg0), op2);
11751
11752 return NULL_TREE;
11753
11754 case CALL_EXPR:
11755 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11756 of fold_ternary on them. */
11757 gcc_unreachable ();
11758
11759 case BIT_FIELD_REF:
11760 if ((TREE_CODE (arg0) == VECTOR_CST
11761 || (TREE_CODE (arg0) == CONSTRUCTOR
11762 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11763 && (type == TREE_TYPE (TREE_TYPE (arg0))
11764 || (TREE_CODE (type) == VECTOR_TYPE
11765 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11766 {
11767 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11768 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11769 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11770 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11771
11772 if (n != 0
11773 && (idx % width) == 0
11774 && (n % width) == 0
11775 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11776 {
11777 idx = idx / width;
11778 n = n / width;
11779
11780 if (TREE_CODE (arg0) == VECTOR_CST)
11781 {
11782 if (n == 1)
11783 return VECTOR_CST_ELT (arg0, idx);
11784
11785 tree *vals = XALLOCAVEC (tree, n);
11786 for (unsigned i = 0; i < n; ++i)
11787 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11788 return build_vector (type, vals);
11789 }
11790
11791 /* Constructor elements can be subvectors. */
11792 unsigned HOST_WIDE_INT k = 1;
11793 if (CONSTRUCTOR_NELTS (arg0) != 0)
11794 {
11795 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11796 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11797 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11798 }
11799
11800 /* We keep an exact subset of the constructor elements. */
11801 if ((idx % k) == 0 && (n % k) == 0)
11802 {
11803 if (CONSTRUCTOR_NELTS (arg0) == 0)
11804 return build_constructor (type, NULL);
11805 idx /= k;
11806 n /= k;
11807 if (n == 1)
11808 {
11809 if (idx < CONSTRUCTOR_NELTS (arg0))
11810 return CONSTRUCTOR_ELT (arg0, idx)->value;
11811 return build_zero_cst (type);
11812 }
11813
11814 vec<constructor_elt, va_gc> *vals;
11815 vec_alloc (vals, n);
11816 for (unsigned i = 0;
11817 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11818 ++i)
11819 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11820 CONSTRUCTOR_ELT
11821 (arg0, idx + i)->value);
11822 return build_constructor (type, vals);
11823 }
11824 /* The bitfield references a single constructor element. */
11825 else if (idx + n <= (idx / k + 1) * k)
11826 {
11827 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11828 return build_zero_cst (type);
11829 else if (n == k)
11830 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11831 else
11832 return fold_build3_loc (loc, code, type,
11833 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11834 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11835 }
11836 }
11837 }
11838
11839 /* A bit-field-ref that referenced the full argument can be stripped. */
11840 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11841 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11842 && integer_zerop (op2))
11843 return fold_convert_loc (loc, type, arg0);
11844
11845 /* On constants we can use native encode/interpret to constant
11846 fold (nearly) all BIT_FIELD_REFs. */
11847 if (CONSTANT_CLASS_P (arg0)
11848 && can_native_interpret_type_p (type)
11849 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11850 /* This limitation should not be necessary; we just need to
11851 round this up to the mode size. */
11852 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11853 /* Need bit-shifting of the buffer to relax the following. */
11854 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11855 {
11856 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11857 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11858 unsigned HOST_WIDE_INT clen;
11859 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11860 /* ??? We cannot tell native_encode_expr to start at an
11861 arbitrary byte, so limit ourselves to a reasonable amount
11862 of work. */
11863 if (clen <= 4096)
11864 {
11865 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11866 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11867 if (len > 0
11868 && len * BITS_PER_UNIT >= bitpos + bitsize)
11869 {
11870 tree v = native_interpret_expr (type,
11871 b + bitpos / BITS_PER_UNIT,
11872 bitsize / BITS_PER_UNIT);
11873 if (v)
11874 return v;
11875 }
11876 }
11877 }
11878
11879 return NULL_TREE;
11880
11881 case FMA_EXPR:
11882 /* For integers we can decompose the FMA if possible. */
11883 if (TREE_CODE (arg0) == INTEGER_CST
11884 && TREE_CODE (arg1) == INTEGER_CST)
11885 return fold_build2_loc (loc, PLUS_EXPR, type,
11886 const_binop (MULT_EXPR, arg0, arg1), arg2);
11887 if (integer_zerop (arg2))
11888 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11889
11890 return fold_fma (loc, type, arg0, arg1, arg2);
11891
11892 case VEC_PERM_EXPR:
11893 if (TREE_CODE (arg2) == VECTOR_CST)
11894 {
11895 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11896 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11897 unsigned char *sel2 = sel + nelts;
11898 bool need_mask_canon = false;
11899 bool need_mask_canon2 = false;
11900 bool all_in_vec0 = true;
11901 bool all_in_vec1 = true;
11902 bool maybe_identity = true;
11903 bool single_arg = (op0 == op1);
11904 bool changed = false;
11905
11906 mask2 = 2 * nelts - 1;
11907 mask = single_arg ? (nelts - 1) : mask2;
11908 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11909 for (i = 0; i < nelts; i++)
11910 {
11911 tree val = VECTOR_CST_ELT (arg2, i);
11912 if (TREE_CODE (val) != INTEGER_CST)
11913 return NULL_TREE;
11914
11915 /* Make sure that the perm value is in an acceptable
11916 range. */
11917 wide_int t = val;
11918 need_mask_canon |= wi::gtu_p (t, mask);
11919 need_mask_canon2 |= wi::gtu_p (t, mask2);
11920 sel[i] = t.to_uhwi () & mask;
11921 sel2[i] = t.to_uhwi () & mask2;
11922
11923 if (sel[i] < nelts)
11924 all_in_vec1 = false;
11925 else
11926 all_in_vec0 = false;
11927
11928 if ((sel[i] & (nelts-1)) != i)
11929 maybe_identity = false;
11930 }
11931
11932 if (maybe_identity)
11933 {
11934 if (all_in_vec0)
11935 return op0;
11936 if (all_in_vec1)
11937 return op1;
11938 }
11939
11940 if (all_in_vec0)
11941 op1 = op0;
11942 else if (all_in_vec1)
11943 {
11944 op0 = op1;
11945 for (i = 0; i < nelts; i++)
11946 sel[i] -= nelts;
11947 need_mask_canon = true;
11948 }
11949
11950 if ((TREE_CODE (op0) == VECTOR_CST
11951 || TREE_CODE (op0) == CONSTRUCTOR)
11952 && (TREE_CODE (op1) == VECTOR_CST
11953 || TREE_CODE (op1) == CONSTRUCTOR))
11954 {
11955 tree t = fold_vec_perm (type, op0, op1, sel);
11956 if (t != NULL_TREE)
11957 return t;
11958 }
11959
11960 if (op0 == op1 && !single_arg)
11961 changed = true;
11962
11963 /* Some targets are deficient and fail to expand a single
11964 argument permutation while still allowing an equivalent
11965 2-argument version. */
11966 if (need_mask_canon && arg2 == op2
11967 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11968 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11969 {
11970 need_mask_canon = need_mask_canon2;
11971 sel = sel2;
11972 }
11973
11974 if (need_mask_canon && arg2 == op2)
11975 {
11976 tree *tsel = XALLOCAVEC (tree, nelts);
11977 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11978 for (i = 0; i < nelts; i++)
11979 tsel[i] = build_int_cst (eltype, sel[i]);
11980 op2 = build_vector (TREE_TYPE (arg2), tsel);
11981 changed = true;
11982 }
11983
11984 if (changed)
11985 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11986 }
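	  /* Example (editorial): for V4SI operands with a constant
	     selector {4, 5, 6, 7}, every index points into the second
	     vector and {4,5,6,7} & 3 == {0,1,2,3}, so all_in_vec1 and
	     maybe_identity both hold and op1 is returned as-is.  */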
11987 return NULL_TREE;
11988
11989 default:
11990 return NULL_TREE;
11991 } /* switch (code) */
11992 }
11993
11994 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11995 of an array (or vector). */
11996
11997 tree
11998 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11999 {
12000 tree index_type = NULL_TREE;
12001 offset_int low_bound = 0;
12002
12003 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12004 {
12005 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12006 if (domain_type && TYPE_MIN_VALUE (domain_type))
12007 {
12008 /* Static constructors for variably sized objects make no sense. */
12009 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12010 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12011 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12012 }
12013 }
12014
12015 if (index_type)
12016 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12017 TYPE_SIGN (index_type));
12018
12019 offset_int index = low_bound - 1;
12020 if (index_type)
12021 index = wi::ext (index, TYPE_PRECISION (index_type),
12022 TYPE_SIGN (index_type));
12023
12024 offset_int max_index;
12025 unsigned HOST_WIDE_INT cnt;
12026 tree cfield, cval;
12027
12028 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12029 {
12030 /* An array constructor might explicitly set the index, specify a range,
12031 or leave the index NULL, meaning it is the next index after the
12032 previous one. */
12033 if (cfield)
12034 {
12035 if (TREE_CODE (cfield) == INTEGER_CST)
12036 max_index = index = wi::to_offset (cfield);
12037 else
12038 {
12039 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12040 index = wi::to_offset (TREE_OPERAND (cfield, 0));
12041 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
12042 }
12043 }
12044 else
12045 {
12046 index += 1;
12047 if (index_type)
12048 index = wi::ext (index, TYPE_PRECISION (index_type),
12049 TYPE_SIGN (index_type));
12050 max_index = index;
12051 }
12052
12053 /* Do we have a match? */
12054 if (wi::cmpu (access_index, index) >= 0
12055 && wi::cmpu (access_index, max_index) <= 0)
12056 return cval;
12057 }
12058 return NULL_TREE;
12059 }
12060
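/* Usage sketch (editorial): for a CONSTRUCTOR such as
   { [0] = a, b, [5] = c }, the second element has a NULL index and so
   follows its predecessor; get_array_ctor_element_at_index then maps
   access index 1 to b, 5 to c, and 3 to NULL_TREE.  */
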
12061 /* Perform constant folding and related simplification of EXPR.
12062 The related simplifications include x*1 => x, x*0 => 0, etc.,
12063 and application of the associative law.
12064 NOP_EXPR conversions may be removed freely (as long as we
12065 are careful not to change the type of the overall expression).
12066 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12067 but we can constant-fold them if they have constant operands. */
12068
12069 #ifdef ENABLE_FOLD_CHECKING
12070 # define fold(x) fold_1 (x)
12071 static tree fold_1 (tree);
12072 static
12073 #endif
12074 tree
12075 fold (tree expr)
12076 {
12077 const tree t = expr;
12078 enum tree_code code = TREE_CODE (t);
12079 enum tree_code_class kind = TREE_CODE_CLASS (code);
12080 tree tem;
12081 location_t loc = EXPR_LOCATION (expr);
12082
12083 /* Return right away if a constant. */
12084 if (kind == tcc_constant)
12085 return t;
12086
12087 /* CALL_EXPR-like objects with variable numbers of operands are
12088 treated specially. */
12089 if (kind == tcc_vl_exp)
12090 {
12091 if (code == CALL_EXPR)
12092 {
12093 tem = fold_call_expr (loc, expr, false);
12094 return tem ? tem : expr;
12095 }
12096 return expr;
12097 }
12098
12099 if (IS_EXPR_CODE_CLASS (kind))
12100 {
12101 tree type = TREE_TYPE (t);
12102 tree op0, op1, op2;
12103
12104 switch (TREE_CODE_LENGTH (code))
12105 {
12106 case 1:
12107 op0 = TREE_OPERAND (t, 0);
12108 tem = fold_unary_loc (loc, code, type, op0);
12109 return tem ? tem : expr;
12110 case 2:
12111 op0 = TREE_OPERAND (t, 0);
12112 op1 = TREE_OPERAND (t, 1);
12113 tem = fold_binary_loc (loc, code, type, op0, op1);
12114 return tem ? tem : expr;
12115 case 3:
12116 op0 = TREE_OPERAND (t, 0);
12117 op1 = TREE_OPERAND (t, 1);
12118 op2 = TREE_OPERAND (t, 2);
12119 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12120 return tem ? tem : expr;
12121 default:
12122 break;
12123 }
12124 }
12125
12126 switch (code)
12127 {
12128 case ARRAY_REF:
12129 {
12130 tree op0 = TREE_OPERAND (t, 0);
12131 tree op1 = TREE_OPERAND (t, 1);
12132
12133 if (TREE_CODE (op1) == INTEGER_CST
12134 && TREE_CODE (op0) == CONSTRUCTOR
12135 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12136 {
12137 tree val = get_array_ctor_element_at_index (op0,
12138 wi::to_offset (op1));
12139 if (val)
12140 return val;
12141 }
12142
12143 return t;
12144 }
12145
12146 /* Return a VECTOR_CST if possible. */
12147 case CONSTRUCTOR:
12148 {
12149 tree type = TREE_TYPE (t);
12150 if (TREE_CODE (type) != VECTOR_TYPE)
12151 return t;
12152
12153 unsigned i;
12154 tree val;
12155 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12156 if (! CONSTANT_CLASS_P (val))
12157 return t;
12158
12159 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12160 }
12161
12162 case CONST_DECL:
12163 return fold (DECL_INITIAL (t));
12164
12165 default:
12166 return t;
12167 } /* switch (code) */
12168 }
12169
12170 #ifdef ENABLE_FOLD_CHECKING
12171 #undef fold
12172
12173 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12174 hash_table<nofree_ptr_hash<const tree_node> > *);
12175 static void fold_check_failed (const_tree, const_tree);
12176 void print_fold_checksum (const_tree);
12177
12178 /* When --enable-checking=fold, compute a digest of EXPR before
12179 and after the actual fold call to verify that fold did not
12180 accidentally change the original EXPR. */
12181
12182 tree
12183 fold (tree expr)
12184 {
12185 tree ret;
12186 struct md5_ctx ctx;
12187 unsigned char checksum_before[16], checksum_after[16];
12188 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12189
12190 md5_init_ctx (&ctx);
12191 fold_checksum_tree (expr, &ctx, &ht);
12192 md5_finish_ctx (&ctx, checksum_before);
12193 ht.empty ();
12194
12195 ret = fold_1 (expr);
12196
12197 md5_init_ctx (&ctx);
12198 fold_checksum_tree (expr, &ctx, &ht);
12199 md5_finish_ctx (&ctx, checksum_after);
12200
12201 if (memcmp (checksum_before, checksum_after, 16))
12202 fold_check_failed (expr, ret);
12203
12204 return ret;
12205 }
12206
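/* Editorial note: fold_checksum_tree hashes the entire reachable tree
   structure, so the before/after digest comparison above catches any
   in-place mutation of EXPR by fold_1, even one that yields a
   structurally equivalent result.  */
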
12207 void
12208 print_fold_checksum (const_tree expr)
12209 {
12210 struct md5_ctx ctx;
12211 unsigned char checksum[16], cnt;
12212 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12213
12214 md5_init_ctx (&ctx);
12215 fold_checksum_tree (expr, &ctx, &ht);
12216 md5_finish_ctx (&ctx, checksum);
12217 for (cnt = 0; cnt < 16; ++cnt)
12218 fprintf (stderr, "%02x", checksum[cnt]);
12219 putc ('\n', stderr);
12220 }
12221
12222 static void
12223 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12224 {
12225 internal_error ("fold check: original tree changed by fold");
12226 }
12227
12228 static void
12229 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12230 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12231 {
12232 const tree_node **slot;
12233 enum tree_code code;
12234 union tree_node buf;
12235 int i, len;
12236
12237 recursive_label:
12238 if (expr == NULL)
12239 return;
12240 slot = ht->find_slot (expr, INSERT);
12241 if (*slot != NULL)
12242 return;
12243 *slot = expr;
12244 code = TREE_CODE (expr);
12245 if (TREE_CODE_CLASS (code) == tcc_declaration
12246 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12247 {
12248 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12249 memcpy ((char *) &buf, expr, tree_size (expr));
12250 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12251 buf.decl_with_vis.symtab_node = NULL;
12252 expr = (tree) &buf;
12253 }
12254 else if (TREE_CODE_CLASS (code) == tcc_type
12255 && (TYPE_POINTER_TO (expr)
12256 || TYPE_REFERENCE_TO (expr)
12257 || TYPE_CACHED_VALUES_P (expr)
12258 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12259 || TYPE_NEXT_VARIANT (expr)))
12260 {
12261 /* Allow these fields to be modified. */
12262 tree tmp;
12263 memcpy ((char *) &buf, expr, tree_size (expr));
12264 expr = tmp = (tree) &buf;
12265 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12266 TYPE_POINTER_TO (tmp) = NULL;
12267 TYPE_REFERENCE_TO (tmp) = NULL;
12268 TYPE_NEXT_VARIANT (tmp) = NULL;
12269 if (TYPE_CACHED_VALUES_P (tmp))
12270 {
12271 TYPE_CACHED_VALUES_P (tmp) = 0;
12272 TYPE_CACHED_VALUES (tmp) = NULL;
12273 }
12274 }
12275 md5_process_bytes (expr, tree_size (expr), ctx);
12276 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12277 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12278 if (TREE_CODE_CLASS (code) != tcc_type
12279 && TREE_CODE_CLASS (code) != tcc_declaration
12280 && code != TREE_LIST
12281 && code != SSA_NAME
12282 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12283 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12284 switch (TREE_CODE_CLASS (code))
12285 {
12286 case tcc_constant:
12287 switch (code)
12288 {
12289 case STRING_CST:
12290 md5_process_bytes (TREE_STRING_POINTER (expr),
12291 TREE_STRING_LENGTH (expr), ctx);
12292 break;
12293 case COMPLEX_CST:
12294 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12295 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12296 break;
12297 case VECTOR_CST:
12298 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12299 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12300 break;
12301 default:
12302 break;
12303 }
12304 break;
12305 case tcc_exceptional:
12306 switch (code)
12307 {
12308 case TREE_LIST:
12309 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12310 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12311 expr = TREE_CHAIN (expr);
12312 goto recursive_label;
12313 break;
12314 case TREE_VEC:
12315 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12316 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12317 break;
12318 default:
12319 break;
12320 }
12321 break;
12322 case tcc_expression:
12323 case tcc_reference:
12324 case tcc_comparison:
12325 case tcc_unary:
12326 case tcc_binary:
12327 case tcc_statement:
12328 case tcc_vl_exp:
12329 len = TREE_OPERAND_LENGTH (expr);
12330 for (i = 0; i < len; ++i)
12331 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12332 break;
12333 case tcc_declaration:
12334 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12335 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12336 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12337 {
12338 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12339 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12340 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12341 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12342 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12343 }
12344
12345 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12346 {
12347 if (TREE_CODE (expr) == FUNCTION_DECL)
12348 {
12349 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12350 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12351 }
12352 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12353 }
12354 break;
12355 case tcc_type:
12356 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12357 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12358 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12359 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12360 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12361 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12362 if (INTEGRAL_TYPE_P (expr)
12363 || SCALAR_FLOAT_TYPE_P (expr))
12364 {
12365 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12366 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12367 }
12368 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12369 if (TREE_CODE (expr) == RECORD_TYPE
12370 || TREE_CODE (expr) == UNION_TYPE
12371 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12372 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12373 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12374 break;
12375 default:
12376 break;
12377 }
12378 }
12379
12380 /* Helper function for outputting the checksum of a tree T. When
12381 debugging with gdb, you can "define mynext" to be "next" followed
12382 by "call debug_fold_checksum (op0)", then just trace down till the
12383 outputs differ. */
12384
12385 DEBUG_FUNCTION void
12386 debug_fold_checksum (const_tree t)
12387 {
12388 int i;
12389 unsigned char checksum[16];
12390 struct md5_ctx ctx;
12391 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12392
12393 md5_init_ctx (&ctx);
12394 fold_checksum_tree (t, &ctx, &ht);
12395 md5_finish_ctx (&ctx, checksum);
12396 ht.empty ();
12397
12398 for (i = 0; i < 16; i++)
12399 fprintf (stderr, "%d ", checksum[i]);
12400
12401 fprintf (stderr, "\n");
12402 }
12403
12404 #endif
12405
12406 /* Fold a unary tree expression with code CODE of type TYPE with an
12407 operand OP0. LOC is the location of the resulting expression.
12408 Return a folded expression if successful. Otherwise, return a tree
12409 expression with code CODE of type TYPE with an operand OP0. */
12410
12411 tree
12412 fold_build1_stat_loc (location_t loc,
12413 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12414 {
12415 tree tem;
12416 #ifdef ENABLE_FOLD_CHECKING
12417 unsigned char checksum_before[16], checksum_after[16];
12418 struct md5_ctx ctx;
12419 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12420
12421 md5_init_ctx (&ctx);
12422 fold_checksum_tree (op0, &ctx, &ht);
12423 md5_finish_ctx (&ctx, checksum_before);
12424 ht.empty ();
12425 #endif
12426
12427 tem = fold_unary_loc (loc, code, type, op0);
12428 if (!tem)
12429 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12430
12431 #ifdef ENABLE_FOLD_CHECKING
12432 md5_init_ctx (&ctx);
12433 fold_checksum_tree (op0, &ctx, &ht);
12434 md5_finish_ctx (&ctx, checksum_after);
12435
12436 if (memcmp (checksum_before, checksum_after, 16))
12437 fold_check_failed (op0, tem);
12438 #endif
12439 return tem;
12440 }
12441
12442 /* Fold a binary tree expression with code CODE of type TYPE with
12443 operands OP0 and OP1. LOC is the location of the resulting
12444 expression. Return a folded expression if successful. Otherwise,
12445 return a tree expression with code CODE of type TYPE with operands
12446 OP0 and OP1. */
12447
12448 tree
12449 fold_build2_stat_loc (location_t loc,
12450 enum tree_code code, tree type, tree op0, tree op1
12451 MEM_STAT_DECL)
12452 {
12453 tree tem;
12454 #ifdef ENABLE_FOLD_CHECKING
12455 unsigned char checksum_before_op0[16],
12456 checksum_before_op1[16],
12457 checksum_after_op0[16],
12458 checksum_after_op1[16];
12459 struct md5_ctx ctx;
12460 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12461
12462 md5_init_ctx (&ctx);
12463 fold_checksum_tree (op0, &ctx, &ht);
12464 md5_finish_ctx (&ctx, checksum_before_op0);
12465 ht.empty ();
12466
12467 md5_init_ctx (&ctx);
12468 fold_checksum_tree (op1, &ctx, &ht);
12469 md5_finish_ctx (&ctx, checksum_before_op1);
12470 ht.empty ();
12471 #endif
12472
12473 tem = fold_binary_loc (loc, code, type, op0, op1);
12474 if (!tem)
12475 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12476
12477 #ifdef ENABLE_FOLD_CHECKING
12478 md5_init_ctx (&ctx);
12479 fold_checksum_tree (op0, &ctx, &ht);
12480 md5_finish_ctx (&ctx, checksum_after_op0);
12481 ht.empty ();
12482
12483 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12484 fold_check_failed (op0, tem);
12485
12486 md5_init_ctx (&ctx);
12487 fold_checksum_tree (op1, &ctx, &ht);
12488 md5_finish_ctx (&ctx, checksum_after_op1);
12489
12490 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12491 fold_check_failed (op1, tem);
12492 #endif
12493 return tem;
12494 }
12495
12496 /* Fold a ternary tree expression with code CODE of type TYPE with
12497 operands OP0, OP1, and OP2. Return a folded expression if
12498 successful. Otherwise, return a tree expression with code CODE of
12499 type TYPE with operands OP0, OP1, and OP2. */
12500
12501 tree
12502 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12503 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12504 {
12505 tree tem;
12506 #ifdef ENABLE_FOLD_CHECKING
12507 unsigned char checksum_before_op0[16],
12508 checksum_before_op1[16],
12509 checksum_before_op2[16],
12510 checksum_after_op0[16],
12511 checksum_after_op1[16],
12512 checksum_after_op2[16];
12513 struct md5_ctx ctx;
12514 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12515
12516 md5_init_ctx (&ctx);
12517 fold_checksum_tree (op0, &ctx, &ht);
12518 md5_finish_ctx (&ctx, checksum_before_op0);
12519 ht.empty ();
12520
12521 md5_init_ctx (&ctx);
12522 fold_checksum_tree (op1, &ctx, &ht);
12523 md5_finish_ctx (&ctx, checksum_before_op1);
12524 ht.empty ();
12525
12526 md5_init_ctx (&ctx);
12527 fold_checksum_tree (op2, &ctx, &ht);
12528 md5_finish_ctx (&ctx, checksum_before_op2);
12529 ht.empty ();
12530 #endif
12531
12532 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12533 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12534 if (!tem)
12535 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12536
12537 #ifdef ENABLE_FOLD_CHECKING
12538 md5_init_ctx (&ctx);
12539 fold_checksum_tree (op0, &ctx, &ht);
12540 md5_finish_ctx (&ctx, checksum_after_op0);
12541 ht.empty ();
12542
12543 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12544 fold_check_failed (op0, tem);
12545
12546 md5_init_ctx (&ctx);
12547 fold_checksum_tree (op1, &ctx, &ht);
12548 md5_finish_ctx (&ctx, checksum_after_op1);
12549 ht.empty ();
12550
12551 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12552 fold_check_failed (op1, tem);
12553
12554 md5_init_ctx (&ctx);
12555 fold_checksum_tree (op2, &ctx, &ht);
12556 md5_finish_ctx (&ctx, checksum_after_op2);
12557
12558 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12559 fold_check_failed (op2, tem);
12560 #endif
12561 return tem;
12562 }
12563
12564 /* Fold a CALL_EXPR of type TYPE with function FN and the NARGS
12565 arguments in ARGARRAY, and a null static chain.
12566 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12567 of type TYPE from the given operands as constructed by build_call_array. */
12568
12569 tree
12570 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12571 int nargs, tree *argarray)
12572 {
12573 tree tem;
12574 #ifdef ENABLE_FOLD_CHECKING
12575 unsigned char checksum_before_fn[16],
12576 checksum_before_arglist[16],
12577 checksum_after_fn[16],
12578 checksum_after_arglist[16];
12579 struct md5_ctx ctx;
12580 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12581 int i;
12582
12583 md5_init_ctx (&ctx);
12584 fold_checksum_tree (fn, &ctx, &ht);
12585 md5_finish_ctx (&ctx, checksum_before_fn);
12586 ht.empty ();
12587
12588 md5_init_ctx (&ctx);
12589 for (i = 0; i < nargs; i++)
12590 fold_checksum_tree (argarray[i], &ctx, &ht);
12591 md5_finish_ctx (&ctx, checksum_before_arglist);
12592 ht.empty ();
12593 #endif
12594
12595 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12596 if (!tem)
12597 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12598
12599 #ifdef ENABLE_FOLD_CHECKING
12600 md5_init_ctx (&ctx);
12601 fold_checksum_tree (fn, &ctx, &ht);
12602 md5_finish_ctx (&ctx, checksum_after_fn);
12603 ht.empty ();
12604
12605 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12606 fold_check_failed (fn, tem);
12607
12608 md5_init_ctx (&ctx);
12609 for (i = 0; i < nargs; i++)
12610 fold_checksum_tree (argarray[i], &ctx, &ht);
12611 md5_finish_ctx (&ctx, checksum_after_arglist);
12612
12613 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12614 fold_check_failed (NULL_TREE, tem);
12615 #endif
12616 return tem;
12617 }
12618
12619 /* Perform constant folding and related simplification of initializer
12620 expression EXPR. These routines behave identically to "fold_buildN" but ignore
12621 potential run-time traps and exceptions that fold must preserve. */
12622
12623 #define START_FOLD_INIT \
12624 int saved_signaling_nans = flag_signaling_nans;\
12625 int saved_trapping_math = flag_trapping_math;\
12626 int saved_rounding_math = flag_rounding_math;\
12627 int saved_trapv = flag_trapv;\
12628 int saved_folding_initializer = folding_initializer;\
12629 flag_signaling_nans = 0;\
12630 flag_trapping_math = 0;\
12631 flag_rounding_math = 0;\
12632 flag_trapv = 0;\
12633 folding_initializer = 1;
12634
12635 #define END_FOLD_INIT \
12636 flag_signaling_nans = saved_signaling_nans;\
12637 flag_trapping_math = saved_trapping_math;\
12638 flag_rounding_math = saved_rounding_math;\
12639 flag_trapv = saved_trapv;\
12640 folding_initializer = saved_folding_initializer;
12641
12642 tree
12643 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12644 tree type, tree op)
12645 {
12646 tree result;
12647 START_FOLD_INIT;
12648
12649 result = fold_build1_loc (loc, code, type, op);
12650
12651 END_FOLD_INIT;
12652 return result;
12653 }
12654
12655 tree
12656 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12657 tree type, tree op0, tree op1)
12658 {
12659 tree result;
12660 START_FOLD_INIT;
12661
12662 result = fold_build2_loc (loc, code, type, op0, op1);
12663
12664 END_FOLD_INIT;
12665 return result;
12666 }
12667
12668 tree
12669 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12670 int nargs, tree *argarray)
12671 {
12672 tree result;
12673 START_FOLD_INIT;
12674
12675 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12676
12677 END_FOLD_INIT;
12678 return result;
12679 }
12680
12681 #undef START_FOLD_INIT
12682 #undef END_FOLD_INIT
12683
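/* Illustrative sketch (editorial): inside the wrappers above the
   trap-related flags are cleared, so for example

     fold_build2_initializer_loc (loc, RDIV_EXPR, type, x, y)

   may fold a floating-point division that fold_build2_loc would have
   left alone under -ftrapping-math.  */
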
12684 /* Determine if the first argument is a multiple of the second argument.
12685 Return 0 if it is not, or if we cannot easily determine it to be.
12686
12687 An example of the sort of thing we care about (at this point; this routine
12688 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12689 fold cases do now) is discovering that
12690
12691 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12692
12693 is a multiple of
12694
12695 SAVE_EXPR (J * 8)
12696
12697 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12698
12699 This code also handles discovering that
12700
12701 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12702
12703 is a multiple of 8 so we don't have to worry about dealing with a
12704 possible remainder.
12705
12706 Note that we *look* inside a SAVE_EXPR only to determine how it was
12707 calculated; it is not safe for fold to do much of anything else with the
12708 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12709 at run time. For example, the latter example above *cannot* be implemented
12710 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12711 evaluation time of the original SAVE_EXPR is not necessarily the same at
12712 the time the new expression is evaluated. The only optimization of this
12713 sort that would be valid is changing
12714
12715 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12716
12717 divided by 8 to
12718
12719 SAVE_EXPR (I) * SAVE_EXPR (J)
12720
12721 (where the same SAVE_EXPR (J) is used in the original and the
12722 transformed version). */
12723
12724 int
12725 multiple_of_p (tree type, const_tree top, const_tree bottom)
12726 {
12727 if (operand_equal_p (top, bottom, 0))
12728 return 1;
12729
12730 if (TREE_CODE (type) != INTEGER_TYPE)
12731 return 0;
12732
12733 switch (TREE_CODE (top))
12734 {
12735 case BIT_AND_EXPR:
12736 /* Bitwise AND provides a power-of-two multiple. If the mask is
12737 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12738 if (!integer_pow2p (bottom))
12739 return 0;
12740 /* FALLTHRU */
12741
12742 case MULT_EXPR:
12743 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12744 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12745
12746 case PLUS_EXPR:
12747 case MINUS_EXPR:
12748 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12749 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12750
12751 case LSHIFT_EXPR:
12752 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12753 {
12754 tree op1, t1;
12755
12756 op1 = TREE_OPERAND (top, 1);
12757 /* const_binop may not detect overflow correctly,
12758 so check for it explicitly here. */
12759 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12760 && 0 != (t1 = fold_convert (type,
12761 const_binop (LSHIFT_EXPR,
12762 size_one_node,
12763 op1)))
12764 && !TREE_OVERFLOW (t1))
12765 return multiple_of_p (type, t1, bottom);
12766 }
12767 return 0;
12768
12769 case NOP_EXPR:
12770 /* Can't handle conversions from non-integral or wider integral types. */
12771 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12772 || (TYPE_PRECISION (type)
12773 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12774 return 0;
12775
12776 /* ... fall through ... */
12777
12778 case SAVE_EXPR:
12779 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12780
12781 case COND_EXPR:
12782 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12783 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12784
12785 case INTEGER_CST:
12786 if (TREE_CODE (bottom) != INTEGER_CST
12787 || integer_zerop (bottom)
12788 || (TYPE_UNSIGNED (type)
12789 && (tree_int_cst_sgn (top) < 0
12790 || tree_int_cst_sgn (bottom) < 0)))
12791 return 0;
12792 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12793 SIGNED);
12794
12795 default:
12796 return 0;
12797 }
12798 }
12799
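/* Example (editorial): for a 32-bit integer TYPE,

     multiple_of_p (type,
		    build2 (MULT_EXPR, type, i, build_int_cst (type, 8)),
		    build_int_cst (type, 4))

   returns 1 through the MULT_EXPR case, because the constant
   operand 8 is itself a multiple of 4.  */
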
12800 #define tree_expr_nonnegative_warnv_p(X, Y) \
12801 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12802
12803 #define RECURSE(X) \
12804 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12805
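/* Editorial note: the #define above turns any direct call to
   tree_expr_nonnegative_warnv_p inside the helpers below into a hard
   error; recursion must be spelled RECURSE (op), whose parenthesized
   function name defeats the macro and which threads
   STRICT_OVERFLOW_P and an incremented DEPTH automatically.  */
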
12806 /* Return true if CODE or TYPE is known to be non-negative. */
12807
12808 static bool
12809 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12810 {
12811 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12812 && truth_value_p (code))
12813 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12814 have a signed:1 type (where the values are -1 and 0). */
12815 return true;
12816 return false;
12817 }
12818
12819 /* Return true if (CODE OP0) is known to be non-negative. If the return
12820 value is based on the assumption that signed overflow is undefined,
12821 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12822 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12823
12824 bool
12825 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12826 bool *strict_overflow_p, int depth)
12827 {
12828 if (TYPE_UNSIGNED (type))
12829 return true;
12830
12831 switch (code)
12832 {
12833 case ABS_EXPR:
12834 /* We can't return true if flag_wrapv is set, because
12835 ABS_EXPR<INT_MIN> == INT_MIN. */
12836 if (!ANY_INTEGRAL_TYPE_P (type))
12837 return true;
12838 if (TYPE_OVERFLOW_UNDEFINED (type))
12839 {
12840 *strict_overflow_p = true;
12841 return true;
12842 }
12843 break;
12844
12845 case NON_LVALUE_EXPR:
12846 case FLOAT_EXPR:
12847 case FIX_TRUNC_EXPR:
12848 return RECURSE (op0);
12849
12850 CASE_CONVERT:
12851 {
12852 tree inner_type = TREE_TYPE (op0);
12853 tree outer_type = type;
12854
12855 if (TREE_CODE (outer_type) == REAL_TYPE)
12856 {
12857 if (TREE_CODE (inner_type) == REAL_TYPE)
12858 return RECURSE (op0);
12859 if (INTEGRAL_TYPE_P (inner_type))
12860 {
12861 if (TYPE_UNSIGNED (inner_type))
12862 return true;
12863 return RECURSE (op0);
12864 }
12865 }
12866 else if (INTEGRAL_TYPE_P (outer_type))
12867 {
12868 if (TREE_CODE (inner_type) == REAL_TYPE)
12869 return RECURSE (op0);
12870 if (INTEGRAL_TYPE_P (inner_type))
12871 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12872 && TYPE_UNSIGNED (inner_type);
12873 }
12874 }
12875 break;
12876
12877 default:
12878 return tree_simple_nonnegative_warnv_p (code, type);
12879 }
12880
12881 /* We don't know the sign of `t', so be conservative and return false. */
12882 return false;
12883 }
12884
12885 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12886 value is based on the assumption that signed overflow is undefined,
12887 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12888 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12889
12890 bool
12891 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12892 tree op1, bool *strict_overflow_p,
12893 int depth)
12894 {
12895 if (TYPE_UNSIGNED (type))
12896 return true;
12897
12898 switch (code)
12899 {
12900 case POINTER_PLUS_EXPR:
12901 case PLUS_EXPR:
12902 if (FLOAT_TYPE_P (type))
12903 return RECURSE (op0) && RECURSE (op1);
12904
12905 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12906 both unsigned and at least 2 bits shorter than the result. */
12907 if (TREE_CODE (type) == INTEGER_TYPE
12908 && TREE_CODE (op0) == NOP_EXPR
12909 && TREE_CODE (op1) == NOP_EXPR)
12910 {
12911 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12912 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12913 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12914 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12915 {
12916 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12917 TYPE_PRECISION (inner2)) + 1;
12918 return prec < TYPE_PRECISION (type);
12919 }
12920 }
12921 break;
12922
12923 case MULT_EXPR:
12924 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12925 {
12926 /* x * x is always non-negative for floating point x
12927 or when signed overflow is undefined. */
12928 if (operand_equal_p (op0, op1, 0)
12929 || (RECURSE (op0) && RECURSE (op1)))
12930 {
12931 if (ANY_INTEGRAL_TYPE_P (type)
12932 && TYPE_OVERFLOW_UNDEFINED (type))
12933 *strict_overflow_p = true;
12934 return true;
12935 }
12936 }
12937
12938 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12939 both unsigned and their combined width is less than that of the result. */
12940 if (TREE_CODE (type) == INTEGER_TYPE
12941 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12942 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12943 {
12944 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12945 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12946 : TREE_TYPE (op0);
12947 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12948 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12949 : TREE_TYPE (op1);
12950
12951 bool unsigned0 = TYPE_UNSIGNED (inner0);
12952 bool unsigned1 = TYPE_UNSIGNED (inner1);
12953
12954 if (TREE_CODE (op0) == INTEGER_CST)
12955 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12956
12957 if (TREE_CODE (op1) == INTEGER_CST)
12958 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12959
12960 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12961 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12962 {
12963 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12964 ? tree_int_cst_min_precision (op0, UNSIGNED)
12965 : TYPE_PRECISION (inner0);
12966
12967 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12968 ? tree_int_cst_min_precision (op1, UNSIGNED)
12969 : TYPE_PRECISION (inner1);
12970
12971 return precision0 + precision1 < TYPE_PRECISION (type);
12972 }
12973 }
12974 return false;
12975
12976 case BIT_AND_EXPR:
12977 case MAX_EXPR:
12978 return RECURSE (op0) || RECURSE (op1);
12979
12980 case BIT_IOR_EXPR:
12981 case BIT_XOR_EXPR:
12982 case MIN_EXPR:
12983 case RDIV_EXPR:
12984 case TRUNC_DIV_EXPR:
12985 case CEIL_DIV_EXPR:
12986 case FLOOR_DIV_EXPR:
12987 case ROUND_DIV_EXPR:
12988 return RECURSE (op0) && RECURSE (op1);
12989
12990 case TRUNC_MOD_EXPR:
12991 return RECURSE (op0);
12992
12993 case FLOOR_MOD_EXPR:
12994 return RECURSE (op1);
12995
12996 case CEIL_MOD_EXPR:
12997 case ROUND_MOD_EXPR:
12998 default:
12999 return tree_simple_nonnegative_warnv_p (code, type);
13000 }
13001
13002 /* We don't know the sign of `t', so be conservative and return false. */
13003 return false;
13004 }
13005
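/* Worked example (editorial): multiplying two values zero-extended
   from 8-bit unsigned types in a 32-bit signed type gives
   precision0 + precision1 == 16 < 32, so the MULT_EXPR case above
   proves the product non-negative with no overflow assumption.  */
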
13006 /* Return true if T is known to be non-negative. If the return
13007 value is based on the assumption that signed overflow is undefined,
13008 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13009 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13010
13011 bool
13012 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13013 {
13014 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13015 return true;
13016
13017 switch (TREE_CODE (t))
13018 {
13019 case INTEGER_CST:
13020 return tree_int_cst_sgn (t) >= 0;
13021
13022 case REAL_CST:
13023 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13024
13025 case FIXED_CST:
13026 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13027
13028 case COND_EXPR:
13029 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13030
13031 case SSA_NAME:
13032 /* Limit the depth of recursion to avoid quadratic behavior.
13033 This is expected to catch almost all occurrences in practice.
13034 If this code misses important cases that unbounded recursion
13035 would not, passes that need this information could be revised
13036 to provide it through dataflow propagation. */
13037 return (!name_registered_for_update_p (t)
13038 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13039 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13040 strict_overflow_p, depth));
13041
13042 default:
13043 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13044 }
13045 }
13046
13047 /* Return true if T is known to be non-negative. If the return
13048 value is based on the assumption that signed overflow is undefined,
13049 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13050 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13051
13052 bool
13053 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13054 bool *strict_overflow_p, int depth)
13055 {
13056 switch (fn)
13057 {
13058 CASE_CFN_ACOS:
13059 CASE_CFN_ACOSH:
13060 CASE_CFN_CABS:
13061 CASE_CFN_COSH:
13062 CASE_CFN_ERFC:
13063 CASE_CFN_EXP:
13064 CASE_CFN_EXP10:
13065 CASE_CFN_EXP2:
13066 CASE_CFN_FABS:
13067 CASE_CFN_FDIM:
13068 CASE_CFN_HYPOT:
13069 CASE_CFN_POW10:
13070 CASE_CFN_FFS:
13071 CASE_CFN_PARITY:
13072 CASE_CFN_POPCOUNT:
13073 CASE_CFN_CLZ:
13074 CASE_CFN_CLRSB:
13075 case CFN_BUILT_IN_BSWAP32:
13076 case CFN_BUILT_IN_BSWAP64:
13077 /* Always true. */
13078 return true;
13079
13080 CASE_CFN_SQRT:
13081 /* sqrt(-0.0) is -0.0. */
13082 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13083 return true;
13084 return RECURSE (arg0);
13085
13086 CASE_CFN_ASINH:
13087 CASE_CFN_ATAN:
13088 CASE_CFN_ATANH:
13089 CASE_CFN_CBRT:
13090 CASE_CFN_CEIL:
13091 CASE_CFN_ERF:
13092 CASE_CFN_EXPM1:
13093 CASE_CFN_FLOOR:
13094 CASE_CFN_FMOD:
13095 CASE_CFN_FREXP:
13096 CASE_CFN_ICEIL:
13097 CASE_CFN_IFLOOR:
13098 CASE_CFN_IRINT:
13099 CASE_CFN_IROUND:
13100 CASE_CFN_LCEIL:
13101 CASE_CFN_LDEXP:
13102 CASE_CFN_LFLOOR:
13103 CASE_CFN_LLCEIL:
13104 CASE_CFN_LLFLOOR:
13105 CASE_CFN_LLRINT:
13106 CASE_CFN_LLROUND:
13107 CASE_CFN_LRINT:
13108 CASE_CFN_LROUND:
13109 CASE_CFN_MODF:
13110 CASE_CFN_NEARBYINT:
13111 CASE_CFN_RINT:
13112 CASE_CFN_ROUND:
13113 CASE_CFN_SCALB:
13114 CASE_CFN_SCALBLN:
13115 CASE_CFN_SCALBN:
13116 CASE_CFN_SIGNBIT:
13117 CASE_CFN_SIGNIFICAND:
13118 CASE_CFN_SINH:
13119 CASE_CFN_TANH:
13120 CASE_CFN_TRUNC:
13121 /* True if the 1st argument is nonnegative. */
13122 return RECURSE (arg0);
13123
13124 CASE_CFN_FMAX:
13125 /* True if the 1st OR 2nd arguments are nonnegative. */
13126 return RECURSE (arg0) || RECURSE (arg1);
13127
13128 CASE_CFN_FMIN:
13129 /* True if the 1st AND 2nd arguments are nonnegative. */
13130 return RECURSE (arg0) && RECURSE (arg1);
13131
13132 CASE_CFN_COPYSIGN:
13133 /* True if the 2nd argument is nonnegative. */
13134 return RECURSE (arg1);
13135
13136 CASE_CFN_POWI:
13137 /* True if the 1st argument is nonnegative or the second
13138 argument is an even integer. */
13139 if (TREE_CODE (arg1) == INTEGER_CST
13140 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13141 return true;
13142 return RECURSE (arg0);
13143
13144 CASE_CFN_POW:
13145 /* True if the 1st argument is nonnegative or the second
13146 argument is an even integer valued real. */
13147 if (TREE_CODE (arg1) == REAL_CST)
13148 {
13149 REAL_VALUE_TYPE c;
13150 HOST_WIDE_INT n;
13151
13152 c = TREE_REAL_CST (arg1);
13153 n = real_to_integer (&c);
13154 if ((n & 1) == 0)
13155 {
13156 REAL_VALUE_TYPE cint;
13157 real_from_integer (&cint, VOIDmode, n, SIGNED);
13158 if (real_identical (&c, &cint))
13159 return true;
13160 }
13161 }
13162 return RECURSE (arg0);
13163
13164 default:
13165 break;
13166 }
13167 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13168 }
13169
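/* Example (editorial): for pow (x, 2.0), the exponent converts to the
   even integer 2 and round-trips through real_from_integer unchanged,
   so the CASE_CFN_POW logic above returns true whatever the sign of
   x.  */
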
13170 /* Return true if T is known to be non-negative. If the return
13171 value is based on the assumption that signed overflow is undefined,
13172 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13173 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13174
13175 static bool
13176 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13177 {
13178 enum tree_code code = TREE_CODE (t);
13179 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13180 return true;
13181
13182 switch (code)
13183 {
13184 case TARGET_EXPR:
13185 {
13186 tree temp = TARGET_EXPR_SLOT (t);
13187 t = TARGET_EXPR_INITIAL (t);
13188
13189 /* If the initializer is non-void, then it's a normal expression
13190 that will be assigned to the slot. */
13191 if (!VOID_TYPE_P (t))
13192 return RECURSE (t);
13193
13194 /* Otherwise, the initializer sets the slot in some way. One common
13195 way is an assignment statement at the end of the initializer. */
13196 while (1)
13197 {
13198 if (TREE_CODE (t) == BIND_EXPR)
13199 t = expr_last (BIND_EXPR_BODY (t));
13200 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13201 || TREE_CODE (t) == TRY_CATCH_EXPR)
13202 t = expr_last (TREE_OPERAND (t, 0));
13203 else if (TREE_CODE (t) == STATEMENT_LIST)
13204 t = expr_last (t);
13205 else
13206 break;
13207 }
13208 if (TREE_CODE (t) == MODIFY_EXPR
13209 && TREE_OPERAND (t, 0) == temp)
13210 return RECURSE (TREE_OPERAND (t, 1));
13211
13212 return false;
13213 }
13214
13215 case CALL_EXPR:
13216 {
13217 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13218 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13219
13220 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13221 get_call_combined_fn (t),
13222 arg0,
13223 arg1,
13224 strict_overflow_p, depth);
13225 }
13226 case COMPOUND_EXPR:
13227 case MODIFY_EXPR:
13228 return RECURSE (TREE_OPERAND (t, 1));
13229
13230 case BIND_EXPR:
13231 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13232
13233 case SAVE_EXPR:
13234 return RECURSE (TREE_OPERAND (t, 0));
13235
13236 default:
13237 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13238 }
13239 }
13240
13241 #undef RECURSE
13242 #undef tree_expr_nonnegative_warnv_p
13243
13244 /* Return true if T is known to be non-negative. If the return
13245 value is based on the assumption that signed overflow is undefined,
13246 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13247 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13248
13249 bool
13250 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13251 {
13252 enum tree_code code;
13253 if (t == error_mark_node)
13254 return false;
13255
13256 code = TREE_CODE (t);
13257 switch (TREE_CODE_CLASS (code))
13258 {
13259 case tcc_binary:
13260 case tcc_comparison:
13261 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13262 TREE_TYPE (t),
13263 TREE_OPERAND (t, 0),
13264 TREE_OPERAND (t, 1),
13265 strict_overflow_p, depth);
13266
13267 case tcc_unary:
13268 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13269 TREE_TYPE (t),
13270 TREE_OPERAND (t, 0),
13271 strict_overflow_p, depth);
13272
13273 case tcc_constant:
13274 case tcc_declaration:
13275 case tcc_reference:
13276 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13277
13278 default:
13279 break;
13280 }
13281
13282 switch (code)
13283 {
13284 case TRUTH_AND_EXPR:
13285 case TRUTH_OR_EXPR:
13286 case TRUTH_XOR_EXPR:
13287 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13288 TREE_TYPE (t),
13289 TREE_OPERAND (t, 0),
13290 TREE_OPERAND (t, 1),
13291 strict_overflow_p, depth);
13292 case TRUTH_NOT_EXPR:
13293 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13294 TREE_TYPE (t),
13295 TREE_OPERAND (t, 0),
13296 strict_overflow_p, depth);
13297
13298 case COND_EXPR:
13299 case CONSTRUCTOR:
13300 case OBJ_TYPE_REF:
13301 case ASSERT_EXPR:
13302 case ADDR_EXPR:
13303 case WITH_SIZE_EXPR:
13304 case SSA_NAME:
13305 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13306
13307 default:
13308 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13309 }
13310 }
13311
13312 /* Return true if `t' is known to be non-negative. Handle warnings
13313 about undefined signed overflow. */
13314
13315 bool
13316 tree_expr_nonnegative_p (tree t)
13317 {
13318 bool ret, strict_overflow_p;
13319
13320 strict_overflow_p = false;
13321 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13322 if (strict_overflow_p)
13323 fold_overflow_warning (("assuming signed overflow does not occur when "
13324 "determining that expression is always "
13325 "non-negative"),
13326 WARN_STRICT_OVERFLOW_MISC);
13327 return ret;
13328 }
13329
13330
13331 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13332 For floating point we further ensure that T is not denormal.
13333 Similar logic is present in nonzero_address in rtlanal.h.
13334
13335 If the return value is based on the assumption that signed overflow
13336 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13337 change *STRICT_OVERFLOW_P. */
13338
13339 bool
13340 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13341 bool *strict_overflow_p)
13342 {
13343 switch (code)
13344 {
13345 case ABS_EXPR:
13346 return tree_expr_nonzero_warnv_p (op0,
13347 strict_overflow_p);
13348
13349 case NOP_EXPR:
13350 {
13351 tree inner_type = TREE_TYPE (op0);
13352 tree outer_type = type;
13353
13354 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13355 && tree_expr_nonzero_warnv_p (op0,
13356 strict_overflow_p));
13357 }
13358 break;
13359
13360 case NON_LVALUE_EXPR:
13361 return tree_expr_nonzero_warnv_p (op0,
13362 strict_overflow_p);
13363
13364 default:
13365 break;
13366 }
13367
13368 return false;
13369 }
13370
13371 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13372 For floating point we further ensure that T is not denormal.
13373 Similar logic is present in nonzero_address in rtlanal.h.
13374
13375 If the return value is based on the assumption that signed overflow
13376 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13377 change *STRICT_OVERFLOW_P. */
13378
13379 bool
13380 tree_binary_nonzero_warnv_p (enum tree_code code,
13381 tree type,
13382 tree op0,
13383 tree op1, bool *strict_overflow_p)
13384 {
13385 bool sub_strict_overflow_p;
13386 switch (code)
13387 {
13388 case POINTER_PLUS_EXPR:
13389 case PLUS_EXPR:
13390 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13391 {
13392 /* In the presence of negative values it is hard
13393 to say anything definite. */
13394 sub_strict_overflow_p = false;
13395 if (!tree_expr_nonnegative_warnv_p (op0,
13396 &sub_strict_overflow_p)
13397 || !tree_expr_nonnegative_warnv_p (op1,
13398 &sub_strict_overflow_p))
13399 return false;
13400 /* One of the operands must be positive and the other non-negative. */
13401 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13402 overflows, on a two's-complement machine the sum of two
13403 nonnegative numbers can never be zero. */
13404 return (tree_expr_nonzero_warnv_p (op0,
13405 strict_overflow_p)
13406 || tree_expr_nonzero_warnv_p (op1,
13407 strict_overflow_p));
13408 }
13409 break;
13410
13411 case MULT_EXPR:
13412 if (TYPE_OVERFLOW_UNDEFINED (type))
13413 {
13414 if (tree_expr_nonzero_warnv_p (op0,
13415 strict_overflow_p)
13416 && tree_expr_nonzero_warnv_p (op1,
13417 strict_overflow_p))
13418 {
13419 *strict_overflow_p = true;
13420 return true;
13421 }
13422 }
13423 break;
13424
13425 case MIN_EXPR:
13426 sub_strict_overflow_p = false;
13427 if (tree_expr_nonzero_warnv_p (op0,
13428 &sub_strict_overflow_p)
13429 && tree_expr_nonzero_warnv_p (op1,
13430 &sub_strict_overflow_p))
13431 {
13432 if (sub_strict_overflow_p)
13433 *strict_overflow_p = true;
13434 }
13435 break;
13436
13437 case MAX_EXPR:
13438 sub_strict_overflow_p = false;
13439 if (tree_expr_nonzero_warnv_p (op0,
13440 &sub_strict_overflow_p))
13441 {
13442 if (sub_strict_overflow_p)
13443 *strict_overflow_p = true;
13444
13445 /* When both operands are nonzero, then MAX must be too. */
13446 if (tree_expr_nonzero_warnv_p (op1,
13447 strict_overflow_p))
13448 return true;
13449
13450 /* MAX where operand 0 is positive is positive. */
13451 return tree_expr_nonnegative_warnv_p (op0,
13452 strict_overflow_p);
13453 }
13454 /* MAX where operand 1 is positive is positive. */
13455 else if (tree_expr_nonzero_warnv_p (op1,
13456 &sub_strict_overflow_p)
13457 && tree_expr_nonnegative_warnv_p (op1,
13458 &sub_strict_overflow_p))
13459 {
13460 if (sub_strict_overflow_p)
13461 *strict_overflow_p = true;
13462 return true;
13463 }
13464 break;
13465
13466 case BIT_IOR_EXPR:
13467 return (tree_expr_nonzero_warnv_p (op1,
13468 strict_overflow_p)
13469 || tree_expr_nonzero_warnv_p (op0,
13470 strict_overflow_p));
13471
13472 default:
13473 break;
13474 }
13475
13476 return false;
13477 }
13478
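/* Example (editorial): for MAX_EXPR <a, b> with a known nonzero, the
   code above answers true outright if b is also nonzero, and
   otherwise falls back to a being non-negative, i.e. strictly
   positive, which forces the maximum to be nonzero too.  */
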
13479 /* Return true when T is an address and is known to be nonzero.
13480 For floating point we further ensure that T is not denormal.
13481 Similar logic is present in nonzero_address in rtlanal.h.
13482
13483 If the return value is based on the assumption that signed overflow
13484 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13485 change *STRICT_OVERFLOW_P. */
13486
13487 bool
13488 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13489 {
13490 bool sub_strict_overflow_p;
13491 switch (TREE_CODE (t))
13492 {
13493 case INTEGER_CST:
13494 return !integer_zerop (t);
13495
13496 case ADDR_EXPR:
13497 {
13498 tree base = TREE_OPERAND (t, 0);
13499
13500 if (!DECL_P (base))
13501 base = get_base_address (base);
13502
13503 if (!base)
13504 return false;
13505
13506 /* For objects in the symbol table, check if we know they are non-zero.
13507 Don't do anything for variables and functions before symtab is built;
13508 it is quite possible that they will be declared weak later. */
13509 if (DECL_P (base) && decl_in_symtab_p (base))
13510 {
13511 struct symtab_node *symbol;
13512
13513 symbol = symtab_node::get_create (base);
13514 if (symbol)
13515 return symbol->nonzero_address ();
13516 else
13517 return false;
13518 }
13519
13520 /* Function-local objects are never NULL. */
13521 if (DECL_P (base)
13522 && (DECL_CONTEXT (base)
13523 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13524 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13525 return true;
13526
13527 /* Constants are never weak. */
13528 if (CONSTANT_CLASS_P (base))
13529 return true;
13530
13531 return false;
13532 }
13533
13534 case COND_EXPR:
13535 sub_strict_overflow_p = false;
13536 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13537 &sub_strict_overflow_p)
13538 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13539 &sub_strict_overflow_p))
13540 {
13541 if (sub_strict_overflow_p)
13542 *strict_overflow_p = true;
13543 return true;
13544 }
13545 break;
13546
13547 default:
13548 break;
13549 }
13550 return false;
13551 }
13552
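/* Example (editorial): &local for a variable declared in the current
   function passes the auto_var_in_fn_p check above and is therefore
   nonzero, whereas the address of a symbol that may yet be declared
   weak defers to symtab_node::nonzero_address ().  */
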
13553 #define integer_valued_real_p(X) \
13554 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13555
13556 #define RECURSE(X) \
13557 ((integer_valued_real_p) (X, depth + 1))
13558
13559 /* Return true if the floating point result of (CODE OP0) has an
13560 integer value. We also allow +Inf, -Inf and NaN to be considered
13561 integer values. Return false for signaling NaN.
13562
13563 DEPTH is the current nesting depth of the query. */
13564
13565 bool
13566 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13567 {
13568 switch (code)
13569 {
13570 case FLOAT_EXPR:
13571 return true;
13572
13573 case ABS_EXPR:
13574 return RECURSE (op0);
13575
13576 CASE_CONVERT:
13577 {
13578 tree type = TREE_TYPE (op0);
13579 if (TREE_CODE (type) == INTEGER_TYPE)
13580 return true;
13581 if (TREE_CODE (type) == REAL_TYPE)
13582 return RECURSE (op0);
13583 break;
13584 }
13585
13586 default:
13587 break;
13588 }
13589 return false;
13590 }
13591
13592 /* Return true if the floating point result of (CODE OP0 OP1) has an
13593 integer value. We also allow +Inf, -Inf and NaN to be considered
13594 integer values. Return false for signaling NaN.
13595
13596 DEPTH is the current nesting depth of the query. */
13597
13598 bool
13599 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13600 {
13601 switch (code)
13602 {
13603 case PLUS_EXPR:
13604 case MINUS_EXPR:
13605 case MULT_EXPR:
13606 case MIN_EXPR:
13607 case MAX_EXPR:
13608 return RECURSE (op0) && RECURSE (op1);
13609
13610 default:
13611 break;
13612 }
13613 return false;
13614 }
13615
13616 /* Return true if the floating point result of calling FN with arguments
13617 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13618 considered integer values. Return false for signaling NaN. If FN
13619 takes fewer than 2 arguments, the remaining ARGn are null.
13620
13621 DEPTH is the current nesting depth of the query. */
13622
13623 bool
13624 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13625 {
13626 switch (fn)
13627 {
13628 CASE_CFN_CEIL:
13629 CASE_CFN_FLOOR:
13630 CASE_CFN_NEARBYINT:
13631 CASE_CFN_RINT:
13632 CASE_CFN_ROUND:
13633 CASE_CFN_TRUNC:
13634 return true;
13635
13636 CASE_CFN_FMIN:
13637 CASE_CFN_FMAX:
13638 return RECURSE (arg0) && RECURSE (arg1);
13639
13640 default:
13641 break;
13642 }
13643 return false;
13644 }
13645
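/* Example (editorial): ceil (x) always yields an integer value, so it
   is listed unconditionally above, while fmin (x, y) is
   integer-valued only when both arguments are, hence the RECURSE on
   arg0 and arg1.  */
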
13646 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13647 has an integer value. We also allow +Inf, -Inf and NaN to be
13648 considered integer values. Return false for signaling NaN.
13649
13650 DEPTH is the current nesting depth of the query. */
13651
13652 bool
13653 integer_valued_real_single_p (tree t, int depth)
13654 {
13655 switch (TREE_CODE (t))
13656 {
13657 case REAL_CST:
13658 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13659
13660 case COND_EXPR:
13661 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13662
13663 case SSA_NAME:
13664 /* Limit the depth of recursion to avoid quadratic behavior.
13665 This is expected to catch almost all occurrences in practice.
13666 If this code misses important cases that unbounded recursion
13667 would not, passes that need this information could be revised
13668 to provide it through dataflow propagation. */
13669 return (!name_registered_for_update_p (t)
13670 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13671 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13672 depth));
13673
13674 default:
13675 break;
13676 }
13677 return false;
13678 }
13679
13680 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13681 has an integer value. We also allow +Inf, -Inf and NaN to be
13682 considered integer values. Return false for signaling NaN.
13683
13684 DEPTH is the current nesting depth of the query. */
13685
13686 static bool
13687 integer_valued_real_invalid_p (tree t, int depth)
13688 {
13689 switch (TREE_CODE (t))
13690 {
13691 case COMPOUND_EXPR:
13692 case MODIFY_EXPR:
13693 case BIND_EXPR:
13694 return RECURSE (TREE_OPERAND (t, 1));
13695
13696 case SAVE_EXPR:
13697 return RECURSE (TREE_OPERAND (t, 0));
13698
13699 default:
13700 break;
13701 }
13702 return false;
13703 }
13704
13705 #undef RECURSE
13706 #undef integer_valued_real_p
13707
13708 /* Return true if the floating point expression T has an integer value.
13709 We also allow +Inf, -Inf and NaN to be considered integer values.
13710 Return false for signaling NaN.
13711
13712 DEPTH is the current nesting depth of the query. */
13713
13714 bool
13715 integer_valued_real_p (tree t, int depth)
13716 {
13717 if (t == error_mark_node)
13718 return false;
13719
13720 tree_code code = TREE_CODE (t);
13721 switch (TREE_CODE_CLASS (code))
13722 {
13723 case tcc_binary:
13724 case tcc_comparison:
13725 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13726 TREE_OPERAND (t, 1), depth);
13727
13728 case tcc_unary:
13729 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13730
13731 case tcc_constant:
13732 case tcc_declaration:
13733 case tcc_reference:
13734 return integer_valued_real_single_p (t, depth);
13735
13736 default:
13737 break;
13738 }
13739
13740 switch (code)
13741 {
13742 case COND_EXPR:
13743 case SSA_NAME:
13744 return integer_valued_real_single_p (t, depth);
13745
13746 case CALL_EXPR:
13747 {
13748 tree arg0 = (call_expr_nargs (t) > 0
13749 ? CALL_EXPR_ARG (t, 0)
13750 : NULL_TREE);
13751 tree arg1 = (call_expr_nargs (t) > 1
13752 ? CALL_EXPR_ARG (t, 1)
13753 : NULL_TREE);
13754 return integer_valued_real_call_p (get_call_combined_fn (t),
13755 arg0, arg1, depth);
13756 }
13757
13758 default:
13759 return integer_valued_real_invalid_p (t, depth);
13760 }
13761 }
13762
13763 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13764 attempt to fold the expression to a constant without modifying TYPE,
13765 OP0 or OP1.
13766
13767 If the expression could be simplified to a constant, then return
13768 the constant. If the expression would not be simplified to a
13769 constant, then return NULL_TREE. */
13770
13771 tree
13772 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13773 {
13774 tree tem = fold_binary (code, type, op0, op1);
13775 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13776 }
13777
13778 /* Given the components of a unary expression CODE, TYPE and OP0,
13779 attempt to fold the expression to a constant without modifying
13780 TYPE or OP0.
13781
13782 If the expression can be simplified to a constant, then return
13783 the constant. If the expression cannot be simplified to a
13784 constant, then return NULL_TREE. */
13785
13786 tree
13787 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13788 {
13789 tree tem = fold_unary (code, type, op0);
13790 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13791 }
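
/* Usage sketch (illustrative), assuming two INTEGER_CST operands:

     fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                              build_int_cst (integer_type_node, 2),
                              build_int_cst (integer_type_node, 3))

   yields the INTEGER_CST 5, whereas an operand involving a VAR_DECL
   yields NULL_TREE because the folded result is not TREE_CONSTANT.  */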
13792
13793 /* If EXP represents referencing an element in a constant string
13794 (either via pointer arithmetic or array indexing), return the
13795 tree representing the value accessed, otherwise return NULL. */
13796
13797 tree
13798 fold_read_from_constant_string (tree exp)
13799 {
13800 if ((TREE_CODE (exp) == INDIRECT_REF
13801 || TREE_CODE (exp) == ARRAY_REF)
13802 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13803 {
13804 tree exp1 = TREE_OPERAND (exp, 0);
13805 tree index;
13806 tree string;
13807 location_t loc = EXPR_LOCATION (exp);
13808
13809 if (TREE_CODE (exp) == INDIRECT_REF)
13810 string = string_constant (exp1, &index);
13811 else
13812 {
13813 tree low_bound = array_ref_low_bound (exp);
13814 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13815
13816 /* Optimize the special-case of a zero lower bound.
13817
13818 We convert the low_bound to sizetype to avoid some problems
13819 with constant folding. (E.g. suppose the lower bound is 1,
13820 and its mode is QI. Without the conversion, (ARRAY
13821 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13822 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13823 if (! integer_zerop (low_bound))
13824 index = size_diffop_loc (loc, index,
13825 fold_convert_loc (loc, sizetype, low_bound));
13826
13827 string = exp1;
13828 }
13829
13830 if (string
13831 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13832 && TREE_CODE (string) == STRING_CST
13833 && TREE_CODE (index) == INTEGER_CST
13834 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13835 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13836 == MODE_INT)
13837 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13838 return build_int_cst_type (TREE_TYPE (exp),
13839 (TREE_STRING_POINTER (string)
13840 [TREE_INT_CST_LOW (index)]));
13841 }
13842 return NULL;
13843 }
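
/* Example (illustrative): for the ARRAY_REF "hello"[1], STRING is the
   STRING_CST "hello" and INDEX is the INTEGER_CST 1, so the result is
   an INTEGER_CST holding 'e'.  An INDIRECT_REF such as *("hello" + 1)
   goes through string_constant, which splits the address into the
   string and the constant byte offset.  */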
13844
13845 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13846 an integer, real, or fixed-point constant.
13847
13848 TYPE is the type of the result. */
13849
13850 static tree
13851 fold_negate_const (tree arg0, tree type)
13852 {
13853 tree t = NULL_TREE;
13854
13855 switch (TREE_CODE (arg0))
13856 {
13857 case INTEGER_CST:
13858 {
13859 bool overflow;
13860 wide_int val = wi::neg (arg0, &overflow);
13861 t = force_fit_type (type, val, 1,
13862 (overflow | TREE_OVERFLOW (arg0))
13863 && !TYPE_UNSIGNED (type));
13864 break;
13865 }
13866
13867 case REAL_CST:
13868 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13869 break;
13870
13871 case FIXED_CST:
13872 {
13873 FIXED_VALUE_TYPE f;
13874 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13875 &(TREE_FIXED_CST (arg0)), NULL,
13876 TYPE_SATURATING (type));
13877 t = build_fixed (type, f);
13878 /* Propagate overflow flags. */
13879 if (overflow_p | TREE_OVERFLOW (arg0))
13880 TREE_OVERFLOW (t) = 1;
13881 break;
13882 }
13883
13884 default:
13885 gcc_unreachable ();
13886 }
13887
13888 return t;
13889 }
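
/* Overflow example (illustrative): negating the most negative value of
   a signed type, e.g. INT_MIN, wraps back to itself; wi::neg reports
   the overflow and force_fit_type marks the result with TREE_OVERFLOW.
   For unsigned types the overflow flag is deliberately not set.  */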
13890
13891 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13892 an integer constant or a real constant.
13893
13894 TYPE is the type of the result. */
13895
13896 tree
13897 fold_abs_const (tree arg0, tree type)
13898 {
13899 tree t = NULL_TREE;
13900
13901 switch (TREE_CODE (arg0))
13902 {
13903 case INTEGER_CST:
13904 {
13905 /* If the value is unsigned or non-negative, then the absolute value
13906 is the same as the ordinary value. */
13907 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13908 t = arg0;
13909
13910 /* If the value is negative, then the absolute value is
13911 its negation. */
13912 else
13913 {
13914 bool overflow;
13915 wide_int val = wi::neg (arg0, &overflow);
13916 t = force_fit_type (type, val, -1,
13917 overflow | TREE_OVERFLOW (arg0));
13918 }
13919 }
13920 break;
13921
13922 case REAL_CST:
13923 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13924 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13925 else
13926 t = arg0;
13927 break;
13928
13929 default:
13930 gcc_unreachable ();
13931 }
13932
13933 return t;
13934 }
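
/* Example (illustrative): fold_abs_const on the INTEGER_CST -5 negates
   it to 5, and on INT_MIN the negation overflows, so the result carries
   TREE_OVERFLOW.  For a REAL_CST only the sign is inspected: abs of
   -0.0 yields +0.0 and abs of -Inf yields +Inf.  */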
13935
13936 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13937 constant. TYPE is the type of the result. */
13938
13939 static tree
13940 fold_not_const (const_tree arg0, tree type)
13941 {
13942 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13943
13944 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13945 }
13946
13947 /* Given CODE, a relational operator, the target type TYPE, and two
13948 constant operands OP0 and OP1, return the result of the
13949 relational operation. If the result is not a compile time
13950 constant, then return NULL_TREE. */
13951
13952 static tree
13953 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13954 {
13955 int result, invert;
13956
13957 /* From here on, the only cases we handle are when the result is
13958 known to be a constant. */
13959
13960 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13961 {
13962 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13963 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13964
13965 /* Handle the cases where either operand is a NaN. */
13966 if (real_isnan (c0) || real_isnan (c1))
13967 {
13968 switch (code)
13969 {
13970 case EQ_EXPR:
13971 case ORDERED_EXPR:
13972 result = 0;
13973 break;
13974
13975 case NE_EXPR:
13976 case UNORDERED_EXPR:
13977 case UNLT_EXPR:
13978 case UNLE_EXPR:
13979 case UNGT_EXPR:
13980 case UNGE_EXPR:
13981 case UNEQ_EXPR:
13982 result = 1;
13983 break;
13984
13985 case LT_EXPR:
13986 case LE_EXPR:
13987 case GT_EXPR:
13988 case GE_EXPR:
13989 case LTGT_EXPR:
13990 if (flag_trapping_math)
13991 return NULL_TREE;
13992 result = 0;
13993 break;
13994
13995 default:
13996 gcc_unreachable ();
13997 }
13998
13999 return constant_boolean_node (result, type);
14000 }
14001
14002 return constant_boolean_node (real_compare (code, c0, c1), type);
14003 }
14004
14005 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14006 {
14007 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14008 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14009 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14010 }
14011
14012 /* Handle equality/inequality of complex constants. */
14013 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14014 {
14015 tree rcond = fold_relational_const (code, type,
14016 TREE_REALPART (op0),
14017 TREE_REALPART (op1));
14018 tree icond = fold_relational_const (code, type,
14019 TREE_IMAGPART (op0),
14020 TREE_IMAGPART (op1));
14021 if (code == EQ_EXPR)
14022 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14023 else if (code == NE_EXPR)
14024 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14025 else
14026 return NULL_TREE;
14027 }
14028
14029 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14030 {
14031 if (!VECTOR_TYPE_P (type))
14032 {
14033 /* Have vector comparison with scalar boolean result. */
14034 bool result = true;
14035 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14036 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
14037 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
14038 {
14039 tree elem0 = VECTOR_CST_ELT (op0, i);
14040 tree elem1 = VECTOR_CST_ELT (op1, i);
14041 tree tmp = fold_relational_const (code, type, elem0, elem1);
14042 result &= integer_onep (tmp);
14043 }
14044 if (code == NE_EXPR)
14045 result = !result;
14046 return constant_boolean_node (result, type);
14047 }
14048 unsigned count = VECTOR_CST_NELTS (op0);
14049 tree *elts = XALLOCAVEC (tree, count);
14050 gcc_assert (VECTOR_CST_NELTS (op1) == count
14051 && TYPE_VECTOR_SUBPARTS (type) == count);
14052
14053 for (unsigned i = 0; i < count; i++)
14054 {
14055 tree elem_type = TREE_TYPE (type);
14056 tree elem0 = VECTOR_CST_ELT (op0, i);
14057 tree elem1 = VECTOR_CST_ELT (op1, i);
14058
14059 tree tem = fold_relational_const (code, elem_type,
14060 elem0, elem1);
14061
14062 if (tem == NULL_TREE)
14063 return NULL_TREE;
14064
14065 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
14066 }
14067
14068 return build_vector (type, elts);
14069 }
14070
14071 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14072
14073 To compute GT, swap the arguments and do LT.
14074 To compute GE, do LT and invert the result.
14075 To compute LE, swap the arguments, do LT and invert the result.
14076 To compute NE, do EQ and invert the result.
14077
14078 Therefore, the code below must handle only EQ and LT. */
14079
14080 if (code == LE_EXPR || code == GT_EXPR)
14081 {
14082 std::swap (op0, op1);
14083 code = swap_tree_comparison (code);
14084 }
14085
14086 /* Note that it is safe to invert for real values here because we
14087 have already handled the one case where it matters. */
14088
14089 invert = 0;
14090 if (code == NE_EXPR || code == GE_EXPR)
14091 {
14092 invert = 1;
14093 code = invert_tree_comparison (code, false);
14094 }
14095
14096 /* Compute a result for LT or EQ if args permit;
14097 otherwise return NULL_TREE. */
14098 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14099 {
14100 if (code == EQ_EXPR)
14101 result = tree_int_cst_equal (op0, op1);
14102 else
14103 result = tree_int_cst_lt (op0, op1);
14104 }
14105 else
14106 return NULL_TREE;
14107
14108 if (invert)
14109 result ^= 1;
14110 return constant_boolean_node (result, type);
14111 }
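
/* Canonicalization sketch (illustrative): 2 > 1 is evaluated as 1 < 2
   after the operand swap above, and 2 >= 1 as !(2 < 1), so only
   tree_int_cst_equal and tree_int_cst_lt are ever consulted.  With a
   NaN operand, 1.0 < NaN folds to false only under -fno-trapping-math;
   otherwise NULL_TREE is returned so that the comparison can raise the
   invalid-operation exception at run time.  */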
14112
14113 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14114 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14115 itself. */
14116
14117 tree
14118 fold_build_cleanup_point_expr (tree type, tree expr)
14119 {
14120 /* If the expression does not have side effects then we don't have to wrap
14121 it with a cleanup point expression. */
14122 if (!TREE_SIDE_EFFECTS (expr))
14123 return expr;
14124
14125 /* If the expression is a RETURN_EXPR, check whether the expression inside
14126 the return, or the right-hand side of the MODIFY_EXPR inside the return,
14127 has side effects. If either has none, we don't need to wrap the
14128 expression in a cleanup point expression. Note that we don't check the
14129 left-hand side of the modify because it should always be the return decl. */
14130 if (TREE_CODE (expr) == RETURN_EXPR)
14131 {
14132 tree op = TREE_OPERAND (expr, 0);
14133 if (!op || !TREE_SIDE_EFFECTS (op))
14134 return expr;
14135 op = TREE_OPERAND (op, 1);
14136 if (!TREE_SIDE_EFFECTS (op))
14137 return expr;
14138 }
14139
14140 return build1 (CLEANUP_POINT_EXPR, type, expr);
14141 }
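
/* Example (illustrative): a bare call f () has side effects and is
   wrapped in a CLEANUP_POINT_EXPR, while a RETURN_EXPR whose
   MODIFY_EXPR assigns a side-effect-free value, such as
   return <retval> = 42, is returned unwrapped.  */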
14142
14143 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14144 of an indirection through OP0, or NULL_TREE if no simplification is
14145 possible. */
14146
14147 tree
14148 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14149 {
14150 tree sub = op0;
14151 tree subtype;
14152
14153 STRIP_NOPS (sub);
14154 subtype = TREE_TYPE (sub);
14155 if (!POINTER_TYPE_P (subtype))
14156 return NULL_TREE;
14157
14158 if (TREE_CODE (sub) == ADDR_EXPR)
14159 {
14160 tree op = TREE_OPERAND (sub, 0);
14161 tree optype = TREE_TYPE (op);
14162 /* *&CONST_DECL -> to the value of the const decl. */
14163 if (TREE_CODE (op) == CONST_DECL)
14164 return DECL_INITIAL (op);
14165 /* *&p => p; make sure to handle *&"str"[cst] here. */
14166 if (type == optype)
14167 {
14168 tree fop = fold_read_from_constant_string (op);
14169 if (fop)
14170 return fop;
14171 else
14172 return op;
14173 }
14174 /* *(foo *)&fooarray => fooarray[0] */
14175 else if (TREE_CODE (optype) == ARRAY_TYPE
14176 && type == TREE_TYPE (optype)
14177 && (!in_gimple_form
14178 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14179 {
14180 tree type_domain = TYPE_DOMAIN (optype);
14181 tree min_val = size_zero_node;
14182 if (type_domain && TYPE_MIN_VALUE (type_domain))
14183 min_val = TYPE_MIN_VALUE (type_domain);
14184 if (in_gimple_form
14185 && TREE_CODE (min_val) != INTEGER_CST)
14186 return NULL_TREE;
14187 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14188 NULL_TREE, NULL_TREE);
14189 }
14190 /* *(foo *)&complexfoo => __real__ complexfoo */
14191 else if (TREE_CODE (optype) == COMPLEX_TYPE
14192 && type == TREE_TYPE (optype))
14193 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14194 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14195 else if (TREE_CODE (optype) == VECTOR_TYPE
14196 && type == TREE_TYPE (optype))
14197 {
14198 tree part_width = TYPE_SIZE (type);
14199 tree index = bitsize_int (0);
14200 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14201 }
14202 }
14203
14204 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14205 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14206 {
14207 tree op00 = TREE_OPERAND (sub, 0);
14208 tree op01 = TREE_OPERAND (sub, 1);
14209
14210 STRIP_NOPS (op00);
14211 if (TREE_CODE (op00) == ADDR_EXPR)
14212 {
14213 tree op00type;
14214 op00 = TREE_OPERAND (op00, 0);
14215 op00type = TREE_TYPE (op00);
14216
14217 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14218 if (TREE_CODE (op00type) == VECTOR_TYPE
14219 && type == TREE_TYPE (op00type))
14220 {
14221 HOST_WIDE_INT offset = tree_to_shwi (op01);
14222 tree part_width = TYPE_SIZE (type);
14223 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
14224 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14225 tree index = bitsize_int (indexi);
14226
14227 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14228 return fold_build3_loc (loc,
14229 BIT_FIELD_REF, type, op00,
14230 part_width, index);
14231
14232 }
14233 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14234 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14235 && type == TREE_TYPE (op00type))
14236 {
14237 tree size = TYPE_SIZE_UNIT (type);
14238 if (tree_int_cst_equal (size, op01))
14239 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14240 }
14241 /* ((foo *)&fooarray)[1] => fooarray[1] */
14242 else if (TREE_CODE (op00type) == ARRAY_TYPE
14243 && type == TREE_TYPE (op00type))
14244 {
14245 tree type_domain = TYPE_DOMAIN (op00type);
14246 tree min_val = size_zero_node;
14247 if (type_domain && TYPE_MIN_VALUE (type_domain))
14248 min_val = TYPE_MIN_VALUE (type_domain);
14249 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14250 TYPE_SIZE_UNIT (type));
14251 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14252 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14253 NULL_TREE, NULL_TREE);
14254 }
14255 }
14256 }
14257
14258 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14259 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14260 && type == TREE_TYPE (TREE_TYPE (subtype))
14261 && (!in_gimple_form
14262 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14263 {
14264 tree type_domain;
14265 tree min_val = size_zero_node;
14266 sub = build_fold_indirect_ref_loc (loc, sub);
14267 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14268 if (type_domain && TYPE_MIN_VALUE (type_domain))
14269 min_val = TYPE_MIN_VALUE (type_domain);
14270 if (in_gimple_form
14271 && TREE_CODE (min_val) != INTEGER_CST)
14272 return NULL_TREE;
14273 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14274 NULL_TREE);
14275 }
14276
14277 return NULL_TREE;
14278 }
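
/* Worked example (illustrative): with _Complex double z, the C
   expression *((double *) &z + 1) carries a POINTER_PLUS_EXPR whose
   byte offset equals TYPE_SIZE_UNIT (double), so the COMPLEX_TYPE case
   above folds it to __imag__ z; the plain *(double *) &z form is
   handled by the ADDR_EXPR case and folds to __real__ z.  */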
14279
14280 /* Builds an expression for an indirection through T, simplifying some
14281 cases. */
14282
14283 tree
14284 build_fold_indirect_ref_loc (location_t loc, tree t)
14285 {
14286 tree type = TREE_TYPE (TREE_TYPE (t));
14287 tree sub = fold_indirect_ref_1 (loc, type, t);
14288
14289 if (sub)
14290 return sub;
14291
14292 return build1_loc (loc, INDIRECT_REF, type, t);
14293 }
14294
14295 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14296
14297 tree
14298 fold_indirect_ref_loc (location_t loc, tree t)
14299 {
14300 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14301
14302 if (sub)
14303 return sub;
14304 else
14305 return t;
14306 }
14307
14308 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14309 whose result is ignored. The type of the returned tree need not be
14310 the same as that of the original expression. */
14311
14312 tree
14313 fold_ignored_result (tree t)
14314 {
14315 if (!TREE_SIDE_EFFECTS (t))
14316 return integer_zero_node;
14317
14318 for (;;)
14319 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14320 {
14321 case tcc_unary:
14322 t = TREE_OPERAND (t, 0);
14323 break;
14324
14325 case tcc_binary:
14326 case tcc_comparison:
14327 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14328 t = TREE_OPERAND (t, 0);
14329 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14330 t = TREE_OPERAND (t, 1);
14331 else
14332 return t;
14333 break;
14334
14335 case tcc_expression:
14336 switch (TREE_CODE (t))
14337 {
14338 case COMPOUND_EXPR:
14339 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14340 return t;
14341 t = TREE_OPERAND (t, 0);
14342 break;
14343
14344 case COND_EXPR:
14345 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14346 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14347 return t;
14348 t = TREE_OPERAND (t, 0);
14349 break;
14350
14351 default:
14352 return t;
14353 }
14354 break;
14355
14356 default:
14357 return t;
14358 }
14359 }
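
/* Example (illustrative): for x + f () evaluated for effect only, the
   tcc_binary case drops the side-effect-free operand x and returns
   f (); for f () + g () both operands have side effects, so the whole
   expression is returned unchanged.  */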
14360
14361 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14362
14363 tree
14364 round_up_loc (location_t loc, tree value, unsigned int divisor)
14365 {
14366 tree div = NULL_TREE;
14367
14368 if (divisor == 1)
14369 return value;
14370
14371 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14372 have to do anything. Only do this check when VALUE is not a
14373 constant, because for a constant the check is more expensive than
14374 simply doing the rounding. */
14375 if (TREE_CODE (value) != INTEGER_CST)
14376 {
14377 div = build_int_cst (TREE_TYPE (value), divisor);
14378
14379 if (multiple_of_p (TREE_TYPE (value), value, div))
14380 return value;
14381 }
14382
14383 /* If divisor is a power of two, simplify this to bit manipulation. */
14384 if (divisor == (divisor & -divisor))
14385 {
14386 if (TREE_CODE (value) == INTEGER_CST)
14387 {
14388 wide_int val = value;
14389 bool overflow_p;
14390
14391 if ((val & (divisor - 1)) == 0)
14392 return value;
14393
14394 overflow_p = TREE_OVERFLOW (value);
14395 val += divisor - 1;
14396 val &= - (int) divisor;
14397 if (val == 0)
14398 overflow_p = true;
14399
14400 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14401 }
14402 else
14403 {
14404 tree t;
14405
14406 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14407 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14408 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14409 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14410 }
14411 }
14412 else
14413 {
14414 if (!div)
14415 div = build_int_cst (TREE_TYPE (value), divisor);
14416 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14417 value = size_binop_loc (loc, MULT_EXPR, value, div);
14418 }
14419
14420 return value;
14421 }
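
/* Bit-manipulation example (illustrative): rounding 37 up to a multiple
   of 8 computes (37 + 7) & -8 = 40.  The explicit overflow check covers
   wrap-around: rounding the all-ones value of an unsigned type leaves
   VAL equal to zero after the mask, so overflow_p is set.  */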
14422
14423 /* Likewise, but round down. */
14424
14425 tree
14426 round_down_loc (location_t loc, tree value, int divisor)
14427 {
14428 tree div = NULL_TREE;
14429
14430 gcc_assert (divisor > 0);
14431 if (divisor == 1)
14432 return value;
14433
14434 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14435 have to do anything. Only do this check when VALUE is not a
14436 constant, because for a constant the check is more expensive than
14437 simply doing the rounding. */
14438 if (TREE_CODE (value) != INTEGER_CST)
14439 {
14440 div = build_int_cst (TREE_TYPE (value), divisor);
14441
14442 if (multiple_of_p (TREE_TYPE (value), value, div))
14443 return value;
14444 }
14445
14446 /* If divisor is a power of two, simplify this to bit manipulation. */
14447 if (divisor == (divisor & -divisor))
14448 {
14449 tree t;
14450
14451 t = build_int_cst (TREE_TYPE (value), -divisor);
14452 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14453 }
14454 else
14455 {
14456 if (!div)
14457 div = build_int_cst (TREE_TYPE (value), divisor);
14458 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14459 value = size_binop_loc (loc, MULT_EXPR, value, div);
14460 }
14461
14462 return value;
14463 }
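
/* Example (illustrative): rounding 37 down to a multiple of 8 computes
   37 & -8 = 32, while a non-power-of-two divisor such as 6 takes the
   generic path: (37 floor-div 6) * 6 = 36.  */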
14464
14465 /* Returns the pointer to the base of the object addressed by EXP and
14466 extracts the information about the offset of the access, storing it
14467 in *PBITPOS and *POFFSET. */
14468
14469 static tree
14470 split_address_to_core_and_offset (tree exp,
14471 HOST_WIDE_INT *pbitpos, tree *poffset)
14472 {
14473 tree core;
14474 machine_mode mode;
14475 int unsignedp, reversep, volatilep;
14476 HOST_WIDE_INT bitsize;
14477 location_t loc = EXPR_LOCATION (exp);
14478
14479 if (TREE_CODE (exp) == ADDR_EXPR)
14480 {
14481 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14482 poffset, &mode, &unsignedp, &reversep,
14483 &volatilep, false);
14484 core = build_fold_addr_expr_loc (loc, core);
14485 }
14486 else
14487 {
14488 core = exp;
14489 *pbitpos = 0;
14490 *poffset = NULL_TREE;
14491 }
14492
14493 return core;
14494 }
14495
14496 /* Returns true if addresses of E1 and E2 differ by a constant, false
14497 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14498
14499 bool
14500 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14501 {
14502 tree core1, core2;
14503 HOST_WIDE_INT bitpos1, bitpos2;
14504 tree toffset1, toffset2, tdiff, type;
14505
14506 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14507 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14508
14509 if (bitpos1 % BITS_PER_UNIT != 0
14510 || bitpos2 % BITS_PER_UNIT != 0
14511 || !operand_equal_p (core1, core2, 0))
14512 return false;
14513
14514 if (toffset1 && toffset2)
14515 {
14516 type = TREE_TYPE (toffset1);
14517 if (type != TREE_TYPE (toffset2))
14518 toffset2 = fold_convert (type, toffset2);
14519
14520 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14521 if (!cst_and_fits_in_hwi (tdiff))
14522 return false;
14523
14524 *diff = int_cst_value (tdiff);
14525 }
14526 else if (toffset1 || toffset2)
14527 {
14528 /* If only one of the offsets is non-constant, the difference cannot
14529 be a constant. */
14530 return false;
14531 }
14532 else
14533 *diff = 0;
14534
14535 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14536 return true;
14537 }
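
/* Example (illustrative): given int a[10], the addresses &a[5] and
   &a[2] share the core &a with bit positions 160 and 64, so *DIFF is
   set to 12 on a target with 4-byte int and the function returns true.
   &a[i] versus &a[2] fails because only one offset is constant.  */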
14538
14539 /* Return OFF converted to a pointer offset type suitable as offset for
14540 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14541 tree
14542 convert_to_ptrofftype_loc (location_t loc, tree off)
14543 {
14544 return fold_convert_loc (loc, sizetype, off);
14545 }
14546
14547 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14548 tree
14549 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14550 {
14551 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14552 ptr, convert_to_ptrofftype_loc (loc, off));
14553 }
14554
14555 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14556 tree
14557 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14558 {
14559 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14560 ptr, size_int (off));
14561 }
14562
14563 /* Return a char pointer for a C string if it is a string constant
14564 or the sum of a string constant and an integer constant. */
14565
14566 const char *
14567 c_getstr (tree src)
14568 {
14569 tree offset_node;
14570
14571 src = string_constant (src, &offset_node);
14572 if (src == 0)
14573 return 0;
14574
14575 if (offset_node == 0)
14576 return TREE_STRING_POINTER (src);
14577 else if (!tree_fits_uhwi_p (offset_node)
14578 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14579 return 0;
14580
14581 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
14582 }
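
/* Example (illustrative): c_getstr on the tree for "abc" + 1 returns a
   pointer to "bc".  An offset that does not fit in an unsigned
   HOST_WIDE_INT, or one past the terminating NUL, makes the function
   return NULL.  */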