re PR middle-end/69553 (Optimizations O1/O2 makes std::array value incorrect when...
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "tree-ssa-operands.h"
54 #include "optabs-query.h"
55 #include "cgraph.h"
56 #include "diagnostic-core.h"
57 #include "flags.h"
58 #include "alias.h"
59 #include "fold-const.h"
60 #include "fold-const-call.h"
61 #include "stor-layout.h"
62 #include "calls.h"
63 #include "tree-iterator.h"
64 #include "expr.h"
65 #include "intl.h"
66 #include "langhooks.h"
67 #include "tree-eh.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "builtins.h"
71 #include "generic-match.h"
72 #include "gimple-fold.h"
73 #include "params.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-ssanames.h"
79
80 #ifndef LOAD_EXTEND_OP
81 #define LOAD_EXTEND_OP(M) UNKNOWN
82 #endif
83
84 /* Nonzero if we are folding constants inside an initializer; zero
85 otherwise. */
86 int folding_initializer = 0;
87
88 /* The following constants represent a bit based encoding of GCC's
89 comparison operators. This encoding simplifies transformations
90 on relational comparison operators, such as AND and OR. */
91 enum comparison_code {
92 COMPCODE_FALSE = 0,
93 COMPCODE_LT = 1,
94 COMPCODE_EQ = 2,
95 COMPCODE_LE = 3,
96 COMPCODE_GT = 4,
97 COMPCODE_LTGT = 5,
98 COMPCODE_GE = 6,
99 COMPCODE_ORD = 7,
100 COMPCODE_UNORD = 8,
101 COMPCODE_UNLT = 9,
102 COMPCODE_UNEQ = 10,
103 COMPCODE_UNLE = 11,
104 COMPCODE_UNGT = 12,
105 COMPCODE_NE = 13,
106 COMPCODE_UNGE = 14,
107 COMPCODE_TRUE = 15
108 };
109
110 static bool negate_expr_p (tree);
111 static tree negate_expr (tree);
112 static tree split_tree (location_t, tree, tree, enum tree_code,
113 tree *, tree *, tree *, int);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static int operand_equal_for_comparison_p (tree, tree, tree);
118 static int twoval_comparison_p (tree, tree *, tree *, int *);
119 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
120 static tree make_bit_field_ref (location_t, tree, tree,
121 HOST_WIDE_INT, HOST_WIDE_INT, int, int);
122 static tree optimize_bit_field_compare (location_t, enum tree_code,
123 tree, tree, tree);
124 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
125 HOST_WIDE_INT *,
126 machine_mode *, int *, int *, int *,
127 tree *, tree *);
128 static int simple_operand_p (const_tree);
129 static bool simple_operand_p_2 (tree);
130 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
131 static tree range_predecessor (tree);
132 static tree range_successor (tree);
133 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
134 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
135 static tree unextend (tree, int, int, tree);
136 static tree optimize_minmax_comparison (location_t, enum tree_code,
137 tree, tree, tree);
138 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
139 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
140 static tree fold_binary_op_with_conditional_arg (location_t,
141 enum tree_code, tree,
142 tree, tree,
143 tree, tree, int);
144 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
145 static bool reorder_operands_p (const_tree, const_tree);
146 static tree fold_negate_const (tree, tree);
147 static tree fold_not_const (const_tree, tree);
148 static tree fold_relational_const (enum tree_code, tree, tree, tree);
149 static tree fold_convert_const (enum tree_code, tree, tree);
150 static tree fold_view_convert_expr (tree, tree);
151 static bool vec_cst_ctor_to_array (tree, tree *);
152
153
154 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
155 Otherwise, return LOC. */
156
157 static location_t
158 expr_location_or (tree t, location_t loc)
159 {
160 location_t tloc = EXPR_LOCATION (t);
161 return tloc == UNKNOWN_LOCATION ? loc : tloc;
162 }
163
164 /* Similar to protected_set_expr_location, but never modify x in place,
165 if location can and needs to be set, unshare it. */
166
167 static inline tree
168 protected_set_expr_location_unshare (tree x, location_t loc)
169 {
170 if (CAN_HAVE_LOCATION_P (x)
171 && EXPR_LOCATION (x) != loc
172 && !(TREE_CODE (x) == SAVE_EXPR
173 || TREE_CODE (x) == TARGET_EXPR
174 || TREE_CODE (x) == BIND_EXPR))
175 {
176 x = copy_node (x);
177 SET_EXPR_LOCATION (x, loc);
178 }
179 return x;
180 }
181 \f
182 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
183 division and returns the quotient. Otherwise returns
184 NULL_TREE. */
185
186 tree
187 div_if_zero_remainder (const_tree arg1, const_tree arg2)
188 {
189 widest_int quo;
190
191 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
192 SIGNED, &quo))
193 return wide_int_to_tree (TREE_TYPE (arg1), quo);
194
195 return NULL_TREE;
196 }
197 \f
198 /* This is nonzero if we should defer warnings about undefined
199 overflow. This facility exists because these warnings are a
200 special case. The code to estimate loop iterations does not want
201 to issue any warnings, since it works with expressions which do not
202 occur in user code. Various bits of cleanup code call fold(), but
203 only use the result if it has certain characteristics (e.g., is a
204 constant); that code only wants to issue a warning if the result is
205 used. */
206
207 static int fold_deferring_overflow_warnings;
208
209 /* If a warning about undefined overflow is deferred, this is the
210 warning. Note that this may cause us to turn two warnings into
211 one, but that is fine since it is sufficient to only give one
212 warning per expression. */
213
214 static const char* fold_deferred_overflow_warning;
215
216 /* If a warning about undefined overflow is deferred, this is the
217 level at which the warning should be emitted. */
218
219 static enum warn_strict_overflow_code fold_deferred_overflow_code;
220
221 /* Start deferring overflow warnings. We could use a stack here to
222 permit nested calls, but at present it is not necessary. */
223
224 void
225 fold_defer_overflow_warnings (void)
226 {
227 ++fold_deferring_overflow_warnings;
228 }
229
230 /* Stop deferring overflow warnings. If there is a pending warning,
231 and ISSUE is true, then issue the warning if appropriate. STMT is
232 the statement with which the warning should be associated (used for
233 location information); STMT may be NULL. CODE is the level of the
234 warning--a warn_strict_overflow_code value. This function will use
235 the smaller of CODE and the deferred code when deciding whether to
236 issue the warning. CODE may be zero to mean to always use the
237 deferred code. */
238
239 void
240 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
241 {
242 const char *warnmsg;
243 location_t locus;
244
245 gcc_assert (fold_deferring_overflow_warnings > 0);
246 --fold_deferring_overflow_warnings;
247 if (fold_deferring_overflow_warnings > 0)
248 {
249 if (fold_deferred_overflow_warning != NULL
250 && code != 0
251 && code < (int) fold_deferred_overflow_code)
252 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
253 return;
254 }
255
256 warnmsg = fold_deferred_overflow_warning;
257 fold_deferred_overflow_warning = NULL;
258
259 if (!issue || warnmsg == NULL)
260 return;
261
262 if (gimple_no_warning_p (stmt))
263 return;
264
265 /* Use the smallest code level when deciding to issue the
266 warning. */
267 if (code == 0 || code > (int) fold_deferred_overflow_code)
268 code = fold_deferred_overflow_code;
269
270 if (!issue_strict_overflow_warning (code))
271 return;
272
273 if (stmt == NULL)
274 locus = input_location;
275 else
276 locus = gimple_location (stmt);
277 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
278 }
279
280 /* Stop deferring overflow warnings, ignoring any deferred
281 warnings. */
282
283 void
284 fold_undefer_and_ignore_overflow_warnings (void)
285 {
286 fold_undefer_overflow_warnings (false, NULL, 0);
287 }
288
289 /* Whether we are deferring overflow warnings. */
290
291 bool
292 fold_deferring_overflow_warnings_p (void)
293 {
294 return fold_deferring_overflow_warnings > 0;
295 }
296
297 /* This is called when we fold something based on the fact that signed
298 overflow is undefined. */
299
300 static void
301 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
302 {
303 if (fold_deferring_overflow_warnings > 0)
304 {
305 if (fold_deferred_overflow_warning == NULL
306 || wc < fold_deferred_overflow_code)
307 {
308 fold_deferred_overflow_warning = gmsgid;
309 fold_deferred_overflow_code = wc;
310 }
311 }
312 else if (issue_strict_overflow_warning (wc))
313 warning (OPT_Wstrict_overflow, gmsgid);
314 }
315 \f
316 /* Return true if the built-in mathematical function specified by CODE
317 is odd, i.e. -f(x) == f(-x). */
318
319 bool
320 negate_mathfn_p (combined_fn fn)
321 {
322 switch (fn)
323 {
324 CASE_CFN_ASIN:
325 CASE_CFN_ASINH:
326 CASE_CFN_ATAN:
327 CASE_CFN_ATANH:
328 CASE_CFN_CASIN:
329 CASE_CFN_CASINH:
330 CASE_CFN_CATAN:
331 CASE_CFN_CATANH:
332 CASE_CFN_CBRT:
333 CASE_CFN_CPROJ:
334 CASE_CFN_CSIN:
335 CASE_CFN_CSINH:
336 CASE_CFN_CTAN:
337 CASE_CFN_CTANH:
338 CASE_CFN_ERF:
339 CASE_CFN_LLROUND:
340 CASE_CFN_LROUND:
341 CASE_CFN_ROUND:
342 CASE_CFN_SIN:
343 CASE_CFN_SINH:
344 CASE_CFN_TAN:
345 CASE_CFN_TANH:
346 CASE_CFN_TRUNC:
347 return true;
348
349 CASE_CFN_LLRINT:
350 CASE_CFN_LRINT:
351 CASE_CFN_NEARBYINT:
352 CASE_CFN_RINT:
353 return !flag_rounding_math;
354
355 default:
356 break;
357 }
358 return false;
359 }
360
361 /* Check whether we may negate an integer constant T without causing
362 overflow. */
363
364 bool
365 may_negate_without_overflow_p (const_tree t)
366 {
367 tree type;
368
369 gcc_assert (TREE_CODE (t) == INTEGER_CST);
370
371 type = TREE_TYPE (t);
372 if (TYPE_UNSIGNED (type))
373 return false;
374
375 return !wi::only_sign_bit_p (t);
376 }
377
378 /* Determine whether an expression T can be cheaply negated using
379 the function negate_expr without introducing undefined overflow. */
380
381 static bool
382 negate_expr_p (tree t)
383 {
384 tree type;
385
386 if (t == 0)
387 return false;
388
389 type = TREE_TYPE (t);
390
391 STRIP_SIGN_NOPS (t);
392 switch (TREE_CODE (t))
393 {
394 case INTEGER_CST:
395 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
396 return true;
397
398 /* Check that -CST will not overflow type. */
399 return may_negate_without_overflow_p (t);
400 case BIT_NOT_EXPR:
401 return (INTEGRAL_TYPE_P (type)
402 && TYPE_OVERFLOW_WRAPS (type));
403
404 case FIXED_CST:
405 return true;
406
407 case NEGATE_EXPR:
408 return !TYPE_OVERFLOW_SANITIZED (type);
409
410 case REAL_CST:
411 /* We want to canonicalize to positive real constants. Pretend
412 that only negative ones can be easily negated. */
413 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
414
415 case COMPLEX_CST:
416 return negate_expr_p (TREE_REALPART (t))
417 && negate_expr_p (TREE_IMAGPART (t));
418
419 case VECTOR_CST:
420 {
421 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
422 return true;
423
424 int count = TYPE_VECTOR_SUBPARTS (type), i;
425
426 for (i = 0; i < count; i++)
427 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
428 return false;
429
430 return true;
431 }
432
433 case COMPLEX_EXPR:
434 return negate_expr_p (TREE_OPERAND (t, 0))
435 && negate_expr_p (TREE_OPERAND (t, 1));
436
437 case CONJ_EXPR:
438 return negate_expr_p (TREE_OPERAND (t, 0));
439
440 case PLUS_EXPR:
441 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
442 || HONOR_SIGNED_ZEROS (element_mode (type))
443 || (INTEGRAL_TYPE_P (type)
444 && ! TYPE_OVERFLOW_WRAPS (type)))
445 return false;
446 /* -(A + B) -> (-B) - A. */
447 if (negate_expr_p (TREE_OPERAND (t, 1))
448 && reorder_operands_p (TREE_OPERAND (t, 0),
449 TREE_OPERAND (t, 1)))
450 return true;
451 /* -(A + B) -> (-A) - B. */
452 return negate_expr_p (TREE_OPERAND (t, 0));
453
454 case MINUS_EXPR:
455 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
456 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
457 && !HONOR_SIGNED_ZEROS (element_mode (type))
458 && (! INTEGRAL_TYPE_P (type)
459 || TYPE_OVERFLOW_WRAPS (type))
460 && reorder_operands_p (TREE_OPERAND (t, 0),
461 TREE_OPERAND (t, 1));
462
463 case MULT_EXPR:
464 if (TYPE_UNSIGNED (type))
465 break;
466 /* INT_MIN/n * n doesn't overflow while negating one operand it does
467 if n is a power of two. */
468 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
469 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
470 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
471 && ! integer_pow2p (TREE_OPERAND (t, 0)))
472 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
473 && ! integer_pow2p (TREE_OPERAND (t, 1)))))
474 break;
475
476 /* Fall through. */
477
478 case RDIV_EXPR:
479 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
480 return negate_expr_p (TREE_OPERAND (t, 1))
481 || negate_expr_p (TREE_OPERAND (t, 0));
482 break;
483
484 case TRUNC_DIV_EXPR:
485 case ROUND_DIV_EXPR:
486 case EXACT_DIV_EXPR:
487 if (TYPE_UNSIGNED (type))
488 break;
489 if (negate_expr_p (TREE_OPERAND (t, 0)))
490 return true;
491 /* In general we can't negate B in A / B, because if A is INT_MIN and
492 B is 1, we may turn this into INT_MIN / -1 which is undefined
493 and actually traps on some architectures. */
494 if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
495 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
496 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
497 && ! integer_onep (TREE_OPERAND (t, 1))))
498 return negate_expr_p (TREE_OPERAND (t, 1));
499 break;
500
501 case NOP_EXPR:
502 /* Negate -((double)float) as (double)(-float). */
503 if (TREE_CODE (type) == REAL_TYPE)
504 {
505 tree tem = strip_float_extensions (t);
506 if (tem != t)
507 return negate_expr_p (tem);
508 }
509 break;
510
511 case CALL_EXPR:
512 /* Negate -f(x) as f(-x). */
513 if (negate_mathfn_p (get_call_combined_fn (t)))
514 return negate_expr_p (CALL_EXPR_ARG (t, 0));
515 break;
516
517 case RSHIFT_EXPR:
518 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
519 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
520 {
521 tree op1 = TREE_OPERAND (t, 1);
522 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
523 return true;
524 }
525 break;
526
527 default:
528 break;
529 }
530 return false;
531 }
532
533 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
534 simplification is possible.
535 If negate_expr_p would return true for T, NULL_TREE will never be
536 returned. */
537
538 static tree
539 fold_negate_expr (location_t loc, tree t)
540 {
541 tree type = TREE_TYPE (t);
542 tree tem;
543
544 switch (TREE_CODE (t))
545 {
546 /* Convert - (~A) to A + 1. */
547 case BIT_NOT_EXPR:
548 if (INTEGRAL_TYPE_P (type))
549 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
550 build_one_cst (type));
551 break;
552
553 case INTEGER_CST:
554 tem = fold_negate_const (t, type);
555 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
556 || (ANY_INTEGRAL_TYPE_P (type)
557 && !TYPE_OVERFLOW_TRAPS (type)
558 && TYPE_OVERFLOW_WRAPS (type))
559 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
560 return tem;
561 break;
562
563 case REAL_CST:
564 tem = fold_negate_const (t, type);
565 return tem;
566
567 case FIXED_CST:
568 tem = fold_negate_const (t, type);
569 return tem;
570
571 case COMPLEX_CST:
572 {
573 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
574 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
575 if (rpart && ipart)
576 return build_complex (type, rpart, ipart);
577 }
578 break;
579
580 case VECTOR_CST:
581 {
582 int count = TYPE_VECTOR_SUBPARTS (type), i;
583 tree *elts = XALLOCAVEC (tree, count);
584
585 for (i = 0; i < count; i++)
586 {
587 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
588 if (elts[i] == NULL_TREE)
589 return NULL_TREE;
590 }
591
592 return build_vector (type, elts);
593 }
594
595 case COMPLEX_EXPR:
596 if (negate_expr_p (t))
597 return fold_build2_loc (loc, COMPLEX_EXPR, type,
598 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
599 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
600 break;
601
602 case CONJ_EXPR:
603 if (negate_expr_p (t))
604 return fold_build1_loc (loc, CONJ_EXPR, type,
605 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
606 break;
607
608 case NEGATE_EXPR:
609 if (!TYPE_OVERFLOW_SANITIZED (type))
610 return TREE_OPERAND (t, 0);
611 break;
612
613 case PLUS_EXPR:
614 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
615 && !HONOR_SIGNED_ZEROS (element_mode (type)))
616 {
617 /* -(A + B) -> (-B) - A. */
618 if (negate_expr_p (TREE_OPERAND (t, 1))
619 && reorder_operands_p (TREE_OPERAND (t, 0),
620 TREE_OPERAND (t, 1)))
621 {
622 tem = negate_expr (TREE_OPERAND (t, 1));
623 return fold_build2_loc (loc, MINUS_EXPR, type,
624 tem, TREE_OPERAND (t, 0));
625 }
626
627 /* -(A + B) -> (-A) - B. */
628 if (negate_expr_p (TREE_OPERAND (t, 0)))
629 {
630 tem = negate_expr (TREE_OPERAND (t, 0));
631 return fold_build2_loc (loc, MINUS_EXPR, type,
632 tem, TREE_OPERAND (t, 1));
633 }
634 }
635 break;
636
637 case MINUS_EXPR:
638 /* - (A - B) -> B - A */
639 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
640 && !HONOR_SIGNED_ZEROS (element_mode (type))
641 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
642 return fold_build2_loc (loc, MINUS_EXPR, type,
643 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
644 break;
645
646 case MULT_EXPR:
647 if (TYPE_UNSIGNED (type))
648 break;
649
650 /* Fall through. */
651
652 case RDIV_EXPR:
653 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
654 {
655 tem = TREE_OPERAND (t, 1);
656 if (negate_expr_p (tem))
657 return fold_build2_loc (loc, TREE_CODE (t), type,
658 TREE_OPERAND (t, 0), negate_expr (tem));
659 tem = TREE_OPERAND (t, 0);
660 if (negate_expr_p (tem))
661 return fold_build2_loc (loc, TREE_CODE (t), type,
662 negate_expr (tem), TREE_OPERAND (t, 1));
663 }
664 break;
665
666 case TRUNC_DIV_EXPR:
667 case ROUND_DIV_EXPR:
668 case EXACT_DIV_EXPR:
669 if (TYPE_UNSIGNED (type))
670 break;
671 if (negate_expr_p (TREE_OPERAND (t, 0)))
672 return fold_build2_loc (loc, TREE_CODE (t), type,
673 negate_expr (TREE_OPERAND (t, 0)),
674 TREE_OPERAND (t, 1));
675 /* In general we can't negate B in A / B, because if A is INT_MIN and
676 B is 1, we may turn this into INT_MIN / -1 which is undefined
677 and actually traps on some architectures. */
678 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
679 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
680 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
681 && ! integer_onep (TREE_OPERAND (t, 1))))
682 && negate_expr_p (TREE_OPERAND (t, 1)))
683 return fold_build2_loc (loc, TREE_CODE (t), type,
684 TREE_OPERAND (t, 0),
685 negate_expr (TREE_OPERAND (t, 1)));
686 break;
687
688 case NOP_EXPR:
689 /* Convert -((double)float) into (double)(-float). */
690 if (TREE_CODE (type) == REAL_TYPE)
691 {
692 tem = strip_float_extensions (t);
693 if (tem != t && negate_expr_p (tem))
694 return fold_convert_loc (loc, type, negate_expr (tem));
695 }
696 break;
697
698 case CALL_EXPR:
699 /* Negate -f(x) as f(-x). */
700 if (negate_mathfn_p (get_call_combined_fn (t))
701 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
702 {
703 tree fndecl, arg;
704
705 fndecl = get_callee_fndecl (t);
706 arg = negate_expr (CALL_EXPR_ARG (t, 0));
707 return build_call_expr_loc (loc, fndecl, 1, arg);
708 }
709 break;
710
711 case RSHIFT_EXPR:
712 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
713 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
714 {
715 tree op1 = TREE_OPERAND (t, 1);
716 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
717 {
718 tree ntype = TYPE_UNSIGNED (type)
719 ? signed_type_for (type)
720 : unsigned_type_for (type);
721 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
722 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
723 return fold_convert_loc (loc, type, temp);
724 }
725 }
726 break;
727
728 default:
729 break;
730 }
731
732 return NULL_TREE;
733 }
734
735 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
736 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
737 return NULL_TREE. */
738
739 static tree
740 negate_expr (tree t)
741 {
742 tree type, tem;
743 location_t loc;
744
745 if (t == NULL_TREE)
746 return NULL_TREE;
747
748 loc = EXPR_LOCATION (t);
749 type = TREE_TYPE (t);
750 STRIP_SIGN_NOPS (t);
751
752 tem = fold_negate_expr (loc, t);
753 if (!tem)
754 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
755 return fold_convert_loc (loc, type, tem);
756 }
757 \f
758 /* Split a tree IN into a constant, literal and variable parts that could be
759 combined with CODE to make IN. "constant" means an expression with
760 TREE_CONSTANT but that isn't an actual constant. CODE must be a
761 commutative arithmetic operation. Store the constant part into *CONP,
762 the literal in *LITP and return the variable part. If a part isn't
763 present, set it to null. If the tree does not decompose in this way,
764 return the entire tree as the variable part and the other parts as null.
765
766 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
767 case, we negate an operand that was subtracted. Except if it is a
768 literal for which we use *MINUS_LITP instead.
769
770 If NEGATE_P is true, we are negating all of IN, again except a literal
771 for which we use *MINUS_LITP instead. If a variable part is of pointer
772 type, it is negated after converting to TYPE. This prevents us from
773 generating illegal MINUS pointer expression. LOC is the location of
774 the converted variable part.
775
776 If IN is itself a literal or constant, return it as appropriate.
777
778 Note that we do not guarantee that any of the three values will be the
779 same type as IN, but they will have the same signedness and mode. */
780
781 static tree
782 split_tree (location_t loc, tree in, tree type, enum tree_code code,
783 tree *conp, tree *litp, tree *minus_litp, int negate_p)
784 {
785 tree var = 0;
786
787 *conp = 0;
788 *litp = 0;
789 *minus_litp = 0;
790
791 /* Strip any conversions that don't change the machine mode or signedness. */
792 STRIP_SIGN_NOPS (in);
793
794 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
795 || TREE_CODE (in) == FIXED_CST)
796 *litp = in;
797 else if (TREE_CODE (in) == code
798 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
799 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
800 /* We can associate addition and subtraction together (even
801 though the C standard doesn't say so) for integers because
802 the value is not affected. For reals, the value might be
803 affected, so we can't. */
804 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
805 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
806 {
807 tree op0 = TREE_OPERAND (in, 0);
808 tree op1 = TREE_OPERAND (in, 1);
809 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
810 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
811
812 /* First see if either of the operands is a literal, then a constant. */
813 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
814 || TREE_CODE (op0) == FIXED_CST)
815 *litp = op0, op0 = 0;
816 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
817 || TREE_CODE (op1) == FIXED_CST)
818 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
819
820 if (op0 != 0 && TREE_CONSTANT (op0))
821 *conp = op0, op0 = 0;
822 else if (op1 != 0 && TREE_CONSTANT (op1))
823 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
824
825 /* If we haven't dealt with either operand, this is not a case we can
826 decompose. Otherwise, VAR is either of the ones remaining, if any. */
827 if (op0 != 0 && op1 != 0)
828 var = in;
829 else if (op0 != 0)
830 var = op0;
831 else
832 var = op1, neg_var_p = neg1_p;
833
834 /* Now do any needed negations. */
835 if (neg_litp_p)
836 *minus_litp = *litp, *litp = 0;
837 if (neg_conp_p)
838 *conp = negate_expr (*conp);
839 if (neg_var_p)
840 {
841 /* Convert to TYPE before negating a pointer type expr. */
842 if (var && POINTER_TYPE_P (TREE_TYPE (var)))
843 var = fold_convert_loc (loc, type, var);
844 var = negate_expr (var);
845 }
846 }
847 else if (TREE_CODE (in) == BIT_NOT_EXPR
848 && code == PLUS_EXPR)
849 {
850 /* -X - 1 is folded to ~X, undo that here. */
851 *minus_litp = build_one_cst (TREE_TYPE (in));
852 var = negate_expr (TREE_OPERAND (in, 0));
853 }
854 else if (TREE_CONSTANT (in))
855 *conp = in;
856 else
857 var = in;
858
859 if (negate_p)
860 {
861 if (*litp)
862 *minus_litp = *litp, *litp = 0;
863 else if (*minus_litp)
864 *litp = *minus_litp, *minus_litp = 0;
865 *conp = negate_expr (*conp);
866 /* Convert to TYPE before negating a pointer type expr. */
867 if (var && POINTER_TYPE_P (TREE_TYPE (var)))
868 var = fold_convert_loc (loc, type, var);
869 var = negate_expr (var);
870 }
871
872 return var;
873 }
874
875 /* Re-associate trees split by the above function. T1 and T2 are
876 either expressions to associate or null. Return the new
877 expression, if any. LOC is the location of the new expression. If
878 we build an operation, do it in TYPE and with CODE. */
879
880 static tree
881 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
882 {
883 if (t1 == 0)
884 return t2;
885 else if (t2 == 0)
886 return t1;
887
888 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
889 try to fold this since we will have infinite recursion. But do
890 deal with any NEGATE_EXPRs. */
891 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
892 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
893 {
894 if (code == PLUS_EXPR)
895 {
896 if (TREE_CODE (t1) == NEGATE_EXPR)
897 return build2_loc (loc, MINUS_EXPR, type,
898 fold_convert_loc (loc, type, t2),
899 fold_convert_loc (loc, type,
900 TREE_OPERAND (t1, 0)));
901 else if (TREE_CODE (t2) == NEGATE_EXPR)
902 return build2_loc (loc, MINUS_EXPR, type,
903 fold_convert_loc (loc, type, t1),
904 fold_convert_loc (loc, type,
905 TREE_OPERAND (t2, 0)));
906 else if (integer_zerop (t2))
907 return fold_convert_loc (loc, type, t1);
908 }
909 else if (code == MINUS_EXPR)
910 {
911 if (integer_zerop (t2))
912 return fold_convert_loc (loc, type, t1);
913 }
914
915 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
916 fold_convert_loc (loc, type, t2));
917 }
918
919 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
920 fold_convert_loc (loc, type, t2));
921 }
922 \f
923 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
924 for use in int_const_binop, size_binop and size_diffop. */
925
926 static bool
927 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
928 {
929 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
930 return false;
931 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
932 return false;
933
934 switch (code)
935 {
936 case LSHIFT_EXPR:
937 case RSHIFT_EXPR:
938 case LROTATE_EXPR:
939 case RROTATE_EXPR:
940 return true;
941
942 default:
943 break;
944 }
945
946 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
947 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
948 && TYPE_MODE (type1) == TYPE_MODE (type2);
949 }
950
951
952 /* Combine two integer constants ARG1 and ARG2 under operation CODE
953 to produce a new constant. Return NULL_TREE if we don't know how
954 to evaluate CODE at compile-time. */
955
956 static tree
957 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
958 int overflowable)
959 {
960 wide_int res;
961 tree t;
962 tree type = TREE_TYPE (arg1);
963 signop sign = TYPE_SIGN (type);
964 bool overflow = false;
965
966 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
967 TYPE_SIGN (TREE_TYPE (parg2)));
968
969 switch (code)
970 {
971 case BIT_IOR_EXPR:
972 res = wi::bit_or (arg1, arg2);
973 break;
974
975 case BIT_XOR_EXPR:
976 res = wi::bit_xor (arg1, arg2);
977 break;
978
979 case BIT_AND_EXPR:
980 res = wi::bit_and (arg1, arg2);
981 break;
982
983 case RSHIFT_EXPR:
984 case LSHIFT_EXPR:
985 if (wi::neg_p (arg2))
986 {
987 arg2 = -arg2;
988 if (code == RSHIFT_EXPR)
989 code = LSHIFT_EXPR;
990 else
991 code = RSHIFT_EXPR;
992 }
993
994 if (code == RSHIFT_EXPR)
995 /* It's unclear from the C standard whether shifts can overflow.
996 The following code ignores overflow; perhaps a C standard
997 interpretation ruling is needed. */
998 res = wi::rshift (arg1, arg2, sign);
999 else
1000 res = wi::lshift (arg1, arg2);
1001 break;
1002
1003 case RROTATE_EXPR:
1004 case LROTATE_EXPR:
1005 if (wi::neg_p (arg2))
1006 {
1007 arg2 = -arg2;
1008 if (code == RROTATE_EXPR)
1009 code = LROTATE_EXPR;
1010 else
1011 code = RROTATE_EXPR;
1012 }
1013
1014 if (code == RROTATE_EXPR)
1015 res = wi::rrotate (arg1, arg2);
1016 else
1017 res = wi::lrotate (arg1, arg2);
1018 break;
1019
1020 case PLUS_EXPR:
1021 res = wi::add (arg1, arg2, sign, &overflow);
1022 break;
1023
1024 case MINUS_EXPR:
1025 res = wi::sub (arg1, arg2, sign, &overflow);
1026 break;
1027
1028 case MULT_EXPR:
1029 res = wi::mul (arg1, arg2, sign, &overflow);
1030 break;
1031
1032 case MULT_HIGHPART_EXPR:
1033 res = wi::mul_high (arg1, arg2, sign);
1034 break;
1035
1036 case TRUNC_DIV_EXPR:
1037 case EXACT_DIV_EXPR:
1038 if (arg2 == 0)
1039 return NULL_TREE;
1040 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1041 break;
1042
1043 case FLOOR_DIV_EXPR:
1044 if (arg2 == 0)
1045 return NULL_TREE;
1046 res = wi::div_floor (arg1, arg2, sign, &overflow);
1047 break;
1048
1049 case CEIL_DIV_EXPR:
1050 if (arg2 == 0)
1051 return NULL_TREE;
1052 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1053 break;
1054
1055 case ROUND_DIV_EXPR:
1056 if (arg2 == 0)
1057 return NULL_TREE;
1058 res = wi::div_round (arg1, arg2, sign, &overflow);
1059 break;
1060
1061 case TRUNC_MOD_EXPR:
1062 if (arg2 == 0)
1063 return NULL_TREE;
1064 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1065 break;
1066
1067 case FLOOR_MOD_EXPR:
1068 if (arg2 == 0)
1069 return NULL_TREE;
1070 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1071 break;
1072
1073 case CEIL_MOD_EXPR:
1074 if (arg2 == 0)
1075 return NULL_TREE;
1076 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1077 break;
1078
1079 case ROUND_MOD_EXPR:
1080 if (arg2 == 0)
1081 return NULL_TREE;
1082 res = wi::mod_round (arg1, arg2, sign, &overflow);
1083 break;
1084
1085 case MIN_EXPR:
1086 res = wi::min (arg1, arg2, sign);
1087 break;
1088
1089 case MAX_EXPR:
1090 res = wi::max (arg1, arg2, sign);
1091 break;
1092
1093 default:
1094 return NULL_TREE;
1095 }
1096
1097 t = force_fit_type (type, res, overflowable,
1098 (((sign == SIGNED || overflowable == -1)
1099 && overflow)
1100 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1101
1102 return t;
1103 }
1104
1105 tree
1106 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1107 {
1108 return int_const_binop_1 (code, arg1, arg2, 1);
1109 }
1110
1111 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1112 constant. We assume ARG1 and ARG2 have the same data type, or at least
1113 are the same kind of constant and the same machine mode. Return zero if
1114 combining the constants is not allowed in the current operating mode. */
1115
1116 static tree
1117 const_binop (enum tree_code code, tree arg1, tree arg2)
1118 {
1119 /* Sanity check for the recursive cases. */
1120 if (!arg1 || !arg2)
1121 return NULL_TREE;
1122
1123 STRIP_NOPS (arg1);
1124 STRIP_NOPS (arg2);
1125
1126 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1127 {
1128 if (code == POINTER_PLUS_EXPR)
1129 return int_const_binop (PLUS_EXPR,
1130 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1131
1132 return int_const_binop (code, arg1, arg2);
1133 }
1134
1135 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1136 {
1137 machine_mode mode;
1138 REAL_VALUE_TYPE d1;
1139 REAL_VALUE_TYPE d2;
1140 REAL_VALUE_TYPE value;
1141 REAL_VALUE_TYPE result;
1142 bool inexact;
1143 tree t, type;
1144
1145 /* The following codes are handled by real_arithmetic. */
1146 switch (code)
1147 {
1148 case PLUS_EXPR:
1149 case MINUS_EXPR:
1150 case MULT_EXPR:
1151 case RDIV_EXPR:
1152 case MIN_EXPR:
1153 case MAX_EXPR:
1154 break;
1155
1156 default:
1157 return NULL_TREE;
1158 }
1159
1160 d1 = TREE_REAL_CST (arg1);
1161 d2 = TREE_REAL_CST (arg2);
1162
1163 type = TREE_TYPE (arg1);
1164 mode = TYPE_MODE (type);
1165
1166 /* Don't perform operation if we honor signaling NaNs and
1167 either operand is a signaling NaN. */
1168 if (HONOR_SNANS (mode)
1169 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1170 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1171 return NULL_TREE;
1172
1173 /* Don't perform operation if it would raise a division
1174 by zero exception. */
1175 if (code == RDIV_EXPR
1176 && real_equal (&d2, &dconst0)
1177 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1178 return NULL_TREE;
1179
1180 /* If either operand is a NaN, just return it. Otherwise, set up
1181 for floating-point trap; we return an overflow. */
1182 if (REAL_VALUE_ISNAN (d1))
1183 {
1184 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1185 is off. */
1186 d1.signalling = 0;
1187 t = build_real (type, d1);
1188 return t;
1189 }
1190 else if (REAL_VALUE_ISNAN (d2))
1191 {
1192 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1193 is off. */
1194 d2.signalling = 0;
1195 t = build_real (type, d2);
1196 return t;
1197 }
1198
1199 inexact = real_arithmetic (&value, code, &d1, &d2);
1200 real_convert (&result, mode, &value);
1201
1202 /* Don't constant fold this floating point operation if
1203 the result has overflowed and flag_trapping_math. */
1204 if (flag_trapping_math
1205 && MODE_HAS_INFINITIES (mode)
1206 && REAL_VALUE_ISINF (result)
1207 && !REAL_VALUE_ISINF (d1)
1208 && !REAL_VALUE_ISINF (d2))
1209 return NULL_TREE;
1210
1211 /* Don't constant fold this floating point operation if the
1212 result may dependent upon the run-time rounding mode and
1213 flag_rounding_math is set, or if GCC's software emulation
1214 is unable to accurately represent the result. */
1215 if ((flag_rounding_math
1216 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1217 && (inexact || !real_identical (&result, &value)))
1218 return NULL_TREE;
1219
1220 t = build_real (type, result);
1221
1222 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1223 return t;
1224 }
1225
1226 if (TREE_CODE (arg1) == FIXED_CST)
1227 {
1228 FIXED_VALUE_TYPE f1;
1229 FIXED_VALUE_TYPE f2;
1230 FIXED_VALUE_TYPE result;
1231 tree t, type;
1232 int sat_p;
1233 bool overflow_p;
1234
1235 /* The following codes are handled by fixed_arithmetic. */
1236 switch (code)
1237 {
1238 case PLUS_EXPR:
1239 case MINUS_EXPR:
1240 case MULT_EXPR:
1241 case TRUNC_DIV_EXPR:
1242 if (TREE_CODE (arg2) != FIXED_CST)
1243 return NULL_TREE;
1244 f2 = TREE_FIXED_CST (arg2);
1245 break;
1246
1247 case LSHIFT_EXPR:
1248 case RSHIFT_EXPR:
1249 {
1250 if (TREE_CODE (arg2) != INTEGER_CST)
1251 return NULL_TREE;
1252 wide_int w2 = arg2;
1253 f2.data.high = w2.elt (1);
1254 f2.data.low = w2.elt (0);
1255 f2.mode = SImode;
1256 }
1257 break;
1258
1259 default:
1260 return NULL_TREE;
1261 }
1262
1263 f1 = TREE_FIXED_CST (arg1);
1264 type = TREE_TYPE (arg1);
1265 sat_p = TYPE_SATURATING (type);
1266 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1267 t = build_fixed (type, result);
1268 /* Propagate overflow flags. */
1269 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1270 TREE_OVERFLOW (t) = 1;
1271 return t;
1272 }
1273
1274 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1275 {
1276 tree type = TREE_TYPE (arg1);
1277 tree r1 = TREE_REALPART (arg1);
1278 tree i1 = TREE_IMAGPART (arg1);
1279 tree r2 = TREE_REALPART (arg2);
1280 tree i2 = TREE_IMAGPART (arg2);
1281 tree real, imag;
1282
1283 switch (code)
1284 {
1285 case PLUS_EXPR:
1286 case MINUS_EXPR:
1287 real = const_binop (code, r1, r2);
1288 imag = const_binop (code, i1, i2);
1289 break;
1290
1291 case MULT_EXPR:
1292 if (COMPLEX_FLOAT_TYPE_P (type))
1293 return do_mpc_arg2 (arg1, arg2, type,
1294 /* do_nonfinite= */ folding_initializer,
1295 mpc_mul);
1296
1297 real = const_binop (MINUS_EXPR,
1298 const_binop (MULT_EXPR, r1, r2),
1299 const_binop (MULT_EXPR, i1, i2));
1300 imag = const_binop (PLUS_EXPR,
1301 const_binop (MULT_EXPR, r1, i2),
1302 const_binop (MULT_EXPR, i1, r2));
1303 break;
1304
1305 case RDIV_EXPR:
1306 if (COMPLEX_FLOAT_TYPE_P (type))
1307 return do_mpc_arg2 (arg1, arg2, type,
1308 /* do_nonfinite= */ folding_initializer,
1309 mpc_div);
1310 /* Fallthru ... */
1311 case TRUNC_DIV_EXPR:
1312 case CEIL_DIV_EXPR:
1313 case FLOOR_DIV_EXPR:
1314 case ROUND_DIV_EXPR:
1315 if (flag_complex_method == 0)
1316 {
1317 /* Keep this algorithm in sync with
1318 tree-complex.c:expand_complex_div_straight().
1319
1320 Expand complex division to scalars, straightforward algorithm.
1321 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1322 t = br*br + bi*bi
1323 */
1324 tree magsquared
1325 = const_binop (PLUS_EXPR,
1326 const_binop (MULT_EXPR, r2, r2),
1327 const_binop (MULT_EXPR, i2, i2));
1328 tree t1
1329 = const_binop (PLUS_EXPR,
1330 const_binop (MULT_EXPR, r1, r2),
1331 const_binop (MULT_EXPR, i1, i2));
1332 tree t2
1333 = const_binop (MINUS_EXPR,
1334 const_binop (MULT_EXPR, i1, r2),
1335 const_binop (MULT_EXPR, r1, i2));
1336
1337 real = const_binop (code, t1, magsquared);
1338 imag = const_binop (code, t2, magsquared);
1339 }
1340 else
1341 {
1342 /* Keep this algorithm in sync with
1343 tree-complex.c:expand_complex_div_wide().
1344
1345 Expand complex division to scalars, modified algorithm to minimize
1346 overflow with wide input ranges. */
1347 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1348 fold_abs_const (r2, TREE_TYPE (type)),
1349 fold_abs_const (i2, TREE_TYPE (type)));
1350
1351 if (integer_nonzerop (compare))
1352 {
1353 /* In the TRUE branch, we compute
1354 ratio = br/bi;
1355 div = (br * ratio) + bi;
1356 tr = (ar * ratio) + ai;
1357 ti = (ai * ratio) - ar;
1358 tr = tr / div;
1359 ti = ti / div; */
1360 tree ratio = const_binop (code, r2, i2);
1361 tree div = const_binop (PLUS_EXPR, i2,
1362 const_binop (MULT_EXPR, r2, ratio));
1363 real = const_binop (MULT_EXPR, r1, ratio);
1364 real = const_binop (PLUS_EXPR, real, i1);
1365 real = const_binop (code, real, div);
1366
1367 imag = const_binop (MULT_EXPR, i1, ratio);
1368 imag = const_binop (MINUS_EXPR, imag, r1);
1369 imag = const_binop (code, imag, div);
1370 }
1371 else
1372 {
1373 /* In the FALSE branch, we compute
1374 ratio = d/c;
1375 divisor = (d * ratio) + c;
1376 tr = (b * ratio) + a;
1377 ti = b - (a * ratio);
1378 tr = tr / div;
1379 ti = ti / div; */
1380 tree ratio = const_binop (code, i2, r2);
1381 tree div = const_binop (PLUS_EXPR, r2,
1382 const_binop (MULT_EXPR, i2, ratio));
1383
1384 real = const_binop (MULT_EXPR, i1, ratio);
1385 real = const_binop (PLUS_EXPR, real, r1);
1386 real = const_binop (code, real, div);
1387
1388 imag = const_binop (MULT_EXPR, r1, ratio);
1389 imag = const_binop (MINUS_EXPR, i1, imag);
1390 imag = const_binop (code, imag, div);
1391 }
1392 }
1393 break;
1394
1395 default:
1396 return NULL_TREE;
1397 }
1398
1399 if (real && imag)
1400 return build_complex (type, real, imag);
1401 }
1402
1403 if (TREE_CODE (arg1) == VECTOR_CST
1404 && TREE_CODE (arg2) == VECTOR_CST)
1405 {
1406 tree type = TREE_TYPE (arg1);
1407 int count = TYPE_VECTOR_SUBPARTS (type), i;
1408 tree *elts = XALLOCAVEC (tree, count);
1409
1410 for (i = 0; i < count; i++)
1411 {
1412 tree elem1 = VECTOR_CST_ELT (arg1, i);
1413 tree elem2 = VECTOR_CST_ELT (arg2, i);
1414
1415 elts[i] = const_binop (code, elem1, elem2);
1416
1417 /* It is possible that const_binop cannot handle the given
1418 code and return NULL_TREE */
1419 if (elts[i] == NULL_TREE)
1420 return NULL_TREE;
1421 }
1422
1423 return build_vector (type, elts);
1424 }
1425
1426 /* Shifts allow a scalar offset for a vector. */
1427 if (TREE_CODE (arg1) == VECTOR_CST
1428 && TREE_CODE (arg2) == INTEGER_CST)
1429 {
1430 tree type = TREE_TYPE (arg1);
1431 int count = TYPE_VECTOR_SUBPARTS (type), i;
1432 tree *elts = XALLOCAVEC (tree, count);
1433
1434 for (i = 0; i < count; i++)
1435 {
1436 tree elem1 = VECTOR_CST_ELT (arg1, i);
1437
1438 elts[i] = const_binop (code, elem1, arg2);
1439
1440 /* It is possible that const_binop cannot handle the given
1441 code and return NULL_TREE. */
1442 if (elts[i] == NULL_TREE)
1443 return NULL_TREE;
1444 }
1445
1446 return build_vector (type, elts);
1447 }
1448 return NULL_TREE;
1449 }
1450
1451 /* Overload that adds a TYPE parameter to be able to dispatch
1452 to fold_relational_const. */
1453
1454 tree
1455 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1456 {
1457 if (TREE_CODE_CLASS (code) == tcc_comparison)
1458 return fold_relational_const (code, type, arg1, arg2);
1459
1460 /* ??? Until we make the const_binop worker take the type of the
1461 result as argument put those cases that need it here. */
1462 switch (code)
1463 {
1464 case COMPLEX_EXPR:
1465 if ((TREE_CODE (arg1) == REAL_CST
1466 && TREE_CODE (arg2) == REAL_CST)
1467 || (TREE_CODE (arg1) == INTEGER_CST
1468 && TREE_CODE (arg2) == INTEGER_CST))
1469 return build_complex (type, arg1, arg2);
1470 return NULL_TREE;
1471
1472 case VEC_PACK_TRUNC_EXPR:
1473 case VEC_PACK_FIX_TRUNC_EXPR:
1474 {
1475 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1476 tree *elts;
1477
1478 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1479 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1480 if (TREE_CODE (arg1) != VECTOR_CST
1481 || TREE_CODE (arg2) != VECTOR_CST)
1482 return NULL_TREE;
1483
1484 elts = XALLOCAVEC (tree, nelts);
1485 if (!vec_cst_ctor_to_array (arg1, elts)
1486 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1487 return NULL_TREE;
1488
1489 for (i = 0; i < nelts; i++)
1490 {
1491 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1492 ? NOP_EXPR : FIX_TRUNC_EXPR,
1493 TREE_TYPE (type), elts[i]);
1494 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1495 return NULL_TREE;
1496 }
1497
1498 return build_vector (type, elts);
1499 }
1500
1501 case VEC_WIDEN_MULT_LO_EXPR:
1502 case VEC_WIDEN_MULT_HI_EXPR:
1503 case VEC_WIDEN_MULT_EVEN_EXPR:
1504 case VEC_WIDEN_MULT_ODD_EXPR:
1505 {
1506 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1507 unsigned int out, ofs, scale;
1508 tree *elts;
1509
1510 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1511 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1512 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1513 return NULL_TREE;
1514
1515 elts = XALLOCAVEC (tree, nelts * 4);
1516 if (!vec_cst_ctor_to_array (arg1, elts)
1517 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1518 return NULL_TREE;
1519
1520 if (code == VEC_WIDEN_MULT_LO_EXPR)
1521 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1522 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1523 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1524 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1525 scale = 1, ofs = 0;
1526 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1527 scale = 1, ofs = 1;
1528
1529 for (out = 0; out < nelts; out++)
1530 {
1531 unsigned int in1 = (out << scale) + ofs;
1532 unsigned int in2 = in1 + nelts * 2;
1533 tree t1, t2;
1534
1535 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1536 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1537
1538 if (t1 == NULL_TREE || t2 == NULL_TREE)
1539 return NULL_TREE;
1540 elts[out] = const_binop (MULT_EXPR, t1, t2);
1541 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1542 return NULL_TREE;
1543 }
1544
1545 return build_vector (type, elts);
1546 }
1547
1548 default:;
1549 }
1550
1551 if (TREE_CODE_CLASS (code) != tcc_binary)
1552 return NULL_TREE;
1553
1554 /* Make sure type and arg0 have the same saturating flag. */
1555 gcc_checking_assert (TYPE_SATURATING (type)
1556 == TYPE_SATURATING (TREE_TYPE (arg1)));
1557
1558 return const_binop (code, arg1, arg2);
1559 }
1560
1561 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1562 Return zero if computing the constants is not possible. */
1563
1564 tree
1565 const_unop (enum tree_code code, tree type, tree arg0)
1566 {
1567 /* Don't perform the operation, other than NEGATE and ABS, if
1568 flag_signaling_nans is on and the operand is a signaling NaN. */
1569 if (TREE_CODE (arg0) == REAL_CST
1570 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1571 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1572 && code != NEGATE_EXPR
1573 && code != ABS_EXPR)
1574 return NULL_TREE;
1575
1576 switch (code)
1577 {
1578 CASE_CONVERT:
1579 case FLOAT_EXPR:
1580 case FIX_TRUNC_EXPR:
1581 case FIXED_CONVERT_EXPR:
1582 return fold_convert_const (code, type, arg0);
1583
1584 case ADDR_SPACE_CONVERT_EXPR:
1585 /* If the source address is 0, and the source address space
1586 cannot have a valid object at 0, fold to dest type null. */
1587 if (integer_zerop (arg0)
1588 && !(targetm.addr_space.zero_address_valid
1589 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1590 return fold_convert_const (code, type, arg0);
1591 break;
1592
1593 case VIEW_CONVERT_EXPR:
1594 return fold_view_convert_expr (type, arg0);
1595
1596 case NEGATE_EXPR:
1597 {
1598 /* Can't call fold_negate_const directly here as that doesn't
1599 handle all cases and we might not be able to negate some
1600 constants. */
1601 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1602 if (tem && CONSTANT_CLASS_P (tem))
1603 return tem;
1604 break;
1605 }
1606
1607 case ABS_EXPR:
1608 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1609 return fold_abs_const (arg0, type);
1610 break;
1611
1612 case CONJ_EXPR:
1613 if (TREE_CODE (arg0) == COMPLEX_CST)
1614 {
1615 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1616 TREE_TYPE (type));
1617 return build_complex (type, TREE_REALPART (arg0), ipart);
1618 }
1619 break;
1620
1621 case BIT_NOT_EXPR:
1622 if (TREE_CODE (arg0) == INTEGER_CST)
1623 return fold_not_const (arg0, type);
1624 /* Perform BIT_NOT_EXPR on each element individually. */
1625 else if (TREE_CODE (arg0) == VECTOR_CST)
1626 {
1627 tree *elements;
1628 tree elem;
1629 unsigned count = VECTOR_CST_NELTS (arg0), i;
1630
1631 elements = XALLOCAVEC (tree, count);
1632 for (i = 0; i < count; i++)
1633 {
1634 elem = VECTOR_CST_ELT (arg0, i);
1635 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1636 if (elem == NULL_TREE)
1637 break;
1638 elements[i] = elem;
1639 }
1640 if (i == count)
1641 return build_vector (type, elements);
1642 }
1643 break;
1644
1645 case TRUTH_NOT_EXPR:
1646 if (TREE_CODE (arg0) == INTEGER_CST)
1647 return constant_boolean_node (integer_zerop (arg0), type);
1648 break;
1649
1650 case REALPART_EXPR:
1651 if (TREE_CODE (arg0) == COMPLEX_CST)
1652 return fold_convert (type, TREE_REALPART (arg0));
1653 break;
1654
1655 case IMAGPART_EXPR:
1656 if (TREE_CODE (arg0) == COMPLEX_CST)
1657 return fold_convert (type, TREE_IMAGPART (arg0));
1658 break;
1659
1660 case VEC_UNPACK_LO_EXPR:
1661 case VEC_UNPACK_HI_EXPR:
1662 case VEC_UNPACK_FLOAT_LO_EXPR:
1663 case VEC_UNPACK_FLOAT_HI_EXPR:
1664 {
1665 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1666 tree *elts;
1667 enum tree_code subcode;
1668
1669 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1670 if (TREE_CODE (arg0) != VECTOR_CST)
1671 return NULL_TREE;
1672
1673 elts = XALLOCAVEC (tree, nelts * 2);
1674 if (!vec_cst_ctor_to_array (arg0, elts))
1675 return NULL_TREE;
1676
1677 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1678 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1679 elts += nelts;
1680
1681 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1682 subcode = NOP_EXPR;
1683 else
1684 subcode = FLOAT_EXPR;
1685
1686 for (i = 0; i < nelts; i++)
1687 {
1688 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1689 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1690 return NULL_TREE;
1691 }
1692
1693 return build_vector (type, elts);
1694 }
1695
1696 case REDUC_MIN_EXPR:
1697 case REDUC_MAX_EXPR:
1698 case REDUC_PLUS_EXPR:
1699 {
1700 unsigned int nelts, i;
1701 tree *elts;
1702 enum tree_code subcode;
1703
1704 if (TREE_CODE (arg0) != VECTOR_CST)
1705 return NULL_TREE;
1706 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1707
1708 elts = XALLOCAVEC (tree, nelts);
1709 if (!vec_cst_ctor_to_array (arg0, elts))
1710 return NULL_TREE;
1711
1712 switch (code)
1713 {
1714 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1715 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1716 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1717 default: gcc_unreachable ();
1718 }
1719
1720 for (i = 1; i < nelts; i++)
1721 {
1722 elts[0] = const_binop (subcode, elts[0], elts[i]);
1723 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1724 return NULL_TREE;
1725 }
1726
1727 return elts[0];
1728 }
1729
1730 default:
1731 break;
1732 }
1733
1734 return NULL_TREE;
1735 }
1736
1737 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1738 indicates which particular sizetype to create. */
1739
1740 tree
1741 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1742 {
1743 return build_int_cst (sizetype_tab[(int) kind], number);
1744 }
1745 \f
1746 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1747 is a tree code. The type of the result is taken from the operands.
1748 Both must be equivalent integer types, ala int_binop_types_match_p.
1749 If the operands are constant, so is the result. */
1750
1751 tree
1752 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1753 {
1754 tree type = TREE_TYPE (arg0);
1755
1756 if (arg0 == error_mark_node || arg1 == error_mark_node)
1757 return error_mark_node;
1758
1759 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1760 TREE_TYPE (arg1)));
1761
1762 /* Handle the special case of two integer constants faster. */
1763 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1764 {
1765 /* And some specific cases even faster than that. */
1766 if (code == PLUS_EXPR)
1767 {
1768 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1769 return arg1;
1770 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1771 return arg0;
1772 }
1773 else if (code == MINUS_EXPR)
1774 {
1775 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1776 return arg0;
1777 }
1778 else if (code == MULT_EXPR)
1779 {
1780 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1781 return arg1;
1782 }
1783
1784 /* Handle general case of two integer constants. For sizetype
1785 constant calculations we always want to know about overflow,
1786 even in the unsigned case. */
1787 return int_const_binop_1 (code, arg0, arg1, -1);
1788 }
1789
1790 return fold_build2_loc (loc, code, type, arg0, arg1);
1791 }
1792
1793 /* Given two values, either both of sizetype or both of bitsizetype,
1794 compute the difference between the two values. Return the value
1795 in signed type corresponding to the type of the operands. */
1796
1797 tree
1798 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1799 {
1800 tree type = TREE_TYPE (arg0);
1801 tree ctype;
1802
1803 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1804 TREE_TYPE (arg1)));
1805
1806 /* If the type is already signed, just do the simple thing. */
1807 if (!TYPE_UNSIGNED (type))
1808 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1809
1810 if (type == sizetype)
1811 ctype = ssizetype;
1812 else if (type == bitsizetype)
1813 ctype = sbitsizetype;
1814 else
1815 ctype = signed_type_for (type);
1816
1817 /* If either operand is not a constant, do the conversions to the signed
1818 type and subtract. The hardware will do the right thing with any
1819 overflow in the subtraction. */
1820 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1821 return size_binop_loc (loc, MINUS_EXPR,
1822 fold_convert_loc (loc, ctype, arg0),
1823 fold_convert_loc (loc, ctype, arg1));
1824
1825 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1826 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1827 overflow) and negate (which can't either). Special-case a result
1828 of zero while we're here. */
1829 if (tree_int_cst_equal (arg0, arg1))
1830 return build_int_cst (ctype, 0);
1831 else if (tree_int_cst_lt (arg1, arg0))
1832 return fold_convert_loc (loc, ctype,
1833 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1834 else
1835 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1836 fold_convert_loc (loc, ctype,
1837 size_binop_loc (loc,
1838 MINUS_EXPR,
1839 arg1, arg0)));
1840 }
1841 \f
1842 /* A subroutine of fold_convert_const handling conversions of an
1843 INTEGER_CST to another integer type. */
1844
1845 static tree
1846 fold_convert_const_int_from_int (tree type, const_tree arg1)
1847 {
1848 /* Given an integer constant, make new constant with new type,
1849 appropriately sign-extended or truncated. Use widest_int
1850 so that any extension is done according ARG1's type. */
1851 return force_fit_type (type, wi::to_widest (arg1),
1852 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1853 TREE_OVERFLOW (arg1));
1854 }
1855
1856 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1857 to an integer type. */
1858
1859 static tree
1860 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1861 {
1862 bool overflow = false;
1863 tree t;
1864
1865 /* The following code implements the floating point to integer
1866 conversion rules required by the Java Language Specification,
1867 that IEEE NaNs are mapped to zero and values that overflow
1868 the target precision saturate, i.e. values greater than
1869 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1870 are mapped to INT_MIN. These semantics are allowed by the
1871 C and C++ standards that simply state that the behavior of
1872 FP-to-integer conversion is unspecified upon overflow. */
1873
1874 wide_int val;
1875 REAL_VALUE_TYPE r;
1876 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1877
1878 switch (code)
1879 {
1880 case FIX_TRUNC_EXPR:
1881 real_trunc (&r, VOIDmode, &x);
1882 break;
1883
1884 default:
1885 gcc_unreachable ();
1886 }
1887
1888 /* If R is NaN, return zero and show we have an overflow. */
1889 if (REAL_VALUE_ISNAN (r))
1890 {
1891 overflow = true;
1892 val = wi::zero (TYPE_PRECISION (type));
1893 }
1894
1895 /* See if R is less than the lower bound or greater than the
1896 upper bound. */
1897
1898 if (! overflow)
1899 {
1900 tree lt = TYPE_MIN_VALUE (type);
1901 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1902 if (real_less (&r, &l))
1903 {
1904 overflow = true;
1905 val = lt;
1906 }
1907 }
1908
1909 if (! overflow)
1910 {
1911 tree ut = TYPE_MAX_VALUE (type);
1912 if (ut)
1913 {
1914 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1915 if (real_less (&u, &r))
1916 {
1917 overflow = true;
1918 val = ut;
1919 }
1920 }
1921 }
1922
1923 if (! overflow)
1924 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1925
1926 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1927 return t;
1928 }
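
/* For example, converting the REAL_CST 1.0e30 to a 32-bit signed
   integer type saturates to INT_MAX (2147483647) with TREE_OVERFLOW
   set on the result, -1.0e30 saturates to INT_MIN, and a NaN is
   mapped to zero, exactly as described in the comment above.  */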
1929
1930 /* A subroutine of fold_convert_const handling conversions of a
1931 FIXED_CST to an integer type. */
1932
1933 static tree
1934 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1935 {
1936 tree t;
1937 double_int temp, temp_trunc;
1938 unsigned int mode;
1939
1940 /* Right shift FIXED_CST to temp by fbit. */
1941 temp = TREE_FIXED_CST (arg1).data;
1942 mode = TREE_FIXED_CST (arg1).mode;
1943 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1944 {
1945 temp = temp.rshift (GET_MODE_FBIT (mode),
1946 HOST_BITS_PER_DOUBLE_INT,
1947 SIGNED_FIXED_POINT_MODE_P (mode));
1948
1949 /* Left shift temp to temp_trunc by fbit. */
1950 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1951 HOST_BITS_PER_DOUBLE_INT,
1952 SIGNED_FIXED_POINT_MODE_P (mode));
1953 }
1954 else
1955 {
1956 temp = double_int_zero;
1957 temp_trunc = double_int_zero;
1958 }
1959
1960 /* If FIXED_CST is negative, we need to round the value toward 0:
1961 if the fractional bits are nonzero, add 1 to temp. */
1962 if (SIGNED_FIXED_POINT_MODE_P (mode)
1963 && temp_trunc.is_negative ()
1964 && TREE_FIXED_CST (arg1).data != temp_trunc)
1965 temp += double_int_one;
1966
1967 /* Given a fixed-point constant, make new constant with new type,
1968 appropriately sign-extended or truncated. */
1969 t = force_fit_type (type, temp, -1,
1970 (temp.is_negative ()
1971 && (TYPE_UNSIGNED (type)
1972 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1973 | TREE_OVERFLOW (arg1));
1974
1975 return t;
1976 }
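
/* Worked example: converting the signed fixed-point value -3.25.
   The arithmetic right shift by fbit rounds toward -infinity and
   gives temp = -4; shifting back yields temp_trunc = -4.0, which
   differs from the original -3.25, so 1 is added and the result is
   the correctly truncated value -3.  */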
1977
1978 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1979 to another floating point type. */
1980
1981 static tree
1982 fold_convert_const_real_from_real (tree type, const_tree arg1)
1983 {
1984 REAL_VALUE_TYPE value;
1985 tree t;
1986
1987 /* Don't perform the operation if flag_signaling_nans is on
1988 and the operand is a signaling NaN. */
1989 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
1990 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
1991 return NULL_TREE;
1992
1993 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1994 t = build_real (type, value);
1995
1996 /* If converting an infinity or NAN to a representation that doesn't
1997 have one, set the overflow bit so that we can produce some kind of
1998 error message at the appropriate point if necessary. It's not the
1999 most user-friendly message, but it's better than nothing. */
2000 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2001 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2002 TREE_OVERFLOW (t) = 1;
2003 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2004 && !MODE_HAS_NANS (TYPE_MODE (type)))
2005 TREE_OVERFLOW (t) = 1;
2006 /* Regular overflow, conversion produced an infinity in a mode that
2007 can't represent them. */
2008 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2009 && REAL_VALUE_ISINF (value)
2010 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2011 TREE_OVERFLOW (t) = 1;
2012 else
2013 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2014 return t;
2015 }
2016
2017 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2018 to a floating point type. */
2019
2020 static tree
2021 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2022 {
2023 REAL_VALUE_TYPE value;
2024 tree t;
2025
2026 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2027 t = build_real (type, value);
2028
2029 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2030 return t;
2031 }
2032
2033 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2034 to another fixed-point type. */
2035
2036 static tree
2037 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2038 {
2039 FIXED_VALUE_TYPE value;
2040 tree t;
2041 bool overflow_p;
2042
2043 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2044 TYPE_SATURATING (type));
2045 t = build_fixed (type, value);
2046
2047 /* Propagate overflow flags. */
2048 if (overflow_p | TREE_OVERFLOW (arg1))
2049 TREE_OVERFLOW (t) = 1;
2050 return t;
2051 }
2052
2053 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2054 to a fixed-point type. */
2055
2056 static tree
2057 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2058 {
2059 FIXED_VALUE_TYPE value;
2060 tree t;
2061 bool overflow_p;
2062 double_int di;
2063
2064 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2065
2066 di.low = TREE_INT_CST_ELT (arg1, 0);
2067 if (TREE_INT_CST_NUNITS (arg1) == 1)
2068 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2069 else
2070 di.high = TREE_INT_CST_ELT (arg1, 1);
2071
2072 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2073 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2074 TYPE_SATURATING (type));
2075 t = build_fixed (type, value);
2076
2077 /* Propagate overflow flags. */
2078 if (overflow_p | TREE_OVERFLOW (arg1))
2079 TREE_OVERFLOW (t) = 1;
2080 return t;
2081 }
2082
2083 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2084 to a fixed-point type. */
2085
2086 static tree
2087 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2088 {
2089 FIXED_VALUE_TYPE value;
2090 tree t;
2091 bool overflow_p;
2092
2093 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2094 &TREE_REAL_CST (arg1),
2095 TYPE_SATURATING (type));
2096 t = build_fixed (type, value);
2097
2098 /* Propagate overflow flags. */
2099 if (overflow_p | TREE_OVERFLOW (arg1))
2100 TREE_OVERFLOW (t) = 1;
2101 return t;
2102 }
2103
2104 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2105 type TYPE. If no simplification can be done return NULL_TREE. */
2106
2107 static tree
2108 fold_convert_const (enum tree_code code, tree type, tree arg1)
2109 {
2110 if (TREE_TYPE (arg1) == type)
2111 return arg1;
2112
2113 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2114 || TREE_CODE (type) == OFFSET_TYPE)
2115 {
2116 if (TREE_CODE (arg1) == INTEGER_CST)
2117 return fold_convert_const_int_from_int (type, arg1);
2118 else if (TREE_CODE (arg1) == REAL_CST)
2119 return fold_convert_const_int_from_real (code, type, arg1);
2120 else if (TREE_CODE (arg1) == FIXED_CST)
2121 return fold_convert_const_int_from_fixed (type, arg1);
2122 }
2123 else if (TREE_CODE (type) == REAL_TYPE)
2124 {
2125 if (TREE_CODE (arg1) == INTEGER_CST)
2126 return build_real_from_int_cst (type, arg1);
2127 else if (TREE_CODE (arg1) == REAL_CST)
2128 return fold_convert_const_real_from_real (type, arg1);
2129 else if (TREE_CODE (arg1) == FIXED_CST)
2130 return fold_convert_const_real_from_fixed (type, arg1);
2131 }
2132 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2133 {
2134 if (TREE_CODE (arg1) == FIXED_CST)
2135 return fold_convert_const_fixed_from_fixed (type, arg1);
2136 else if (TREE_CODE (arg1) == INTEGER_CST)
2137 return fold_convert_const_fixed_from_int (type, arg1);
2138 else if (TREE_CODE (arg1) == REAL_CST)
2139 return fold_convert_const_fixed_from_real (type, arg1);
2140 }
2141 else if (TREE_CODE (type) == VECTOR_TYPE)
2142 {
2143 if (TREE_CODE (arg1) == VECTOR_CST
2144 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2145 {
2146 int len = TYPE_VECTOR_SUBPARTS (type);
2147 tree elttype = TREE_TYPE (type);
2148 tree *v = XALLOCAVEC (tree, len);
2149 for (int i = 0; i < len; ++i)
2150 {
2151 tree elt = VECTOR_CST_ELT (arg1, i);
2152 tree cvt = fold_convert_const (code, elttype, elt);
2153 if (cvt == NULL_TREE)
2154 return NULL_TREE;
2155 v[i] = cvt;
2156 }
2157 return build_vector (type, v);
2158 }
2159 }
2160 return NULL_TREE;
2161 }
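
/* A sketch of a typical call, folding the narrowing conversion
   (signed char) 300 at compile time:

     tree cst = fold_convert_const (NOP_EXPR, signed_char_type_node,
                                    build_int_cst (integer_type_node, 300));

   This dispatches to fold_convert_const_int_from_int above and returns
   an INTEGER_CST of value 44 (300 truncated to 8 bits).  */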
2162
2163 /* Construct a vector of zero elements of vector type TYPE. */
2164
2165 static tree
2166 build_zero_vector (tree type)
2167 {
2168 tree t;
2169
2170 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2171 return build_vector_from_val (type, t);
2172 }
2173
2174 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2175
2176 bool
2177 fold_convertible_p (const_tree type, const_tree arg)
2178 {
2179 tree orig = TREE_TYPE (arg);
2180
2181 if (type == orig)
2182 return true;
2183
2184 if (TREE_CODE (arg) == ERROR_MARK
2185 || TREE_CODE (type) == ERROR_MARK
2186 || TREE_CODE (orig) == ERROR_MARK)
2187 return false;
2188
2189 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2190 return true;
2191
2192 switch (TREE_CODE (type))
2193 {
2194 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2195 case POINTER_TYPE: case REFERENCE_TYPE:
2196 case OFFSET_TYPE:
2197 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2198 || TREE_CODE (orig) == OFFSET_TYPE);
2199
2200 case REAL_TYPE:
2201 case FIXED_POINT_TYPE:
2202 case COMPLEX_TYPE:
2203 case VECTOR_TYPE:
2204 case VOID_TYPE:
2205 return TREE_CODE (type) == TREE_CODE (orig);
2206
2207 default:
2208 return false;
2209 }
2210 }
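
/* For instance, fold_convertible_p is true for an int-typed ARG and a
   long target type (both are INTEGRAL_TYPE_P), but false when TYPE is
   an INTEGER_TYPE and ARG has REAL_TYPE, since that conversion is not
   expressible as a plain NOP_EXPR.  */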
2211
2212 /* Convert expression ARG to type TYPE. Used by the middle-end for
2213 simple conversions in preference to calling the front-end's convert. */
2214
2215 tree
2216 fold_convert_loc (location_t loc, tree type, tree arg)
2217 {
2218 tree orig = TREE_TYPE (arg);
2219 tree tem;
2220
2221 if (type == orig)
2222 return arg;
2223
2224 if (TREE_CODE (arg) == ERROR_MARK
2225 || TREE_CODE (type) == ERROR_MARK
2226 || TREE_CODE (orig) == ERROR_MARK)
2227 return error_mark_node;
2228
2229 switch (TREE_CODE (type))
2230 {
2231 case POINTER_TYPE:
2232 case REFERENCE_TYPE:
2233 /* Handle conversions between pointers to different address spaces. */
2234 if (POINTER_TYPE_P (orig)
2235 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2236 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2237 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2238 /* fall through */
2239
2240 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2241 case OFFSET_TYPE:
2242 if (TREE_CODE (arg) == INTEGER_CST)
2243 {
2244 tem = fold_convert_const (NOP_EXPR, type, arg);
2245 if (tem != NULL_TREE)
2246 return tem;
2247 }
2248 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2249 || TREE_CODE (orig) == OFFSET_TYPE)
2250 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2251 if (TREE_CODE (orig) == COMPLEX_TYPE)
2252 return fold_convert_loc (loc, type,
2253 fold_build1_loc (loc, REALPART_EXPR,
2254 TREE_TYPE (orig), arg));
2255 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2256 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2257 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2258
2259 case REAL_TYPE:
2260 if (TREE_CODE (arg) == INTEGER_CST)
2261 {
2262 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2263 if (tem != NULL_TREE)
2264 return tem;
2265 }
2266 else if (TREE_CODE (arg) == REAL_CST)
2267 {
2268 tem = fold_convert_const (NOP_EXPR, type, arg);
2269 if (tem != NULL_TREE)
2270 return tem;
2271 }
2272 else if (TREE_CODE (arg) == FIXED_CST)
2273 {
2274 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2275 if (tem != NULL_TREE)
2276 return tem;
2277 }
2278
2279 switch (TREE_CODE (orig))
2280 {
2281 case INTEGER_TYPE:
2282 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2283 case POINTER_TYPE: case REFERENCE_TYPE:
2284 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2285
2286 case REAL_TYPE:
2287 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2288
2289 case FIXED_POINT_TYPE:
2290 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2291
2292 case COMPLEX_TYPE:
2293 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2294 return fold_convert_loc (loc, type, tem);
2295
2296 default:
2297 gcc_unreachable ();
2298 }
2299
2300 case FIXED_POINT_TYPE:
2301 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2302 || TREE_CODE (arg) == REAL_CST)
2303 {
2304 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2305 if (tem != NULL_TREE)
2306 goto fold_convert_exit;
2307 }
2308
2309 switch (TREE_CODE (orig))
2310 {
2311 case FIXED_POINT_TYPE:
2312 case INTEGER_TYPE:
2313 case ENUMERAL_TYPE:
2314 case BOOLEAN_TYPE:
2315 case REAL_TYPE:
2316 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2317
2318 case COMPLEX_TYPE:
2319 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2320 return fold_convert_loc (loc, type, tem);
2321
2322 default:
2323 gcc_unreachable ();
2324 }
2325
2326 case COMPLEX_TYPE:
2327 switch (TREE_CODE (orig))
2328 {
2329 case INTEGER_TYPE:
2330 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2331 case POINTER_TYPE: case REFERENCE_TYPE:
2332 case REAL_TYPE:
2333 case FIXED_POINT_TYPE:
2334 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2335 fold_convert_loc (loc, TREE_TYPE (type), arg),
2336 fold_convert_loc (loc, TREE_TYPE (type),
2337 integer_zero_node));
2338 case COMPLEX_TYPE:
2339 {
2340 tree rpart, ipart;
2341
2342 if (TREE_CODE (arg) == COMPLEX_EXPR)
2343 {
2344 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2345 TREE_OPERAND (arg, 0));
2346 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2347 TREE_OPERAND (arg, 1));
2348 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2349 }
2350
2351 arg = save_expr (arg);
2352 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2353 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2354 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2355 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2356 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2357 }
2358
2359 default:
2360 gcc_unreachable ();
2361 }
2362
2363 case VECTOR_TYPE:
2364 if (integer_zerop (arg))
2365 return build_zero_vector (type);
2366 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2367 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2368 || TREE_CODE (orig) == VECTOR_TYPE);
2369 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2370
2371 case VOID_TYPE:
2372 tem = fold_ignored_result (arg);
2373 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2374
2375 default:
2376 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2377 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2378 gcc_unreachable ();
2379 }
2380 fold_convert_exit:
2381 protected_set_expr_location_unshare (tem, loc);
2382 return tem;
2383 }
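
/* For example, converting a double ARG to complex double through the
   COMPLEX_TYPE case above builds COMPLEX_EXPR <arg, 0.0>, while
   converting a COMPLEX_EXPR to another complex type converts the real
   and imaginary parts separately and rebuilds the COMPLEX_EXPR.  */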
2384 \f
2385 /* Return false if expr can be assumed not to be an lvalue, true
2386 otherwise. */
2387
2388 static bool
2389 maybe_lvalue_p (const_tree x)
2390 {
2391 /* We only need to wrap lvalue tree codes. */
2392 switch (TREE_CODE (x))
2393 {
2394 case VAR_DECL:
2395 case PARM_DECL:
2396 case RESULT_DECL:
2397 case LABEL_DECL:
2398 case FUNCTION_DECL:
2399 case SSA_NAME:
2400
2401 case COMPONENT_REF:
2402 case MEM_REF:
2403 case INDIRECT_REF:
2404 case ARRAY_REF:
2405 case ARRAY_RANGE_REF:
2406 case BIT_FIELD_REF:
2407 case OBJ_TYPE_REF:
2408
2409 case REALPART_EXPR:
2410 case IMAGPART_EXPR:
2411 case PREINCREMENT_EXPR:
2412 case PREDECREMENT_EXPR:
2413 case SAVE_EXPR:
2414 case TRY_CATCH_EXPR:
2415 case WITH_CLEANUP_EXPR:
2416 case COMPOUND_EXPR:
2417 case MODIFY_EXPR:
2418 case TARGET_EXPR:
2419 case COND_EXPR:
2420 case BIND_EXPR:
2421 break;
2422
2423 default:
2424 /* Assume the worst for front-end tree codes. */
2425 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2426 break;
2427 return false;
2428 }
2429
2430 return true;
2431 }
2432
2433 /* Return an expr equal to X but certainly not valid as an lvalue. */
2434
2435 tree
2436 non_lvalue_loc (location_t loc, tree x)
2437 {
2438 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2439 us. */
2440 if (in_gimple_form)
2441 return x;
2442
2443 if (! maybe_lvalue_p (x))
2444 return x;
2445 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2446 }
2447
2448 /* When pedantic, return an expr equal to X but certainly not valid as a
2449 pedantic lvalue. Otherwise, return X. */
2450
2451 static tree
2452 pedantic_non_lvalue_loc (location_t loc, tree x)
2453 {
2454 return protected_set_expr_location_unshare (x, loc);
2455 }
2456 \f
2457 /* Given a tree comparison code, return the code that is the logical inverse.
2458 It is generally not safe to do this for floating-point comparisons, except
2459 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2460 ERROR_MARK in this case. */
2461
2462 enum tree_code
2463 invert_tree_comparison (enum tree_code code, bool honor_nans)
2464 {
2465 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2466 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2467 return ERROR_MARK;
2468
2469 switch (code)
2470 {
2471 case EQ_EXPR:
2472 return NE_EXPR;
2473 case NE_EXPR:
2474 return EQ_EXPR;
2475 case GT_EXPR:
2476 return honor_nans ? UNLE_EXPR : LE_EXPR;
2477 case GE_EXPR:
2478 return honor_nans ? UNLT_EXPR : LT_EXPR;
2479 case LT_EXPR:
2480 return honor_nans ? UNGE_EXPR : GE_EXPR;
2481 case LE_EXPR:
2482 return honor_nans ? UNGT_EXPR : GT_EXPR;
2483 case LTGT_EXPR:
2484 return UNEQ_EXPR;
2485 case UNEQ_EXPR:
2486 return LTGT_EXPR;
2487 case UNGT_EXPR:
2488 return LE_EXPR;
2489 case UNGE_EXPR:
2490 return LT_EXPR;
2491 case UNLT_EXPR:
2492 return GE_EXPR;
2493 case UNLE_EXPR:
2494 return GT_EXPR;
2495 case ORDERED_EXPR:
2496 return UNORDERED_EXPR;
2497 case UNORDERED_EXPR:
2498 return ORDERED_EXPR;
2499 default:
2500 gcc_unreachable ();
2501 }
2502 }
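
/* For example, invert_tree_comparison (LT_EXPR, true) is UNGE_EXPR,
   so the inverse of x < y remains correct when an operand is a NaN;
   with honor_nans false it is plain GE_EXPR.  If both honor_nans and
   flag_trapping_math are set, LT_EXPR yields ERROR_MARK instead,
   since inverting it would change which operands trap.  */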
2503
2504 /* Similar, but return the comparison that results if the operands are
2505 swapped. This is safe for floating-point. */
2506
2507 enum tree_code
2508 swap_tree_comparison (enum tree_code code)
2509 {
2510 switch (code)
2511 {
2512 case EQ_EXPR:
2513 case NE_EXPR:
2514 case ORDERED_EXPR:
2515 case UNORDERED_EXPR:
2516 case LTGT_EXPR:
2517 case UNEQ_EXPR:
2518 return code;
2519 case GT_EXPR:
2520 return LT_EXPR;
2521 case GE_EXPR:
2522 return LE_EXPR;
2523 case LT_EXPR:
2524 return GT_EXPR;
2525 case LE_EXPR:
2526 return GE_EXPR;
2527 case UNGT_EXPR:
2528 return UNLT_EXPR;
2529 case UNGE_EXPR:
2530 return UNLE_EXPR;
2531 case UNLT_EXPR:
2532 return UNGT_EXPR;
2533 case UNLE_EXPR:
2534 return UNGE_EXPR;
2535 default:
2536 gcc_unreachable ();
2537 }
2538 }
2539
2540
2541 /* Convert a comparison tree code from an enum tree_code representation
2542 into a compcode bit-based encoding. This function is the inverse of
2543 compcode_to_comparison. */
2544
2545 static enum comparison_code
2546 comparison_to_compcode (enum tree_code code)
2547 {
2548 switch (code)
2549 {
2550 case LT_EXPR:
2551 return COMPCODE_LT;
2552 case EQ_EXPR:
2553 return COMPCODE_EQ;
2554 case LE_EXPR:
2555 return COMPCODE_LE;
2556 case GT_EXPR:
2557 return COMPCODE_GT;
2558 case NE_EXPR:
2559 return COMPCODE_NE;
2560 case GE_EXPR:
2561 return COMPCODE_GE;
2562 case ORDERED_EXPR:
2563 return COMPCODE_ORD;
2564 case UNORDERED_EXPR:
2565 return COMPCODE_UNORD;
2566 case UNLT_EXPR:
2567 return COMPCODE_UNLT;
2568 case UNEQ_EXPR:
2569 return COMPCODE_UNEQ;
2570 case UNLE_EXPR:
2571 return COMPCODE_UNLE;
2572 case UNGT_EXPR:
2573 return COMPCODE_UNGT;
2574 case LTGT_EXPR:
2575 return COMPCODE_LTGT;
2576 case UNGE_EXPR:
2577 return COMPCODE_UNGE;
2578 default:
2579 gcc_unreachable ();
2580 }
2581 }
2582
2583 /* Convert a compcode bit-based encoding of a comparison operator back
2584 to GCC's enum tree_code representation. This function is the
2585 inverse of comparison_to_compcode. */
2586
2587 static enum tree_code
2588 compcode_to_comparison (enum comparison_code code)
2589 {
2590 switch (code)
2591 {
2592 case COMPCODE_LT:
2593 return LT_EXPR;
2594 case COMPCODE_EQ:
2595 return EQ_EXPR;
2596 case COMPCODE_LE:
2597 return LE_EXPR;
2598 case COMPCODE_GT:
2599 return GT_EXPR;
2600 case COMPCODE_NE:
2601 return NE_EXPR;
2602 case COMPCODE_GE:
2603 return GE_EXPR;
2604 case COMPCODE_ORD:
2605 return ORDERED_EXPR;
2606 case COMPCODE_UNORD:
2607 return UNORDERED_EXPR;
2608 case COMPCODE_UNLT:
2609 return UNLT_EXPR;
2610 case COMPCODE_UNEQ:
2611 return UNEQ_EXPR;
2612 case COMPCODE_UNLE:
2613 return UNLE_EXPR;
2614 case COMPCODE_UNGT:
2615 return UNGT_EXPR;
2616 case COMPCODE_LTGT:
2617 return LTGT_EXPR;
2618 case COMPCODE_UNGE:
2619 return UNGE_EXPR;
2620 default:
2621 gcc_unreachable ();
2622 }
2623 }
2624
2625 /* Return a tree for the comparison which is the combination of
2626 doing the AND or OR (depending on CODE) of the two operations LCODE
2627 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2628 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2629 if this makes the transformation invalid. */
2630
2631 tree
2632 combine_comparisons (location_t loc,
2633 enum tree_code code, enum tree_code lcode,
2634 enum tree_code rcode, tree truth_type,
2635 tree ll_arg, tree lr_arg)
2636 {
2637 bool honor_nans = HONOR_NANS (ll_arg);
2638 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2639 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2640 int compcode;
2641
2642 switch (code)
2643 {
2644 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2645 compcode = lcompcode & rcompcode;
2646 break;
2647
2648 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2649 compcode = lcompcode | rcompcode;
2650 break;
2651
2652 default:
2653 return NULL_TREE;
2654 }
2655
2656 if (!honor_nans)
2657 {
2658 /* Eliminate unordered comparisons, as well as LTGT and ORD
2659 which are not used unless the mode has NaNs. */
2660 compcode &= ~COMPCODE_UNORD;
2661 if (compcode == COMPCODE_LTGT)
2662 compcode = COMPCODE_NE;
2663 else if (compcode == COMPCODE_ORD)
2664 compcode = COMPCODE_TRUE;
2665 }
2666 else if (flag_trapping_math)
2667 {
2668 /* Check that the original operation and the optimized ones will trap
2669 under the same condition. */
2670 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2671 && (lcompcode != COMPCODE_EQ)
2672 && (lcompcode != COMPCODE_ORD);
2673 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2674 && (rcompcode != COMPCODE_EQ)
2675 && (rcompcode != COMPCODE_ORD);
2676 bool trap = (compcode & COMPCODE_UNORD) == 0
2677 && (compcode != COMPCODE_EQ)
2678 && (compcode != COMPCODE_ORD);
2679
2680 /* In a short-circuited boolean expression the LHS might be
2681 such that the RHS, if evaluated, will never trap. For
2682 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2683 if neither x nor y is NaN. (This is a mixed blessing: for
2684 example, the expression above will never trap, hence
2685 optimizing it to x < y would be invalid). */
2686 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2687 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2688 rtrap = false;
2689
2690 /* If the comparison was short-circuited, and only the RHS
2691 trapped, we may now generate a spurious trap. */
2692 if (rtrap && !ltrap
2693 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2694 return NULL_TREE;
2695
2696 /* If we changed the conditions that cause a trap, we lose. */
2697 if ((ltrap || rtrap) != trap)
2698 return NULL_TREE;
2699 }
2700
2701 if (compcode == COMPCODE_TRUE)
2702 return constant_boolean_node (true, truth_type);
2703 else if (compcode == COMPCODE_FALSE)
2704 return constant_boolean_node (false, truth_type);
2705 else
2706 {
2707 enum tree_code tcode;
2708
2709 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2710 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2711 }
2712 }
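
/* Worked example: for (x < y) || (x == y) the bit encoding gives
   COMPCODE_LT (1) | COMPCODE_EQ (2) = COMPCODE_LE (3), so the pair
   folds to x <= y.  Conversely, (x < y) && (x == y) combines to
   COMPCODE_LT & COMPCODE_EQ = COMPCODE_FALSE, i.e. constant false.  */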
2713 \f
2714 /* Return nonzero if two operands (typically of the same tree node)
2715 are necessarily equal. FLAGS modifies behavior as follows:
2716
2717 If OEP_ONLY_CONST is set, only return nonzero for constants.
2718 This function tests whether the operands are indistinguishable;
2719 it does not test whether they are equal using C's == operation.
2720 The distinction is important for IEEE floating point, because
2721 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2722 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2723
2724 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2725 even though it may hold multiple values during a function.
2726 This is because a GCC tree node guarantees that nothing else is
2727 executed between the evaluation of its "operands" (which may often
2728 be evaluated in arbitrary order). Hence if the operands themselves
2729 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2730 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2731 unset means assuming isochronic (or instantaneous) tree equivalence.
2732 Unless comparing arbitrary expression trees, such as from different
2733 statements, this flag can usually be left unset.
2734
2735 If OEP_PURE_SAME is set, then pure functions with identical arguments
2736 are considered the same. It is used when the caller has other ways
2737 to ensure that global memory is unchanged in between.
2738
2739 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2740 not values of expressions.
2741
2742 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2743 any operand with side effects. This is unnecessarily conservative in the
2744 case we know that arg0 and arg1 are in disjoint code paths (such as in
2745 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2746 addresses with TREE_CONSTANT flag set so we know that &var == &var
2747 even if var is volatile. */
2748
2749 int
2750 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2751 {
2752 /* If either is ERROR_MARK, they aren't equal. */
2753 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2754 || TREE_TYPE (arg0) == error_mark_node
2755 || TREE_TYPE (arg1) == error_mark_node)
2756 return 0;
2757
2758 /* Similarly, if either does not have a type (like a released SSA name),
2759 they aren't equal. */
2760 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2761 return 0;
2762
2763 /* We cannot consider pointers to different address space equal. */
2764 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2765 && POINTER_TYPE_P (TREE_TYPE (arg1))
2766 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2767 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2768 return 0;
2769
2770 /* Check equality of integer constants before bailing out due to
2771 precision differences. */
2772 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2773 {
2774 /* Address of INTEGER_CST is not defined; check that we did not forget
2775 to drop the OEP_ADDRESS_OF flags. */
2776 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2777 return tree_int_cst_equal (arg0, arg1);
2778 }
2779
2780 if (!(flags & OEP_ADDRESS_OF))
2781 {
2782 /* If both types don't have the same signedness, then we can't consider
2783 them equal. We must check this before the STRIP_NOPS calls
2784 because they may change the signedness of the arguments. As pointers
2785 strictly don't have a signedness, require either two pointers or
2786 two non-pointers as well. */
2787 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2788 || POINTER_TYPE_P (TREE_TYPE (arg0))
2789 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2790 return 0;
2791
2792 /* If both types don't have the same precision, then it is not safe
2793 to strip NOPs. */
2794 if (element_precision (TREE_TYPE (arg0))
2795 != element_precision (TREE_TYPE (arg1)))
2796 return 0;
2797
2798 STRIP_NOPS (arg0);
2799 STRIP_NOPS (arg1);
2800 }
2801 #if 0
2802 /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2803 sanity check once the issue is solved. */
2804 else
2805 /* Addresses of conversions and SSA_NAMEs (and many other things)
2806 are not defined. Check that we did not forget to drop the
2807 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2808 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2809 && TREE_CODE (arg0) != SSA_NAME);
2810 #endif
2811
2812 /* In case both args are comparisons but with different comparison
2813 code, try to swap the comparison operands of one arg to produce
2814 a match and compare that variant. */
2815 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2816 && COMPARISON_CLASS_P (arg0)
2817 && COMPARISON_CLASS_P (arg1))
2818 {
2819 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2820
2821 if (TREE_CODE (arg0) == swap_code)
2822 return operand_equal_p (TREE_OPERAND (arg0, 0),
2823 TREE_OPERAND (arg1, 1), flags)
2824 && operand_equal_p (TREE_OPERAND (arg0, 1),
2825 TREE_OPERAND (arg1, 0), flags);
2826 }
2827
2828 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2829 {
2830 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2831 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2832 ;
2833 else if (flags & OEP_ADDRESS_OF)
2834 {
2835 /* If we are interested in comparing addresses ignore
2836 MEM_REF wrappings of the base that can appear just for
2837 TBAA reasons. */
2838 if (TREE_CODE (arg0) == MEM_REF
2839 && DECL_P (arg1)
2840 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2841 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2842 && integer_zerop (TREE_OPERAND (arg0, 1)))
2843 return 1;
2844 else if (TREE_CODE (arg1) == MEM_REF
2845 && DECL_P (arg0)
2846 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2847 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2848 && integer_zerop (TREE_OPERAND (arg1, 1)))
2849 return 1;
2850 return 0;
2851 }
2852 else
2853 return 0;
2854 }
2855
2856 /* When not checking addresses, this is needed for conversions and for
2857 COMPONENT_REF. Might as well play it safe and always test this. */
2858 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2859 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2860 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2861 && !(flags & OEP_ADDRESS_OF)))
2862 return 0;
2863
2864 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2865 We don't care about side effects in that case because the SAVE_EXPR
2866 takes care of that for us. In all other cases, two expressions are
2867 equal if they have no side effects. If we have two identical
2868 expressions with side effects that should be treated the same due
2869 to the only side effects being identical SAVE_EXPR's, that will
2870 be detected in the recursive calls below.
2871 If we are taking an invariant address of two identical objects
2872 they are necessarily equal as well. */
2873 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2874 && (TREE_CODE (arg0) == SAVE_EXPR
2875 || (flags & OEP_MATCH_SIDE_EFFECTS)
2876 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2877 return 1;
2878
2879 /* Next handle constant cases, those for which we can return 1 even
2880 if ONLY_CONST is set. */
2881 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2882 switch (TREE_CODE (arg0))
2883 {
2884 case INTEGER_CST:
2885 return tree_int_cst_equal (arg0, arg1);
2886
2887 case FIXED_CST:
2888 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2889 TREE_FIXED_CST (arg1));
2890
2891 case REAL_CST:
2892 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2893 return 1;
2894
2895
2896 if (!HONOR_SIGNED_ZEROS (arg0))
2897 {
2898 /* If we do not distinguish between signed and unsigned zero,
2899 consider them equal. */
2900 if (real_zerop (arg0) && real_zerop (arg1))
2901 return 1;
2902 }
2903 return 0;
2904
2905 case VECTOR_CST:
2906 {
2907 unsigned i;
2908
2909 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2910 return 0;
2911
2912 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2913 {
2914 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2915 VECTOR_CST_ELT (arg1, i), flags))
2916 return 0;
2917 }
2918 return 1;
2919 }
2920
2921 case COMPLEX_CST:
2922 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2923 flags)
2924 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2925 flags));
2926
2927 case STRING_CST:
2928 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2929 && ! memcmp (TREE_STRING_POINTER (arg0),
2930 TREE_STRING_POINTER (arg1),
2931 TREE_STRING_LENGTH (arg0)));
2932
2933 case ADDR_EXPR:
2934 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2935 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2936 flags | OEP_ADDRESS_OF
2937 | OEP_MATCH_SIDE_EFFECTS);
2938 case CONSTRUCTOR:
2939 /* In GIMPLE empty constructors are allowed in initializers of
2940 aggregates. */
2941 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2942 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2943 default:
2944 break;
2945 }
2946
2947 if (flags & OEP_ONLY_CONST)
2948 return 0;
2949
2950 /* Define macros to test an operand from arg0 and arg1 for equality and a
2951 variant that allows null and views null as being different from any
2952 non-null value. In the latter case, if either is null, then both
2953 must be; otherwise, do the normal comparison. */
2954 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2955 TREE_OPERAND (arg1, N), flags)
2956
2957 #define OP_SAME_WITH_NULL(N) \
2958 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2959 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2960
2961 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2962 {
2963 case tcc_unary:
2964 /* Two conversions are equal only if signedness and modes match. */
2965 switch (TREE_CODE (arg0))
2966 {
2967 CASE_CONVERT:
2968 case FIX_TRUNC_EXPR:
2969 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2970 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2971 return 0;
2972 break;
2973 default:
2974 break;
2975 }
2976
2977 return OP_SAME (0);
2978
2979
2980 case tcc_comparison:
2981 case tcc_binary:
2982 if (OP_SAME (0) && OP_SAME (1))
2983 return 1;
2984
2985 /* For commutative ops, allow the other order. */
2986 return (commutative_tree_code (TREE_CODE (arg0))
2987 && operand_equal_p (TREE_OPERAND (arg0, 0),
2988 TREE_OPERAND (arg1, 1), flags)
2989 && operand_equal_p (TREE_OPERAND (arg0, 1),
2990 TREE_OPERAND (arg1, 0), flags));
2991
2992 case tcc_reference:
2993 /* If either of the pointer (or reference) expressions we are
2994 dereferencing contain a side effect, these cannot be equal,
2995 but their addresses can be. */
2996 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
2997 && (TREE_SIDE_EFFECTS (arg0)
2998 || TREE_SIDE_EFFECTS (arg1)))
2999 return 0;
3000
3001 switch (TREE_CODE (arg0))
3002 {
3003 case INDIRECT_REF:
3004 if (!(flags & OEP_ADDRESS_OF)
3005 && (TYPE_ALIGN (TREE_TYPE (arg0))
3006 != TYPE_ALIGN (TREE_TYPE (arg1))))
3007 return 0;
3008 flags &= ~OEP_ADDRESS_OF;
3009 return OP_SAME (0);
3010
3011 case IMAGPART_EXPR:
3012 /* Require the same offset. */
3013 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3014 TYPE_SIZE (TREE_TYPE (arg1)),
3015 flags & ~OEP_ADDRESS_OF))
3016 return 0;
3017
3018 /* Fallthru. */
3019 case REALPART_EXPR:
3020 case VIEW_CONVERT_EXPR:
3021 return OP_SAME (0);
3022
3023 case TARGET_MEM_REF:
3024 case MEM_REF:
3025 if (!(flags & OEP_ADDRESS_OF))
3026 {
3027 /* Require equal access sizes. */
3028 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3029 && (!TYPE_SIZE (TREE_TYPE (arg0))
3030 || !TYPE_SIZE (TREE_TYPE (arg1))
3031 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3032 TYPE_SIZE (TREE_TYPE (arg1)),
3033 flags)))
3034 return 0;
3035 /* Verify that accesses are TBAA compatible. */
3036 if (!alias_ptr_types_compatible_p
3037 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3038 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3039 || (MR_DEPENDENCE_CLIQUE (arg0)
3040 != MR_DEPENDENCE_CLIQUE (arg1))
3041 || (MR_DEPENDENCE_BASE (arg0)
3042 != MR_DEPENDENCE_BASE (arg1)))
3043 return 0;
3044 /* Verify that alignment is compatible. */
3045 if (TYPE_ALIGN (TREE_TYPE (arg0))
3046 != TYPE_ALIGN (TREE_TYPE (arg1)))
3047 return 0;
3048 }
3049 flags &= ~OEP_ADDRESS_OF;
3050 return (OP_SAME (0) && OP_SAME (1)
3051 /* TARGET_MEM_REFs require equal extra operands. */
3052 && (TREE_CODE (arg0) != TARGET_MEM_REF
3053 || (OP_SAME_WITH_NULL (2)
3054 && OP_SAME_WITH_NULL (3)
3055 && OP_SAME_WITH_NULL (4))));
3056
3057 case ARRAY_REF:
3058 case ARRAY_RANGE_REF:
3059 if (!OP_SAME (0))
3060 return 0;
3061 flags &= ~OEP_ADDRESS_OF;
3062 /* Compare the array index by value first if it is constant, as the
3063 indexes may have different types but the same value here. */
3064 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3065 TREE_OPERAND (arg1, 1))
3066 || OP_SAME (1))
3067 && OP_SAME_WITH_NULL (2)
3068 && OP_SAME_WITH_NULL (3)
3069 /* Compare low bound and element size as with OEP_ADDRESS_OF
3070 we have to account for the offset of the ref. */
3071 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3072 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3073 || (operand_equal_p (array_ref_low_bound
3074 (CONST_CAST_TREE (arg0)),
3075 array_ref_low_bound
3076 (CONST_CAST_TREE (arg1)), flags)
3077 && operand_equal_p (array_ref_element_size
3078 (CONST_CAST_TREE (arg0)),
3079 array_ref_element_size
3080 (CONST_CAST_TREE (arg1)),
3081 flags))));
3082
3083 case COMPONENT_REF:
3084 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3085 may be NULL when we're called to compare MEM_EXPRs. */
3086 if (!OP_SAME_WITH_NULL (0)
3087 || !OP_SAME (1))
3088 return 0;
3089 flags &= ~OEP_ADDRESS_OF;
3090 return OP_SAME_WITH_NULL (2);
3091
3092 case BIT_FIELD_REF:
3093 if (!OP_SAME (0))
3094 return 0;
3095 flags &= ~OEP_ADDRESS_OF;
3096 return OP_SAME (1) && OP_SAME (2);
3097
3098 default:
3099 return 0;
3100 }
3101
3102 case tcc_expression:
3103 switch (TREE_CODE (arg0))
3104 {
3105 case ADDR_EXPR:
3106 /* Be sure we pass right ADDRESS_OF flag. */
3107 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3108 return operand_equal_p (TREE_OPERAND (arg0, 0),
3109 TREE_OPERAND (arg1, 0),
3110 flags | OEP_ADDRESS_OF);
3111
3112 case TRUTH_NOT_EXPR:
3113 return OP_SAME (0);
3114
3115 case TRUTH_ANDIF_EXPR:
3116 case TRUTH_ORIF_EXPR:
3117 return OP_SAME (0) && OP_SAME (1);
3118
3119 case FMA_EXPR:
3120 case WIDEN_MULT_PLUS_EXPR:
3121 case WIDEN_MULT_MINUS_EXPR:
3122 if (!OP_SAME (2))
3123 return 0;
3124 /* The multiplication operands are commutative. */
3125 /* FALLTHRU */
3126
3127 case TRUTH_AND_EXPR:
3128 case TRUTH_OR_EXPR:
3129 case TRUTH_XOR_EXPR:
3130 if (OP_SAME (0) && OP_SAME (1))
3131 return 1;
3132
3133 /* Otherwise take into account this is a commutative operation. */
3134 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3135 TREE_OPERAND (arg1, 1), flags)
3136 && operand_equal_p (TREE_OPERAND (arg0, 1),
3137 TREE_OPERAND (arg1, 0), flags));
3138
3139 case COND_EXPR:
3140 if (! OP_SAME (1) || ! OP_SAME (2))
3141 return 0;
3142 flags &= ~OEP_ADDRESS_OF;
3143 return OP_SAME (0);
3144
3145 case VEC_COND_EXPR:
3146 case DOT_PROD_EXPR:
3147 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3148
3149 default:
3150 return 0;
3151 }
3152
3153 case tcc_vl_exp:
3154 switch (TREE_CODE (arg0))
3155 {
3156 case CALL_EXPR:
3157 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3158 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3159 /* If the two CALL_EXPRs are not both internal or both normal
3160 function calls, then they are not equal. */
3161 return 0;
3162 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3163 {
3164 /* If the CALL_EXPRs call different internal functions, then they
3165 are not equal. */
3166 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3167 return 0;
3168 }
3169 else
3170 {
3171 /* If the CALL_EXPRs call different functions, then they are not
3172 equal. */
3173 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3174 flags))
3175 return 0;
3176 }
3177
3178 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3179 {
3180 unsigned int cef = call_expr_flags (arg0);
3181 if (flags & OEP_PURE_SAME)
3182 cef &= ECF_CONST | ECF_PURE;
3183 else
3184 cef &= ECF_CONST;
3185 if (!cef)
3186 return 0;
3187 }
3188
3189 /* Now see if all the arguments are the same. */
3190 {
3191 const_call_expr_arg_iterator iter0, iter1;
3192 const_tree a0, a1;
3193 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3194 a1 = first_const_call_expr_arg (arg1, &iter1);
3195 a0 && a1;
3196 a0 = next_const_call_expr_arg (&iter0),
3197 a1 = next_const_call_expr_arg (&iter1))
3198 if (! operand_equal_p (a0, a1, flags))
3199 return 0;
3200
3201 /* If we get here and both argument lists are exhausted
3202 then the CALL_EXPRs are equal. */
3203 return ! (a0 || a1);
3204 }
3205 default:
3206 return 0;
3207 }
3208
3209 case tcc_declaration:
3210 /* Consider __builtin_sqrt equal to sqrt. */
3211 return (TREE_CODE (arg0) == FUNCTION_DECL
3212 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3213 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3214 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3215
3216 case tcc_exceptional:
3217 if (TREE_CODE (arg0) == CONSTRUCTOR)
3218 {
3219 /* In GIMPLE constructors are used only to build vectors from
3220 elements. Individual elements in the constructor must be
3221 indexed in increasing order and form an initial sequence.
3222
3223 We make no effort to compare constructors in generic.
3224 (see sem_variable::equals in ipa-icf which can do so for
3225 constants). */
3226 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3227 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3228 return 0;
3229
3230 /* Be sure that the constructed vectors have the same representation.
3231 So far we have only tested that the element precisions and modes
3232 match. Vectors may be BLKmode, so also check that the number of
3233 parts matches. */
3234 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3235 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3236 return 0;
3237
3238 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3239 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3240 unsigned int len = vec_safe_length (v0);
3241
3242 if (len != vec_safe_length (v1))
3243 return 0;
3244
3245 for (unsigned int i = 0; i < len; i++)
3246 {
3247 constructor_elt *c0 = &(*v0)[i];
3248 constructor_elt *c1 = &(*v1)[i];
3249
3250 if (!operand_equal_p (c0->value, c1->value, flags)
3251 /* In GIMPLE the indexes can be either NULL or matching i.
3252 Double check this so we won't get false
3253 positives for GENERIC. */
3254 || (c0->index
3255 && (TREE_CODE (c0->index) != INTEGER_CST
3256 || !compare_tree_int (c0->index, i)))
3257 || (c1->index
3258 && (TREE_CODE (c1->index) != INTEGER_CST
3259 || !compare_tree_int (c1->index, i))))
3260 return 0;
3261 }
3262 return 1;
3263 }
3264 return 0;
3265
3266 default:
3267 return 0;
3268 }
3269
3270 #undef OP_SAME
3271 #undef OP_SAME_WITH_NULL
3272 }
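
/* Example uses: x + 1 and 1 + x compare equal because the tcc_binary
   case above also tries the swapped operand order for commutative
   codes.  Two textually identical calls f () compare unequal unless
   the callee is ECF_CONST (or ECF_PURE with OEP_PURE_SAME), since
   each evaluation may produce a different value.  */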
3273 \f
3274 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3275 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3276
3277 When in doubt, return 0. */
3278
3279 static int
3280 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3281 {
3282 int unsignedp1, unsignedpo;
3283 tree primarg0, primarg1, primother;
3284 unsigned int correct_width;
3285
3286 if (operand_equal_p (arg0, arg1, 0))
3287 return 1;
3288
3289 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3290 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3291 return 0;
3292
3293 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3294 and see if the inner values are the same. This removes any
3295 signedness comparison, which doesn't matter here. */
3296 primarg0 = arg0, primarg1 = arg1;
3297 STRIP_NOPS (primarg0);
3298 STRIP_NOPS (primarg1);
3299 if (operand_equal_p (primarg0, primarg1, 0))
3300 return 1;
3301
3302 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3303 actual comparison operand, ARG0.
3304
3305 First throw away any conversions to wider types
3306 already present in the operands. */
3307
3308 primarg1 = get_narrower (arg1, &unsignedp1);
3309 primother = get_narrower (other, &unsignedpo);
3310
3311 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3312 if (unsignedp1 == unsignedpo
3313 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3314 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3315 {
3316 tree type = TREE_TYPE (arg0);
3317
3318 /* Make sure the shorter operand is extended the right way
3319 to match the longer operand. */
3320 primarg1 = fold_convert (signed_or_unsigned_type_for
3321 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3322
3323 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3324 return 1;
3325 }
3326
3327 return 0;
3328 }
3329 \f
3330 /* See if ARG is an expression that is either a comparison or is performing
3331 arithmetic on comparisons. The comparisons must only be comparing
3332 two different values, which will be stored in *CVAL1 and *CVAL2; if
3333 they are nonzero it means that some operands have already been found.
3334 No variables may be used anywhere else in the expression except in the
3335 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3336 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3337
3338 If this is true, return 1. Otherwise, return zero. */
3339
3340 static int
3341 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3342 {
3343 enum tree_code code = TREE_CODE (arg);
3344 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3345
3346 /* We can handle some of the tcc_expression cases here. */
3347 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3348 tclass = tcc_unary;
3349 else if (tclass == tcc_expression
3350 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3351 || code == COMPOUND_EXPR))
3352 tclass = tcc_binary;
3353
3354 else if (tclass == tcc_expression && code == SAVE_EXPR
3355 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3356 {
3357 /* If we've already found a CVAL1 or CVAL2, this expression is
3358 too complex to handle. */
3359 if (*cval1 || *cval2)
3360 return 0;
3361
3362 tclass = tcc_unary;
3363 *save_p = 1;
3364 }
3365
3366 switch (tclass)
3367 {
3368 case tcc_unary:
3369 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3370
3371 case tcc_binary:
3372 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3373 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3374 cval1, cval2, save_p));
3375
3376 case tcc_constant:
3377 return 1;
3378
3379 case tcc_expression:
3380 if (code == COND_EXPR)
3381 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3382 cval1, cval2, save_p)
3383 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3384 cval1, cval2, save_p)
3385 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3386 cval1, cval2, save_p));
3387 return 0;
3388
3389 case tcc_comparison:
3390 /* First see if we can handle the first operand, then the second. For
3391 the second operand, we know *CVAL1 can't be zero. Each side of
3392 the comparison must be one of the two values; catch the
3393 case where this isn't true by failing if the two operands
3394 are the same. */
3395
3396 if (operand_equal_p (TREE_OPERAND (arg, 0),
3397 TREE_OPERAND (arg, 1), 0))
3398 return 0;
3399
3400 if (*cval1 == 0)
3401 *cval1 = TREE_OPERAND (arg, 0);
3402 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3403 ;
3404 else if (*cval2 == 0)
3405 *cval2 = TREE_OPERAND (arg, 0);
3406 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3407 ;
3408 else
3409 return 0;
3410
3411 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3412 ;
3413 else if (*cval2 == 0)
3414 *cval2 = TREE_OPERAND (arg, 1);
3415 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3416 ;
3417 else
3418 return 0;
3419
3420 return 1;
3421
3422 default:
3423 return 0;
3424 }
3425 }
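
/* For instance, on (a < b) || (a == b) this records *CVAL1 = a and
   *CVAL2 = b and returns 1, while (a < b) || (a == c) returns 0:
   once a and b are recorded, the operand c of the second comparison
   matches neither value.  */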
3426 \f
3427 /* ARG is a tree that is known to contain just arithmetic operations and
3428 comparisons. Evaluate the operations in the tree substituting NEW0 for
3429 any occurrence of OLD0 as an operand of a comparison and likewise for
3430 NEW1 and OLD1. */
3431
3432 static tree
3433 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3434 tree old1, tree new1)
3435 {
3436 tree type = TREE_TYPE (arg);
3437 enum tree_code code = TREE_CODE (arg);
3438 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3439
3440 /* We can handle some of the tcc_expression cases here. */
3441 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3442 tclass = tcc_unary;
3443 else if (tclass == tcc_expression
3444 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3445 tclass = tcc_binary;
3446
3447 switch (tclass)
3448 {
3449 case tcc_unary:
3450 return fold_build1_loc (loc, code, type,
3451 eval_subst (loc, TREE_OPERAND (arg, 0),
3452 old0, new0, old1, new1));
3453
3454 case tcc_binary:
3455 return fold_build2_loc (loc, code, type,
3456 eval_subst (loc, TREE_OPERAND (arg, 0),
3457 old0, new0, old1, new1),
3458 eval_subst (loc, TREE_OPERAND (arg, 1),
3459 old0, new0, old1, new1));
3460
3461 case tcc_expression:
3462 switch (code)
3463 {
3464 case SAVE_EXPR:
3465 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3466 old1, new1);
3467
3468 case COMPOUND_EXPR:
3469 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3470 old1, new1);
3471
3472 case COND_EXPR:
3473 return fold_build3_loc (loc, code, type,
3474 eval_subst (loc, TREE_OPERAND (arg, 0),
3475 old0, new0, old1, new1),
3476 eval_subst (loc, TREE_OPERAND (arg, 1),
3477 old0, new0, old1, new1),
3478 eval_subst (loc, TREE_OPERAND (arg, 2),
3479 old0, new0, old1, new1));
3480 default:
3481 break;
3482 }
3483 /* Fall through - ??? */
3484
3485 case tcc_comparison:
3486 {
3487 tree arg0 = TREE_OPERAND (arg, 0);
3488 tree arg1 = TREE_OPERAND (arg, 1);
3489
3490 /* We need to check both for exact equality and tree equality. The
3491 former will be true if the operand has a side-effect. In that
3492 case, we know the operand occurred exactly once. */
3493
3494 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3495 arg0 = new0;
3496 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3497 arg0 = new1;
3498
3499 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3500 arg1 = new0;
3501 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3502 arg1 = new1;
3503
3504 return fold_build2_loc (loc, code, type, arg0, arg1);
3505 }
3506
3507 default:
3508 return arg;
3509 }
3510 }
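
/* For example, with OLD0 = a, NEW0 = x, OLD1 = b, NEW1 = y,
   eval_subst rewrites (a < b) && (b == a) into (x < y) && (y == x).
   Only operands of comparisons are substituted; other occurrences of
   the values are left alone.  */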
3511 \f
3512 /* Return a tree for the case when the result of an expression is RESULT
3513 converted to TYPE and OMITTED was previously an operand of the expression
3514 but is now not needed (e.g., we folded OMITTED * 0).
3515
3516 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3517 the conversion of RESULT to TYPE. */
3518
3519 tree
3520 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3521 {
3522 tree t = fold_convert_loc (loc, type, result);
3523
3524 /* If the resulting operand is an empty statement, just return the omitted
3525 statement cast to void. */
3526 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3527 return build1_loc (loc, NOP_EXPR, void_type_node,
3528 fold_ignored_result (omitted));
3529
3530 if (TREE_SIDE_EFFECTS (omitted))
3531 return build2_loc (loc, COMPOUND_EXPR, type,
3532 fold_ignored_result (omitted), t);
3533
3534 return non_lvalue_loc (loc, t);
3535 }
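
/* For example, when folding f () * 0 the product is known to be zero
   but the call still has to be evaluated; with CALL denoting the
   f () tree,

     omit_one_operand_loc (loc, type, integer_zero_node, call)

   produces the COMPOUND_EXPR (f (), 0).  For a side-effect-free
   OMITTED it simply returns the converted RESULT.  */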
3536
3537 /* Return a tree for the case when the result of an expression is RESULT
3538 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3539 of the expression but are now not needed.
3540
3541 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3542 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3543 evaluated before OMITTED2. Otherwise, if neither has side effects,
3544 just do the conversion of RESULT to TYPE. */
3545
3546 tree
3547 omit_two_operands_loc (location_t loc, tree type, tree result,
3548 tree omitted1, tree omitted2)
3549 {
3550 tree t = fold_convert_loc (loc, type, result);
3551
3552 if (TREE_SIDE_EFFECTS (omitted2))
3553 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3554 if (TREE_SIDE_EFFECTS (omitted1))
3555 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3556
3557 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3558 }
3559
3560 \f
3561 /* Return a simplified tree node for the truth-negation of ARG. This
3562 never alters ARG itself. We assume that ARG is an operation that
3563 returns a truth value (0 or 1).
3564
3565 FIXME: one would think we would fold the result, but it causes
3566 problems with the dominator optimizer. */
3567
3568 static tree
3569 fold_truth_not_expr (location_t loc, tree arg)
3570 {
3571 tree type = TREE_TYPE (arg);
3572 enum tree_code code = TREE_CODE (arg);
3573 location_t loc1, loc2;
3574
3575 /* If this is a comparison, we can simply invert it, except for
3576 floating-point non-equality comparisons, in which case we just
3577 enclose a TRUTH_NOT_EXPR around what we have. */
3578
3579 if (TREE_CODE_CLASS (code) == tcc_comparison)
3580 {
3581 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3582 if (FLOAT_TYPE_P (op_type)
3583 && flag_trapping_math
3584 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3585 && code != NE_EXPR && code != EQ_EXPR)
3586 return NULL_TREE;
3587
3588 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3589 if (code == ERROR_MARK)
3590 return NULL_TREE;
3591
3592 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3593 TREE_OPERAND (arg, 1));
3594 }
3595
3596 switch (code)
3597 {
3598 case INTEGER_CST:
3599 return constant_boolean_node (integer_zerop (arg), type);
3600
3601 case TRUTH_AND_EXPR:
3602 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3603 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3604 return build2_loc (loc, TRUTH_OR_EXPR, type,
3605 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3606 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3607
3608 case TRUTH_OR_EXPR:
3609 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3610 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3611 return build2_loc (loc, TRUTH_AND_EXPR, type,
3612 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3613 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3614
3615 case TRUTH_XOR_EXPR:
3616 /* Here we can invert either operand. We invert the first operand
3617 unless the second operand is a TRUTH_NOT_EXPR in which case our
3618 result is the XOR of the first operand with the inside of the
3619 negation of the second operand. */
3620
3621 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3622 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3623 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3624 else
3625 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3626 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3627 TREE_OPERAND (arg, 1));
3628
3629 case TRUTH_ANDIF_EXPR:
3630 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3631 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3632 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3633 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3634 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3635
3636 case TRUTH_ORIF_EXPR:
3637 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3638 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3639 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3640 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3641 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3642
3643 case TRUTH_NOT_EXPR:
3644 return TREE_OPERAND (arg, 0);
3645
3646 case COND_EXPR:
3647 {
3648 tree arg1 = TREE_OPERAND (arg, 1);
3649 tree arg2 = TREE_OPERAND (arg, 2);
3650
3651 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3652 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3653
3654 /* A COND_EXPR may have a throw as one operand, which
3655 then has void type. Just leave void operands
3656 as they are. */
3657 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3658 VOID_TYPE_P (TREE_TYPE (arg1))
3659 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3660 VOID_TYPE_P (TREE_TYPE (arg2))
3661 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3662 }
3663
3664 case COMPOUND_EXPR:
3665 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3666 return build2_loc (loc, COMPOUND_EXPR, type,
3667 TREE_OPERAND (arg, 0),
3668 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3669
3670 case NON_LVALUE_EXPR:
3671 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3672 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3673
3674 CASE_CONVERT:
3675 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3676 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3677
3678 /* ... fall through ... */
3679
3680 case FLOAT_EXPR:
3681 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3682 return build1_loc (loc, TREE_CODE (arg), type,
3683 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3684
3685 case BIT_AND_EXPR:
3686 if (!integer_onep (TREE_OPERAND (arg, 1)))
3687 return NULL_TREE;
3688 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3689
3690 case SAVE_EXPR:
3691 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3692
3693 case CLEANUP_POINT_EXPR:
3694 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3695 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3696 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3697
3698 default:
3699 return NULL_TREE;
3700 }
3701 }
3702
3703 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3704 assume that ARG is an operation that returns a truth value (0 or 1
3705 for scalars, 0 or -1 for vectors). Return the folded expression if
3706 folding is successful. Otherwise, return NULL_TREE. */
3707
3708 static tree
3709 fold_invert_truthvalue (location_t loc, tree arg)
3710 {
3711 tree type = TREE_TYPE (arg);
3712 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3713 ? BIT_NOT_EXPR
3714 : TRUTH_NOT_EXPR,
3715 type, arg);
3716 }
3717
3718 /* Return a simplified tree node for the truth-negation of ARG. This
3719 never alters ARG itself. We assume that ARG is an operation that
3720 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3721
3722 tree
3723 invert_truthvalue_loc (location_t loc, tree arg)
3724 {
3725 if (TREE_CODE (arg) == ERROR_MARK)
3726 return arg;
3727
3728 tree type = TREE_TYPE (arg);
3729 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3730 ? BIT_NOT_EXPR
3731 : TRUTH_NOT_EXPR,
3732 type, arg);
3733 }
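/* For instance (illustrative, not an exhaustive description): applied to
   a < b, invert_truthvalue_loc produces a >= b when NaNs need not be
   honored, and the expression !(a < b) otherwise, since a >= b and
   !(a < b) differ when either operand is a NaN. For vector truth
   values the negation is done with BIT_NOT_EXPR on the 0 / -1 mask. */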
3734
3735 /* Knowing that ARG0 and ARG1 are each either a MULT_EXPR or an RDIV_EXPR,
3736 simplify a binary operation with code CODE. This optimization is unsafe. */
3737 static tree
3738 distribute_real_division (location_t loc, enum tree_code code, tree type,
3739 tree arg0, tree arg1)
3740 {
3741 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3742 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3743
3744 /* (A / C) +- (B / C) -> (A +- B) / C. */
3745 if (mul0 == mul1
3746 && operand_equal_p (TREE_OPERAND (arg0, 1),
3747 TREE_OPERAND (arg1, 1), 0))
3748 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3749 fold_build2_loc (loc, code, type,
3750 TREE_OPERAND (arg0, 0),
3751 TREE_OPERAND (arg1, 0)),
3752 TREE_OPERAND (arg0, 1));
3753
3754 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3755 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3756 TREE_OPERAND (arg1, 0), 0)
3757 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3758 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3759 {
3760 REAL_VALUE_TYPE r0, r1;
3761 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3762 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3763 if (!mul0)
3764 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3765 if (!mul1)
3766 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3767 real_arithmetic (&r0, code, &r0, &r1);
3768 return fold_build2_loc (loc, MULT_EXPR, type,
3769 TREE_OPERAND (arg0, 0),
3770 build_real (type, r0));
3771 }
3772
3773 return NULL_TREE;
3774 }
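/* A minimal sketch of the two patterns above (hypothetical functions,
   and assuming the caller only invokes this under unsafe math
   optimizations):

     double f (double a, double b) { return a / 3.0 + b / 3.0; }

   matches the first pattern and becomes (a + b) / 3.0, while

     double g (double a) { return a / 2.0 + a / 4.0; }

   matches the second and becomes a * (1.0/2.0 + 1.0/4.0), i.e. a * 0.75.
   Both rewrites can change rounding, hence "unsafe". */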
3775 \f
3776 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3777 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3778 and uses reverse storage order if REVERSEP is nonzero. */
3779
3780 static tree
3781 make_bit_field_ref (location_t loc, tree inner, tree type,
3782 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3783 int unsignedp, int reversep)
3784 {
3785 tree result, bftype;
3786
3787 if (bitpos == 0 && !reversep)
3788 {
3789 tree size = TYPE_SIZE (TREE_TYPE (inner));
3790 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3791 || POINTER_TYPE_P (TREE_TYPE (inner)))
3792 && tree_fits_shwi_p (size)
3793 && tree_to_shwi (size) == bitsize)
3794 return fold_convert_loc (loc, type, inner);
3795 }
3796
3797 bftype = type;
3798 if (TYPE_PRECISION (bftype) != bitsize
3799 || TYPE_UNSIGNED (bftype) == !unsignedp)
3800 bftype = build_nonstandard_integer_type (bitsize, 0);
3801
3802 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3803 size_int (bitsize), bitsize_int (bitpos));
3804 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3805
3806 if (bftype != type)
3807 result = fold_convert_loc (loc, type, result);
3808
3809 return result;
3810 }
3811
3812 /* Optimize a bit-field compare.
3813
3814 There are two cases: the first is a compare against a constant and the
3815 second is a comparison of two items where the fields are at the same
3816 bit position relative to the start of a chunk (byte, halfword, word)
3817 large enough to contain it. In these cases we can avoid the shift
3818 implicit in bitfield extractions.
3819
3820 For constants, we emit a compare of the shifted constant with the
3821 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3822 compared. For two fields at the same position, we do the ANDs with the
3823 similar mask and compare the result of the ANDs.
3824
3825 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3826 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3827 are the left and right operands of the comparison, respectively.
3828
3829 If the optimization described above can be done, we return the resulting
3830 tree. Otherwise we return zero. */
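/* An illustrative sketch (hypothetical struct, assuming the field fits
   in a word and storage order is not reversed):

     struct s { unsigned int a : 3; unsigned int b : 3; };
     int f (struct s *p) { return p->b == 5; }

   instead of extracting B with a shift and mask, the comparison is
   rewritten as roughly (WORD & (7 << 3)) == (5 << 3), where WORD is a
   word-sized load covering the bit-field. */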
3831
3832 static tree
3833 optimize_bit_field_compare (location_t loc, enum tree_code code,
3834 tree compare_type, tree lhs, tree rhs)
3835 {
3836 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3837 tree type = TREE_TYPE (lhs);
3838 tree unsigned_type;
3839 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3840 machine_mode lmode, rmode, nmode;
3841 int lunsignedp, runsignedp;
3842 int lreversep, rreversep;
3843 int lvolatilep = 0, rvolatilep = 0;
3844 tree linner, rinner = NULL_TREE;
3845 tree mask;
3846 tree offset;
3847
3848 /* Get all the information about the extractions being done. If the bit size
3849 is the same as the size of the underlying object, we aren't doing an
3850 extraction at all and so can do nothing. We also don't want to
3851 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3852 then will no longer be able to replace it. */
3853 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3854 &lunsignedp, &lreversep, &lvolatilep, false);
3855 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3856 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3857 return 0;
3858
3859 if (const_p)
3860 rreversep = lreversep;
3861 else
3862 {
3863 /* If this is not a constant, we can only do something if bit positions,
3864 sizes, signedness and storage order are the same. */
3865 rinner
3866 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3867 &runsignedp, &rreversep, &rvolatilep, false);
3868
3869 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3870 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3871 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3872 return 0;
3873 }
3874
3875 /* See if we can find a mode to refer to this field. We should be able to,
3876 but fail if we can't. */
3877 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3878 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3879 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3880 TYPE_ALIGN (TREE_TYPE (rinner))),
3881 word_mode, false);
3882 if (nmode == VOIDmode)
3883 return 0;
3884
3885 /* Set signed and unsigned types of the precision of this mode for the
3886 shifts below. */
3887 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3888
3889 /* Compute the bit position and size for the new reference and our offset
3890 within it. If the new reference is the same size as the original, we
3891 won't optimize anything, so return zero. */
3892 nbitsize = GET_MODE_BITSIZE (nmode);
3893 nbitpos = lbitpos & ~ (nbitsize - 1);
3894 lbitpos -= nbitpos;
3895 if (nbitsize == lbitsize)
3896 return 0;
3897
3898 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3899 lbitpos = nbitsize - lbitsize - lbitpos;
3900
3901 /* Make the mask to be used against the extracted field. */
3902 mask = build_int_cst_type (unsigned_type, -1);
3903 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3904 mask = const_binop (RSHIFT_EXPR, mask,
3905 size_int (nbitsize - lbitsize - lbitpos));
3906
3907 if (! const_p)
3908 /* If not comparing with constant, just rework the comparison
3909 and return. */
3910 return fold_build2_loc (loc, code, compare_type,
3911 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3912 make_bit_field_ref (loc, linner,
3913 unsigned_type,
3914 nbitsize, nbitpos,
3915 1, lreversep),
3916 mask),
3917 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3918 make_bit_field_ref (loc, rinner,
3919 unsigned_type,
3920 nbitsize, nbitpos,
3921 1, rreversep),
3922 mask));
3923
3924 /* Otherwise, we are handling the constant case. See if the constant is too
3925 big for the field. Warn and return a constant result (false for EQ_EXPR,
3926 true for NE_EXPR) if so. We do this not only for its own sake, but to
3927 avoid having to test for this error case below; if we didn't, we might generate wrong code.
3928
3929 For unsigned fields, the constant shifted right by the field length should
3930 be all zero. For signed fields, the high-order bits should agree with
3931 the sign bit. */
3932
3933 if (lunsignedp)
3934 {
3935 if (wi::lrshift (rhs, lbitsize) != 0)
3936 {
3937 warning (0, "comparison is always %d due to width of bit-field",
3938 code == NE_EXPR);
3939 return constant_boolean_node (code == NE_EXPR, compare_type);
3940 }
3941 }
3942 else
3943 {
3944 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3945 if (tem != 0 && tem != -1)
3946 {
3947 warning (0, "comparison is always %d due to width of bit-field",
3948 code == NE_EXPR);
3949 return constant_boolean_node (code == NE_EXPR, compare_type);
3950 }
3951 }
3952
3953 /* Single-bit compares should always be against zero. */
3954 if (lbitsize == 1 && ! integer_zerop (rhs))
3955 {
3956 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3957 rhs = build_int_cst (type, 0);
3958 }
3959
3960 /* Make a new bitfield reference, shift the constant over the
3961 appropriate number of bits and mask it with the computed mask
3962 (in case this was a signed field). If we changed it, make a new one. */
3963 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3964 lreversep);
3965
3966 rhs = const_binop (BIT_AND_EXPR,
3967 const_binop (LSHIFT_EXPR,
3968 fold_convert_loc (loc, unsigned_type, rhs),
3969 size_int (lbitpos)),
3970 mask);
3971
3972 lhs = build2_loc (loc, code, compare_type,
3973 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3974 return lhs;
3975 }
3976 \f
3977 /* Subroutine for fold_truth_andor_1: decode a field reference.
3978
3979 If EXP is a comparison reference, we return the innermost reference.
3980
3981 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3982 set to the starting bit number.
3983
3984 If the innermost field can be completely contained in a mode-sized
3985 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3986
3987 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3988 otherwise it is not changed.
3989
3990 *PUNSIGNEDP is set to the signedness of the field.
3991
3992 *PREVERSEP is set to the storage order of the field.
3993
3994 *PMASK is set to the mask used. This is either contained in a
3995 BIT_AND_EXPR or derived from the width of the field.
3996
3997 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3998
3999 Return 0 if this is not a component reference or is one that we can't
4000 do anything with. */
4001
4002 static tree
4003 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
4004 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4005 int *punsignedp, int *preversep, int *pvolatilep,
4006 tree *pmask, tree *pand_mask)
4007 {
4008 tree outer_type = 0;
4009 tree and_mask = 0;
4010 tree mask, inner, offset;
4011 tree unsigned_type;
4012 unsigned int precision;
4013
4014 /* All the optimizations using this function assume integer fields.
4015 There are problems with FP fields since the type_for_size call
4016 below can fail for, e.g., XFmode. */
4017 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4018 return 0;
4019
4020 /* We are interested in the bare arrangement of bits, so strip everything
4021 that doesn't affect the machine mode. However, record the type of the
4022 outermost expression if it may matter below. */
4023 if (CONVERT_EXPR_P (exp)
4024 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4025 outer_type = TREE_TYPE (exp);
4026 STRIP_NOPS (exp);
4027
4028 if (TREE_CODE (exp) == BIT_AND_EXPR)
4029 {
4030 and_mask = TREE_OPERAND (exp, 1);
4031 exp = TREE_OPERAND (exp, 0);
4032 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4033 if (TREE_CODE (and_mask) != INTEGER_CST)
4034 return 0;
4035 }
4036
4037 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4038 punsignedp, preversep, pvolatilep, false);
4039 if ((inner == exp && and_mask == 0)
4040 || *pbitsize < 0 || offset != 0
4041 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4042 return 0;
4043
4044 /* If the number of bits in the reference is the same as the bitsize of
4045 the outer type, then the outer type gives the signedness. Otherwise
4046 (in case of a small bitfield) the signedness is unchanged. */
4047 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4048 *punsignedp = TYPE_UNSIGNED (outer_type);
4049
4050 /* Compute the mask to access the bitfield. */
4051 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4052 precision = TYPE_PRECISION (unsigned_type);
4053
4054 mask = build_int_cst_type (unsigned_type, -1);
4055
4056 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4057 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4058
4059 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4060 if (and_mask != 0)
4061 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4062 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4063
4064 *pmask = mask;
4065 *pand_mask = and_mask;
4066 return inner;
4067 }
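/* As an illustration (hypothetical operand): for X & 0xff appearing
   under a truth operation, this returns the reference for X with
   *PBITSIZE and *PBITPOS describing the bits accessed and *PAND_MASK
   set to 0xff; fold_truth_andor_1 can then merge adjacent field tests
   such as p->a == 1 && p->b == 2 into one wider load and compare. */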
4068
4069 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4070 bit positions and the type of MASK is signed. */
4071
4072 static int
4073 all_ones_mask_p (const_tree mask, unsigned int size)
4074 {
4075 tree type = TREE_TYPE (mask);
4076 unsigned int precision = TYPE_PRECISION (type);
4077
4078 /* If this function returns true when the type of the mask is
4079 UNSIGNED, then there will be errors. In particular see
4080 gcc.c-torture/execute/990326-1.c. There does not appear to be
4081 any documentation paper trail as to why this is so. But the pre
4082 wide-int worked with that restriction and it has been preserved
4083 here. */
4084 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4085 return false;
4086
4087 return wi::mask (size, false, precision) == mask;
4088 }
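/* For example, with a signed 32-bit mask type, all_ones_mask_p returns
   true for SIZE == 4 and MASK == 0xf, and false both for MASK == 0x17
   (not a low-order run of ones) and for any mask of unsigned type, per
   the restriction documented above. */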
4089
4090 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4091 represents the sign bit of EXP's type. If EXP represents a sign
4092 or zero extension, also test VAL against the unextended type.
4093 The return value is the (sub)expression whose sign bit is VAL,
4094 or NULL_TREE otherwise. */
4095
4096 tree
4097 sign_bit_p (tree exp, const_tree val)
4098 {
4099 int width;
4100 tree t;
4101
4102 /* Tree EXP must have an integral type. */
4103 t = TREE_TYPE (exp);
4104 if (! INTEGRAL_TYPE_P (t))
4105 return NULL_TREE;
4106
4107 /* Tree VAL must be an integer constant. */
4108 if (TREE_CODE (val) != INTEGER_CST
4109 || TREE_OVERFLOW (val))
4110 return NULL_TREE;
4111
4112 width = TYPE_PRECISION (t);
4113 if (wi::only_sign_bit_p (val, width))
4114 return exp;
4115
4116 /* Handle extension from a narrower type. */
4117 if (TREE_CODE (exp) == NOP_EXPR
4118 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4119 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4120
4121 return NULL_TREE;
4122 }
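/* Example (illustrative): for a 32-bit int EXP, the only accepted VAL is
   0x80000000, i.e. INT_MIN viewed as a bit pattern. If EXP is a widening
   conversion such as (int) S with S of type short, VAL == 0x8000 is also
   accepted through the NOP_EXPR case, and the narrower operand S is
   returned. */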
4123
4124 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4125 to be evaluated unconditionally. */
4126
4127 static int
4128 simple_operand_p (const_tree exp)
4129 {
4130 /* Strip any conversions that don't change the machine mode. */
4131 STRIP_NOPS (exp);
4132
4133 return (CONSTANT_CLASS_P (exp)
4134 || TREE_CODE (exp) == SSA_NAME
4135 || (DECL_P (exp)
4136 && ! TREE_ADDRESSABLE (exp)
4137 && ! TREE_THIS_VOLATILE (exp)
4138 && ! DECL_NONLOCAL (exp)
4139 /* Don't regard global variables as simple. They may be
4140 allocated in ways unknown to the compiler (shared memory,
4141 #pragma weak, etc). */
4142 && ! TREE_PUBLIC (exp)
4143 && ! DECL_EXTERNAL (exp)
4144 /* Weakrefs are not safe to be read, since they can be NULL.
4145 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4146 have DECL_WEAK flag set. */
4147 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4148 /* Loading a static variable is unduly expensive, but global
4149 registers aren't expensive. */
4150 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4151 }
4152
4153 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4154 to be evaluated unconditionally.
4155 In addition to simple_operand_p, we assume that comparisons, conversions,
4156 and logical-not operations are simple, if their operands are simple, too. */
4157
4158 static bool
4159 simple_operand_p_2 (tree exp)
4160 {
4161 enum tree_code code;
4162
4163 if (TREE_SIDE_EFFECTS (exp)
4164 || tree_could_trap_p (exp))
4165 return false;
4166
4167 while (CONVERT_EXPR_P (exp))
4168 exp = TREE_OPERAND (exp, 0);
4169
4170 code = TREE_CODE (exp);
4171
4172 if (TREE_CODE_CLASS (code) == tcc_comparison)
4173 return (simple_operand_p (TREE_OPERAND (exp, 0))
4174 && simple_operand_p (TREE_OPERAND (exp, 1)));
4175
4176 if (code == TRUTH_NOT_EXPR)
4177 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4178
4179 return simple_operand_p (exp);
4180 }
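/* Illustrative examples: a constant, an SSA_NAME or a local
   non-volatile, non-addressable declaration is simple; a global, weak
   or volatile variable is not. simple_operand_p_2 additionally accepts
   e.g. a < b or !(c != 0), provided A, B and C are themselves simple
   and the expression can neither trap nor have side effects. */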
4181
4182 \f
4183 /* The following functions are subroutines to fold_range_test and allow it to
4184 try to change a logical combination of comparisons into a range test.
4185
4186 For example, both
4187 X == 2 || X == 3 || X == 4 || X == 5
4188 and
4189 X >= 2 && X <= 5
4190 are converted to
4191 (unsigned) (X - 2) <= 3
4192
4193 We describe each set of comparisons as being either inside or outside
4194 a range, using a variable named like IN_P, and then describe the
4195 range with a lower and upper bound. If one of the bounds is omitted,
4196 it represents either the highest or lowest value of the type.
4197
4198 In the comments below, we represent a range by two numbers in brackets
4199 preceded by a "+" to designate being inside that range, or a "-" to
4200 designate being outside that range, so the condition can be inverted by
4201 flipping the prefix. An omitted bound is represented by a "-". For
4202 example, "- [-, 10]" means being outside the range starting at the lowest
4203 possible value and ending at 10, in other words, being greater than 10.
4204 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4205 always false.
4206
4207 We set up things so that the missing bounds are handled in a consistent
4208 manner so neither a missing bound nor "true" and "false" need to be
4209 handled using a special case. */
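/* To make the example above concrete: X >= 2 && X <= 5 is the range
   + [2, 5]. Subtracting the low bound gives X - 2 in + [0, 3], and
   because the subtraction is performed in the unsigned type, any X
   below 2 wraps around to a huge value, so the single comparison
   (unsigned) (X - 2) <= 3 tests exactly the original range. */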
4210
4211 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4212 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4213 and UPPER1_P are nonzero if the respective argument is an upper bound
4214 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4215 must be specified for a comparison. ARG1 will be converted to ARG0's
4216 type if both are specified. */
4217
4218 static tree
4219 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4220 tree arg1, int upper1_p)
4221 {
4222 tree tem;
4223 int result;
4224 int sgn0, sgn1;
4225
4226 /* If neither arg represents infinity, do the normal operation.
4227 Else, if not a comparison, return infinity. Else handle the special
4228 comparison rules. Note that most of the cases below won't occur, but
4229 are handled for consistency. */
4230
4231 if (arg0 != 0 && arg1 != 0)
4232 {
4233 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4234 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4235 STRIP_NOPS (tem);
4236 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4237 }
4238
4239 if (TREE_CODE_CLASS (code) != tcc_comparison)
4240 return 0;
4241
4242 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4243 for neither. In real mathematics, we cannot assume open-ended ranges
4244 are the same. But, this is computer arithmetic, where numbers are finite.
4245 We can therefore represent a missing upper bound by a value Z greater
4246 than any representable number, and a missing lower bound symmetrically.
4247 This permits us to treat unbounded ranges as equal. */
4248 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4249 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4250 switch (code)
4251 {
4252 case EQ_EXPR:
4253 result = sgn0 == sgn1;
4254 break;
4255 case NE_EXPR:
4256 result = sgn0 != sgn1;
4257 break;
4258 case LT_EXPR:
4259 result = sgn0 < sgn1;
4260 break;
4261 case LE_EXPR:
4262 result = sgn0 <= sgn1;
4263 break;
4264 case GT_EXPR:
4265 result = sgn0 > sgn1;
4266 break;
4267 case GE_EXPR:
4268 result = sgn0 >= sgn1;
4269 break;
4270 default:
4271 gcc_unreachable ();
4272 }
4273
4274 return constant_boolean_node (result, type);
4275 }
4276 \f
4277 /* Helper routine for make_range. Perform one step for it, return
4278 new expression if the loop should continue or NULL_TREE if it should
4279 stop. */
4280
4281 tree
4282 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4283 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4284 bool *strict_overflow_p)
4285 {
4286 tree arg0_type = TREE_TYPE (arg0);
4287 tree n_low, n_high, low = *p_low, high = *p_high;
4288 int in_p = *p_in_p, n_in_p;
4289
4290 switch (code)
4291 {
4292 case TRUTH_NOT_EXPR:
4293 /* We can only do something if the range is testing for zero. */
4294 if (low == NULL_TREE || high == NULL_TREE
4295 || ! integer_zerop (low) || ! integer_zerop (high))
4296 return NULL_TREE;
4297 *p_in_p = ! in_p;
4298 return arg0;
4299
4300 case EQ_EXPR: case NE_EXPR:
4301 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4302 /* We can only do something if the range is testing for zero
4303 and if the second operand is an integer constant. Note that
4304 saying something is "in" the range we make is done by
4305 complementing IN_P since it will set in the initial case of
4306 being not equal to zero; "out" is leaving it alone. */
4307 if (low == NULL_TREE || high == NULL_TREE
4308 || ! integer_zerop (low) || ! integer_zerop (high)
4309 || TREE_CODE (arg1) != INTEGER_CST)
4310 return NULL_TREE;
4311
4312 switch (code)
4313 {
4314 case NE_EXPR: /* - [c, c] */
4315 low = high = arg1;
4316 break;
4317 case EQ_EXPR: /* + [c, c] */
4318 in_p = ! in_p, low = high = arg1;
4319 break;
4320 case GT_EXPR: /* - [-, c] */
4321 low = 0, high = arg1;
4322 break;
4323 case GE_EXPR: /* + [c, -] */
4324 in_p = ! in_p, low = arg1, high = 0;
4325 break;
4326 case LT_EXPR: /* - [c, -] */
4327 low = arg1, high = 0;
4328 break;
4329 case LE_EXPR: /* + [-, c] */
4330 in_p = ! in_p, low = 0, high = arg1;
4331 break;
4332 default:
4333 gcc_unreachable ();
4334 }
4335
4336 /* If this is an unsigned comparison, we also know that EXP is
4337 greater than or equal to zero. We base the range tests we make
4338 on that fact, so we record it here so we can parse existing
4339 range tests. We test arg0_type since often the return type
4340 of, e.g. EQ_EXPR, is boolean. */
4341 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4342 {
4343 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4344 in_p, low, high, 1,
4345 build_int_cst (arg0_type, 0),
4346 NULL_TREE))
4347 return NULL_TREE;
4348
4349 in_p = n_in_p, low = n_low, high = n_high;
4350
4351 /* If the high bound is missing, but we have a nonzero low
4352 bound, reverse the range so it goes from zero to the low bound
4353 minus 1. */
4354 if (high == 0 && low && ! integer_zerop (low))
4355 {
4356 in_p = ! in_p;
4357 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4358 build_int_cst (TREE_TYPE (low), 1), 0);
4359 low = build_int_cst (arg0_type, 0);
4360 }
4361 }
4362
4363 *p_low = low;
4364 *p_high = high;
4365 *p_in_p = in_p;
4366 return arg0;
4367
4368 case NEGATE_EXPR:
4369 /* If flag_wrapv and ARG0_TYPE is signed, make sure low and high
4370 are non-NULL; the normalization below will then do the right thing. */
4371 if (!TYPE_UNSIGNED (arg0_type)
4372 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4373 {
4374 if (low == NULL_TREE)
4375 low = TYPE_MIN_VALUE (arg0_type);
4376 if (high == NULL_TREE)
4377 high = TYPE_MAX_VALUE (arg0_type);
4378 }
4379
4380 /* (-x) IN [a,b] -> x in [-b, -a] */
4381 n_low = range_binop (MINUS_EXPR, exp_type,
4382 build_int_cst (exp_type, 0),
4383 0, high, 1);
4384 n_high = range_binop (MINUS_EXPR, exp_type,
4385 build_int_cst (exp_type, 0),
4386 0, low, 0);
4387 if (n_high != 0 && TREE_OVERFLOW (n_high))
4388 return NULL_TREE;
4389 goto normalize;
4390
4391 case BIT_NOT_EXPR:
4392 /* ~ X -> -X - 1 */
4393 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4394 build_int_cst (exp_type, 1));
4395
4396 case PLUS_EXPR:
4397 case MINUS_EXPR:
4398 if (TREE_CODE (arg1) != INTEGER_CST)
4399 return NULL_TREE;
4400
4401 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4402 move a constant to the other side. */
4403 if (!TYPE_UNSIGNED (arg0_type)
4404 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4405 return NULL_TREE;
4406
4407 /* If EXP is signed, any overflow in the computation is undefined,
4408 so we don't worry about it so long as our computations on
4409 the bounds don't overflow. For unsigned, overflow is defined
4410 and this is exactly the right thing. */
4411 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4412 arg0_type, low, 0, arg1, 0);
4413 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4414 arg0_type, high, 1, arg1, 0);
4415 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4416 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4417 return NULL_TREE;
4418
4419 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4420 *strict_overflow_p = true;
4421
4422 normalize:
4423 /* Check for an unsigned range which has wrapped around the maximum
4424 value thus making n_high < n_low, and normalize it. */
4425 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4426 {
4427 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4428 build_int_cst (TREE_TYPE (n_high), 1), 0);
4429 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4430 build_int_cst (TREE_TYPE (n_low), 1), 0);
4431
4432 /* If the range is of the form +/- [ x+1, x ], we won't
4433 be able to normalize it. But then, it represents the
4434 whole range or the empty set, so make it
4435 +/- [ -, - ]. */
4436 if (tree_int_cst_equal (n_low, low)
4437 && tree_int_cst_equal (n_high, high))
4438 low = high = 0;
4439 else
4440 in_p = ! in_p;
4441 }
4442 else
4443 low = n_low, high = n_high;
4444
4445 *p_low = low;
4446 *p_high = high;
4447 *p_in_p = in_p;
4448 return arg0;
4449
4450 CASE_CONVERT:
4451 case NON_LVALUE_EXPR:
4452 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4453 return NULL_TREE;
4454
4455 if (! INTEGRAL_TYPE_P (arg0_type)
4456 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4457 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4458 return NULL_TREE;
4459
4460 n_low = low, n_high = high;
4461
4462 if (n_low != 0)
4463 n_low = fold_convert_loc (loc, arg0_type, n_low);
4464
4465 if (n_high != 0)
4466 n_high = fold_convert_loc (loc, arg0_type, n_high);
4467
4468 /* If we're converting arg0 from an unsigned type to exp's
4469 signed type, we will be doing the comparison as unsigned.
4470 The tests above have already verified that LOW and HIGH
4471 are both positive.
4472
4473 So we have to ensure that we will handle large unsigned
4474 values the same way that the current signed bounds treat
4475 negative values. */
4476
4477 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4478 {
4479 tree high_positive;
4480 tree equiv_type;
4481 /* For fixed-point modes, we need to pass the saturating flag
4482 as the 2nd parameter. */
4483 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4484 equiv_type
4485 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4486 TYPE_SATURATING (arg0_type));
4487 else
4488 equiv_type
4489 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4490
4491 /* A range without an upper bound is, naturally, unbounded.
4492 Since convert would have cropped a very large value, use
4493 the max value for the destination type. */
4494 high_positive
4495 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4496 : TYPE_MAX_VALUE (arg0_type);
4497
4498 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4499 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4500 fold_convert_loc (loc, arg0_type,
4501 high_positive),
4502 build_int_cst (arg0_type, 1));
4503
4504 /* If the low bound is specified, "and" the range with the
4505 range for which the original unsigned value will be
4506 positive. */
4507 if (low != 0)
4508 {
4509 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4510 1, fold_convert_loc (loc, arg0_type,
4511 integer_zero_node),
4512 high_positive))
4513 return NULL_TREE;
4514
4515 in_p = (n_in_p == in_p);
4516 }
4517 else
4518 {
4519 /* Otherwise, "or" the range with the range of the input
4520 that will be interpreted as negative. */
4521 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4522 1, fold_convert_loc (loc, arg0_type,
4523 integer_zero_node),
4524 high_positive))
4525 return NULL_TREE;
4526
4527 in_p = (in_p != n_in_p);
4528 }
4529 }
4530
4531 *p_low = n_low;
4532 *p_high = n_high;
4533 *p_in_p = in_p;
4534 return arg0;
4535
4536 default:
4537 return NULL_TREE;
4538 }
4539 }
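/* Illustrative walk-through (hypothetical expression, with all
   arithmetic in unsigned char): for X + 10 < 20, the LT_EXPR step
   first produces - [20, -], which the unsignedness adjustment reverses
   into + [0, 19]. The PLUS_EXPR step then subtracts 10 from both
   bounds; the low bound wraps to 246, and the normalize: code turns
   the wrapped [246, 9] into the inverted range - [10, 245], i.e.
   X <= 9 || X >= 246. */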
4540
4541 /* Given EXP, a logical expression, set the range it is testing into
4542 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4543 actually being tested. *PLOW and *PHIGH will be made of the same
4544 type as the returned expression. If EXP is not a comparison, we
4545 will most likely not be returning a useful value and range. Set
4546 *STRICT_OVERFLOW_P to true if the return value is only valid
4547 because signed overflow is undefined; otherwise, do not change
4548 *STRICT_OVERFLOW_P. */
4549
4550 tree
4551 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4552 bool *strict_overflow_p)
4553 {
4554 enum tree_code code;
4555 tree arg0, arg1 = NULL_TREE;
4556 tree exp_type, nexp;
4557 int in_p;
4558 tree low, high;
4559 location_t loc = EXPR_LOCATION (exp);
4560
4561 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4562 and see if we can refine the range. Some of the cases below may not
4563 happen, but it doesn't seem worth worrying about this. We keep
4564 iterating as long as make_range_step can refine the range; when it
4565 returns NULL_TREE, no further refinement is possible and we stop. */
4566
4567 in_p = 0;
4568 low = high = build_int_cst (TREE_TYPE (exp), 0);
4569
4570 while (1)
4571 {
4572 code = TREE_CODE (exp);
4573 exp_type = TREE_TYPE (exp);
4574 arg0 = NULL_TREE;
4575
4576 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4577 {
4578 if (TREE_OPERAND_LENGTH (exp) > 0)
4579 arg0 = TREE_OPERAND (exp, 0);
4580 if (TREE_CODE_CLASS (code) == tcc_binary
4581 || TREE_CODE_CLASS (code) == tcc_comparison
4582 || (TREE_CODE_CLASS (code) == tcc_expression
4583 && TREE_OPERAND_LENGTH (exp) > 1))
4584 arg1 = TREE_OPERAND (exp, 1);
4585 }
4586 if (arg0 == NULL_TREE)
4587 break;
4588
4589 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4590 &high, &in_p, strict_overflow_p);
4591 if (nexp == NULL_TREE)
4592 break;
4593 exp = nexp;
4594 }
4595
4596 /* If EXP is a constant, we can evaluate whether this is true or false. */
4597 if (TREE_CODE (exp) == INTEGER_CST)
4598 {
4599 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4600 exp, 0, low, 0))
4601 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4602 exp, 1, high, 1)));
4603 low = high = 0;
4604 exp = 0;
4605 }
4606
4607 *pin_p = in_p, *plow = low, *phigh = high;
4608 return exp;
4609 }
4610 \f
4611 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4612 type, TYPE, return an expression to test if EXP is in (or out of, depending
4613 on IN_P) the range. Return 0 if the test couldn't be created. */
4614
4615 tree
4616 build_range_check (location_t loc, tree type, tree exp, int in_p,
4617 tree low, tree high)
4618 {
4619 tree etype = TREE_TYPE (exp), value;
4620
4621 /* Disable this optimization for function pointer expressions
4622 on targets that require function pointer canonicalization. */
4623 if (targetm.have_canonicalize_funcptr_for_compare ()
4624 && TREE_CODE (etype) == POINTER_TYPE
4625 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4626 return NULL_TREE;
4627
4628 if (! in_p)
4629 {
4630 value = build_range_check (loc, type, exp, 1, low, high);
4631 if (value != 0)
4632 return invert_truthvalue_loc (loc, value);
4633
4634 return 0;
4635 }
4636
4637 if (low == 0 && high == 0)
4638 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4639
4640 if (low == 0)
4641 return fold_build2_loc (loc, LE_EXPR, type, exp,
4642 fold_convert_loc (loc, etype, high));
4643
4644 if (high == 0)
4645 return fold_build2_loc (loc, GE_EXPR, type, exp,
4646 fold_convert_loc (loc, etype, low));
4647
4648 if (operand_equal_p (low, high, 0))
4649 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4650 fold_convert_loc (loc, etype, low));
4651
4652 if (integer_zerop (low))
4653 {
4654 if (! TYPE_UNSIGNED (etype))
4655 {
4656 etype = unsigned_type_for (etype);
4657 high = fold_convert_loc (loc, etype, high);
4658 exp = fold_convert_loc (loc, etype, exp);
4659 }
4660 return build_range_check (loc, type, exp, 1, 0, high);
4661 }
4662
4663 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4664 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4665 {
4666 int prec = TYPE_PRECISION (etype);
4667
4668 if (wi::mask (prec - 1, false, prec) == high)
4669 {
4670 if (TYPE_UNSIGNED (etype))
4671 {
4672 tree signed_etype = signed_type_for (etype);
4673 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4674 etype
4675 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4676 else
4677 etype = signed_etype;
4678 exp = fold_convert_loc (loc, etype, exp);
4679 }
4680 return fold_build2_loc (loc, GT_EXPR, type, exp,
4681 build_int_cst (etype, 0));
4682 }
4683 }
4684
4685 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4686 This requires wrap-around arithmetic for the type of the expression.
4687 First make sure that arithmetic in this type is valid, then make sure
4688 that it wraps around. */
4689 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4690 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4691 TYPE_UNSIGNED (etype));
4692
4693 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4694 {
4695 tree utype, minv, maxv;
4696
4697 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4698 for the type in question, as we rely on this here. */
4699 utype = unsigned_type_for (etype);
4700 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4701 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4702 build_int_cst (TREE_TYPE (maxv), 1), 1);
4703 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4704
4705 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4706 minv, 1, maxv, 1)))
4707 etype = utype;
4708 else
4709 return 0;
4710 }
4711
4712 high = fold_convert_loc (loc, etype, high);
4713 low = fold_convert_loc (loc, etype, low);
4714 exp = fold_convert_loc (loc, etype, exp);
4715
4716 value = const_binop (MINUS_EXPR, high, low);
4717
4718
4719 if (POINTER_TYPE_P (etype))
4720 {
4721 if (value != 0 && !TREE_OVERFLOW (value))
4722 {
4723 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4724 return build_range_check (loc, type,
4725 fold_build_pointer_plus_loc (loc, exp, low),
4726 1, build_int_cst (etype, 0), value);
4727 }
4728 return 0;
4729 }
4730
4731 if (value != 0 && !TREE_OVERFLOW (value))
4732 return build_range_check (loc, type,
4733 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4734 1, build_int_cst (etype, 0), value);
4735
4736 return 0;
4737 }
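/* Sketch of the signed case (hypothetical values): to check C in
   + [-5, 5] for 32-bit int C, the code above first verifies that
   (unsigned) INT_MAX + 1 == (unsigned) INT_MIN, switches to the
   unsigned type, and emits the equivalent of
   (unsigned int) C - (unsigned int) -5 <= 10U. */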
4738 \f
4739 /* Return the predecessor of VAL in its type, handling the infinite case. */
4740
4741 static tree
4742 range_predecessor (tree val)
4743 {
4744 tree type = TREE_TYPE (val);
4745
4746 if (INTEGRAL_TYPE_P (type)
4747 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4748 return 0;
4749 else
4750 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4751 build_int_cst (TREE_TYPE (val), 1), 0);
4752 }
4753
4754 /* Return the successor of VAL in its type, handling the infinite case. */
4755
4756 static tree
4757 range_successor (tree val)
4758 {
4759 tree type = TREE_TYPE (val);
4760
4761 if (INTEGRAL_TYPE_P (type)
4762 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4763 return 0;
4764 else
4765 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4766 build_int_cst (TREE_TYPE (val), 1), 0);
4767 }
4768
4769 /* Given two ranges, see if we can merge them into one. Return true if we
4770 can, false if we can't. Set the output range into the specified parameters. */
4771
4772 bool
4773 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4774 tree high0, int in1_p, tree low1, tree high1)
4775 {
4776 int no_overlap;
4777 int subset;
4778 int temp;
4779 tree tem;
4780 int in_p;
4781 tree low, high;
4782 int lowequal = ((low0 == 0 && low1 == 0)
4783 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4784 low0, 0, low1, 0)));
4785 int highequal = ((high0 == 0 && high1 == 0)
4786 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4787 high0, 1, high1, 1)));
4788
4789 /* Make range 0 be the range that starts first, or ends last if they
4790 start at the same value. Swap them if it isn't. */
4791 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4792 low0, 0, low1, 0))
4793 || (lowequal
4794 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4795 high1, 1, high0, 1))))
4796 {
4797 temp = in0_p, in0_p = in1_p, in1_p = temp;
4798 tem = low0, low0 = low1, low1 = tem;
4799 tem = high0, high0 = high1, high1 = tem;
4800 }
4801
4802 /* Now flag two cases, whether the ranges are disjoint or whether the
4803 second range is totally subsumed in the first. Note that the tests
4804 below are simplified by the ones above. */
4805 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4806 high0, 1, low1, 0));
4807 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4808 high1, 1, high0, 1));
4809
4810 /* We now have four cases, depending on whether we are including or
4811 excluding the two ranges. */
4812 if (in0_p && in1_p)
4813 {
4814 /* If they don't overlap, the result is false. If the second range
4815 is a subset it is the result. Otherwise, the range is from the start
4816 of the second to the end of the first. */
4817 if (no_overlap)
4818 in_p = 0, low = high = 0;
4819 else if (subset)
4820 in_p = 1, low = low1, high = high1;
4821 else
4822 in_p = 1, low = low1, high = high0;
4823 }
4824
4825 else if (in0_p && ! in1_p)
4826 {
4827 /* If they don't overlap, the result is the first range. If they are
4828 equal, the result is false. If the second range is a subset of the
4829 first, and the ranges begin at the same place, we go from just after
4830 the end of the second range to the end of the first. If the second
4831 range is not a subset of the first, or if it is a subset and both
4832 ranges end at the same place, the range starts at the start of the
4833 first range and ends just before the second range.
4834 Otherwise, we can't describe this as a single range. */
4835 if (no_overlap)
4836 in_p = 1, low = low0, high = high0;
4837 else if (lowequal && highequal)
4838 in_p = 0, low = high = 0;
4839 else if (subset && lowequal)
4840 {
4841 low = range_successor (high1);
4842 high = high0;
4843 in_p = 1;
4844 if (low == 0)
4845 {
4846 /* We are in the weird situation where high0 > high1 but
4847 high1 has no successor. Punt. */
4848 return 0;
4849 }
4850 }
4851 else if (! subset || highequal)
4852 {
4853 low = low0;
4854 high = range_predecessor (low1);
4855 in_p = 1;
4856 if (high == 0)
4857 {
4858 /* low0 < low1 but low1 has no predecessor. Punt. */
4859 return 0;
4860 }
4861 }
4862 else
4863 return 0;
4864 }
4865
4866 else if (! in0_p && in1_p)
4867 {
4868 /* If they don't overlap, the result is the second range. If the second
4869 is a subset of the first, the result is false. Otherwise,
4870 the range starts just after the first range and ends at the
4871 end of the second. */
4872 if (no_overlap)
4873 in_p = 1, low = low1, high = high1;
4874 else if (subset || highequal)
4875 in_p = 0, low = high = 0;
4876 else
4877 {
4878 low = range_successor (high0);
4879 high = high1;
4880 in_p = 1;
4881 if (low == 0)
4882 {
4883 /* high1 > high0 but high0 has no successor. Punt. */
4884 return 0;
4885 }
4886 }
4887 }
4888
4889 else
4890 {
4891 /* The case where we are excluding both ranges. Here the complex case
4892 is if they don't overlap. In that case, the only time we have a
4893 range is if they are adjacent. If the second is a subset of the
4894 first, the result is the first. Otherwise, the range to exclude
4895 starts at the beginning of the first range and ends at the end of the
4896 second. */
4897 if (no_overlap)
4898 {
4899 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4900 range_successor (high0),
4901 1, low1, 0)))
4902 in_p = 0, low = low0, high = high1;
4903 else
4904 {
4905 /* Canonicalize - [min, x] into - [-, x]. */
4906 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4907 switch (TREE_CODE (TREE_TYPE (low0)))
4908 {
4909 case ENUMERAL_TYPE:
4910 if (TYPE_PRECISION (TREE_TYPE (low0))
4911 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4912 break;
4913 /* FALLTHROUGH */
4914 case INTEGER_TYPE:
4915 if (tree_int_cst_equal (low0,
4916 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4917 low0 = 0;
4918 break;
4919 case POINTER_TYPE:
4920 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4921 && integer_zerop (low0))
4922 low0 = 0;
4923 break;
4924 default:
4925 break;
4926 }
4927
4928 /* Canonicalize - [x, max] into - [x, -]. */
4929 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4930 switch (TREE_CODE (TREE_TYPE (high1)))
4931 {
4932 case ENUMERAL_TYPE:
4933 if (TYPE_PRECISION (TREE_TYPE (high1))
4934 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4935 break;
4936 /* FALLTHROUGH */
4937 case INTEGER_TYPE:
4938 if (tree_int_cst_equal (high1,
4939 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4940 high1 = 0;
4941 break;
4942 case POINTER_TYPE:
4943 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4944 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4945 high1, 1,
4946 build_int_cst (TREE_TYPE (high1), 1),
4947 1)))
4948 high1 = 0;
4949 break;
4950 default:
4951 break;
4952 }
4953
4954 /* The ranges might also be adjacent between the maximum and
4955 minimum values of the given type. For
4956 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4957 return + [x + 1, y - 1]. */
4958 if (low0 == 0 && high1 == 0)
4959 {
4960 low = range_successor (high0);
4961 high = range_predecessor (low1);
4962 if (low == 0 || high == 0)
4963 return 0;
4964
4965 in_p = 1;
4966 }
4967 else
4968 return 0;
4969 }
4970 }
4971 else if (subset)
4972 in_p = 0, low = low0, high = high0;
4973 else
4974 in_p = 0, low = low0, high = high1;
4975 }
4976
4977 *pin_p = in_p, *plow = low, *phigh = high;
4978 return 1;
4979 }
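/* Two illustrative combinations for the both-excluded case: - [-, 3]
   with - [6, -], i.e. X > 3 && X < 6, leaves exactly the gap + [4, 5];
   the adjacent pair - [-, 3] with - [4, -], i.e. X > 3 && X < 4,
   merges to the always-false range - [-, -]. */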
4980 \f
4981
4982 /* Subroutine of fold, looking inside expressions of the form
4983 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4984 of the COND_EXPR. This function is being used also to optimize
4985 A op B ? C : A, by reversing the comparison first.
4986
4987 Return a folded expression whose code is not a COND_EXPR
4988 anymore, or NULL_TREE if no folding opportunity is found. */
4989
4990 static tree
4991 fold_cond_expr_with_comparison (location_t loc, tree type,
4992 tree arg0, tree arg1, tree arg2)
4993 {
4994 enum tree_code comp_code = TREE_CODE (arg0);
4995 tree arg00 = TREE_OPERAND (arg0, 0);
4996 tree arg01 = TREE_OPERAND (arg0, 1);
4997 tree arg1_type = TREE_TYPE (arg1);
4998 tree tem;
4999
5000 STRIP_NOPS (arg1);
5001 STRIP_NOPS (arg2);
5002
5003 /* If we have A op 0 ? A : -A, consider applying the following
5004 transformations:
5005
5006 A == 0? A : -A same as -A
5007 A != 0? A : -A same as A
5008 A >= 0? A : -A same as abs (A)
5009 A > 0? A : -A same as abs (A)
5010 A <= 0? A : -A same as -abs (A)
5011 A < 0? A : -A same as -abs (A)
5012
5013 None of these transformations work for modes with signed
5014 zeros. If A is +/-0, the first two transformations will
5015 change the sign of the result (from +0 to -0, or vice
5016 versa). The last four will fix the sign of the result,
5017 even though the original expressions could be positive or
5018 negative, depending on the sign of A.
5019
5020 Note that all these transformations are correct if A is
5021 NaN, since the two alternatives (A and -A) are also NaNs. */
5022 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5023 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5024 ? real_zerop (arg01)
5025 : integer_zerop (arg01))
5026 && ((TREE_CODE (arg2) == NEGATE_EXPR
5027 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5028 /* In the case that A is of the form X-Y, '-A' (arg2) may
5029 have already been folded to Y-X; check for that. */
5030 || (TREE_CODE (arg1) == MINUS_EXPR
5031 && TREE_CODE (arg2) == MINUS_EXPR
5032 && operand_equal_p (TREE_OPERAND (arg1, 0),
5033 TREE_OPERAND (arg2, 1), 0)
5034 && operand_equal_p (TREE_OPERAND (arg1, 1),
5035 TREE_OPERAND (arg2, 0), 0))))
5036 switch (comp_code)
5037 {
5038 case EQ_EXPR:
5039 case UNEQ_EXPR:
5040 tem = fold_convert_loc (loc, arg1_type, arg1);
5041 return pedantic_non_lvalue_loc (loc,
5042 fold_convert_loc (loc, type,
5043 negate_expr (tem)));
5044 case NE_EXPR:
5045 case LTGT_EXPR:
5046 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5047 case UNGE_EXPR:
5048 case UNGT_EXPR:
5049 if (flag_trapping_math)
5050 break;
5051 /* Fall through. */
5052 case GE_EXPR:
5053 case GT_EXPR:
5054 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5055 break;
5056 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5057 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5058 case UNLE_EXPR:
5059 case UNLT_EXPR:
5060 if (flag_trapping_math)
5061 break;
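/* Fall through. */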
5062 case LE_EXPR:
5063 case LT_EXPR:
5064 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5065 break;
5066 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5067 return negate_expr (fold_convert_loc (loc, type, tem));
5068 default:
5069 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5070 break;
5071 }
5072
5073 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5074 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5075 both transformations are correct when A is NaN: A != 0
5076 is then true, and A == 0 is false. */
5077
5078 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5079 && integer_zerop (arg01) && integer_zerop (arg2))
5080 {
5081 if (comp_code == NE_EXPR)
5082 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5083 else if (comp_code == EQ_EXPR)
5084 return build_zero_cst (type);
5085 }
5086
5087 /* Try some transformations of A op B ? A : B.
5088
5089 A == B? A : B same as B
5090 A != B? A : B same as A
5091 A >= B? A : B same as max (A, B)
5092 A > B? A : B same as max (B, A)
5093 A <= B? A : B same as min (A, B)
5094 A < B? A : B same as min (B, A)
5095
5096 As above, these transformations don't work in the presence
5097 of signed zeros. For example, if A and B are zeros of
5098 opposite sign, the first two transformations will change
5099 the sign of the result. In the last four, the original
5100 expressions give different results for (A=+0, B=-0) and
5101 (A=-0, B=+0), but the transformed expressions do not.
5102
5103 The first two transformations are correct if either A or B
5104 is a NaN. In the first transformation, the condition will
5105 be false, and B will indeed be chosen. In the case of the
5106 second transformation, the condition A != B will be true,
5107 and A will be chosen.
5108
5109 The conversions to max() and min() are not correct if B is
5110 a number and A is not. The conditions in the original
5111 expressions will be false, so all four give B. The min()
5112 and max() versions would give a NaN instead. */
5113 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5114 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5115 /* Avoid these transformations if the COND_EXPR may be used
5116 as an lvalue in the C++ front-end. PR c++/19199. */
5117 && (in_gimple_form
5118 || VECTOR_TYPE_P (type)
5119 || (! lang_GNU_CXX ()
5120 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5121 || ! maybe_lvalue_p (arg1)
5122 || ! maybe_lvalue_p (arg2)))
5123 {
5124 tree comp_op0 = arg00;
5125 tree comp_op1 = arg01;
5126 tree comp_type = TREE_TYPE (comp_op0);
5127
5128 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5129 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5130 {
5131 comp_type = type;
5132 comp_op0 = arg1;
5133 comp_op1 = arg2;
5134 }
5135
5136 switch (comp_code)
5137 {
5138 case EQ_EXPR:
5139 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5140 case NE_EXPR:
5141 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5142 case LE_EXPR:
5143 case LT_EXPR:
5144 case UNLE_EXPR:
5145 case UNLT_EXPR:
5146 /* In C++ a ?: expression can be an lvalue, so put the
5147 operand which will be used if they are equal first
5148 so that we can convert this back to the
5149 corresponding COND_EXPR. */
5150 if (!HONOR_NANS (arg1))
5151 {
5152 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5153 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5154 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5155 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5156 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5157 comp_op1, comp_op0);
5158 return pedantic_non_lvalue_loc (loc,
5159 fold_convert_loc (loc, type, tem));
5160 }
5161 break;
5162 case GE_EXPR:
5163 case GT_EXPR:
5164 case UNGE_EXPR:
5165 case UNGT_EXPR:
5166 if (!HONOR_NANS (arg1))
5167 {
5168 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5169 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5170 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5171 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5172 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5173 comp_op1, comp_op0);
5174 return pedantic_non_lvalue_loc (loc,
5175 fold_convert_loc (loc, type, tem));
5176 }
5177 break;
5178 case UNEQ_EXPR:
5179 if (!HONOR_NANS (arg1))
5180 return pedantic_non_lvalue_loc (loc,
5181 fold_convert_loc (loc, type, arg2));
5182 break;
5183 case LTGT_EXPR:
5184 if (!HONOR_NANS (arg1))
5185 return pedantic_non_lvalue_loc (loc,
5186 fold_convert_loc (loc, type, arg1));
5187 break;
5188 default:
5189 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5190 break;
5191 }
5192 }
5193
5194 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5195 we might still be able to simplify this. For example,
5196 if C1 is one less or one more than C2, this might have started
5197 out as a MIN or MAX and been transformed by this function.
5198 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5199
5200 if (INTEGRAL_TYPE_P (type)
5201 && TREE_CODE (arg01) == INTEGER_CST
5202 && TREE_CODE (arg2) == INTEGER_CST)
5203 switch (comp_code)
5204 {
5205 case EQ_EXPR:
5206 if (TREE_CODE (arg1) == INTEGER_CST)
5207 break;
5208 /* We can replace A with C1 in this case. */
5209 arg1 = fold_convert_loc (loc, type, arg01);
5210 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5211
5212 case LT_EXPR:
5213 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5214 MIN_EXPR, to preserve the signedness of the comparison. */
5215 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5216 OEP_ONLY_CONST)
5217 && operand_equal_p (arg01,
5218 const_binop (PLUS_EXPR, arg2,
5219 build_int_cst (type, 1)),
5220 OEP_ONLY_CONST))
5221 {
5222 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5223 fold_convert_loc (loc, TREE_TYPE (arg00),
5224 arg2));
5225 return pedantic_non_lvalue_loc (loc,
5226 fold_convert_loc (loc, type, tem));
5227 }
5228 break;
5229
5230 case LE_EXPR:
5231 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5232 as above. */
5233 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5234 OEP_ONLY_CONST)
5235 && operand_equal_p (arg01,
5236 const_binop (MINUS_EXPR, arg2,
5237 build_int_cst (type, 1)),
5238 OEP_ONLY_CONST))
5239 {
5240 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5241 fold_convert_loc (loc, TREE_TYPE (arg00),
5242 arg2));
5243 return pedantic_non_lvalue_loc (loc,
5244 fold_convert_loc (loc, type, tem));
5245 }
5246 break;
5247
5248 case GT_EXPR:
5249 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5250 MAX_EXPR, to preserve the signedness of the comparison. */
5251 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5252 OEP_ONLY_CONST)
5253 && operand_equal_p (arg01,
5254 const_binop (MINUS_EXPR, arg2,
5255 build_int_cst (type, 1)),
5256 OEP_ONLY_CONST))
5257 {
5258 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5259 fold_convert_loc (loc, TREE_TYPE (arg00),
5260 arg2));
5261 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5262 }
5263 break;
5264
5265 case GE_EXPR:
5266 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5267 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5268 OEP_ONLY_CONST)
5269 && operand_equal_p (arg01,
5270 const_binop (PLUS_EXPR, arg2,
5271 build_int_cst (type, 1)),
5272 OEP_ONLY_CONST))
5273 {
5274 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5275 fold_convert_loc (loc, TREE_TYPE (arg00),
5276 arg2));
5277 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5278 }
5279 break;
5280 case NE_EXPR:
5281 break;
5282 default:
5283 gcc_unreachable ();
5284 }
5285
5286 return NULL_TREE;
5287 }
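/* Concrete instances of the transformations above (illustrative):
   x >= 0 ? x : -x folds to ABS_EXPR <x>, x < 0 ? x : -x folds to
   -ABS_EXPR <x>, and a >= b ? a : b becomes MAX_EXPR <a, b>, in each
   case only when the signed-zero and NaN conditions documented above
   allow it. */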
5288
5289
5290 \f
5291 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5292 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5293 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5294 false) >= 2)
5295 #endif
5296
5297 /* EXP is some logical combination of boolean tests. See if we can
5298 merge it into some range test. Return the new tree if so. */
5299
5300 static tree
5301 fold_range_test (location_t loc, enum tree_code code, tree type,
5302 tree op0, tree op1)
5303 {
5304 int or_op = (code == TRUTH_ORIF_EXPR
5305 || code == TRUTH_OR_EXPR);
5306 int in0_p, in1_p, in_p;
5307 tree low0, low1, low, high0, high1, high;
5308 bool strict_overflow_p = false;
5309 tree tem, lhs, rhs;
5310 const char * const warnmsg = G_("assuming signed overflow does not occur "
5311 "when simplifying range test");
5312
5313 if (!INTEGRAL_TYPE_P (type))
5314 return 0;
5315
5316 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5317 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5318
5319 /* If this is an OR operation, invert both sides; we will invert
5320 again at the end. */
5321 if (or_op)
5322 in0_p = ! in0_p, in1_p = ! in1_p;
5323
5324 /* If both expressions are the same, if we can merge the ranges, and we
5325 can build the range test, return it or it inverted. If one of the
5326 ranges is always true or always false, consider it to be the same
5327 expression as the other. */
5328 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5329 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5330 in1_p, low1, high1)
5331 && 0 != (tem = (build_range_check (loc, type,
5332 lhs != 0 ? lhs
5333 : rhs != 0 ? rhs : integer_zero_node,
5334 in_p, low, high))))
5335 {
5336 if (strict_overflow_p)
5337 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5338 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5339 }
5340
5341 /* On machines where the branch cost is expensive, if this is a
5342 short-circuited branch and the underlying object on both sides
5343 is the same, make a non-short-circuit operation. */
5344 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5345 && lhs != 0 && rhs != 0
5346 && (code == TRUTH_ANDIF_EXPR
5347 || code == TRUTH_ORIF_EXPR)
5348 && operand_equal_p (lhs, rhs, 0))
5349 {
5350 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5351 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5352 which cases we can't do this. */
5353 if (simple_operand_p (lhs))
5354 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5355 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5356 type, op0, op1);
5357
5358 else if (!lang_hooks.decls.global_bindings_p ()
5359 && !CONTAINS_PLACEHOLDER_P (lhs))
5360 {
5361 tree common = save_expr (lhs);
5362
5363 if (0 != (lhs = build_range_check (loc, type, common,
5364 or_op ? ! in0_p : in0_p,
5365 low0, high0))
5366 && (0 != (rhs = build_range_check (loc, type, common,
5367 or_op ? ! in1_p : in1_p,
5368 low1, high1))))
5369 {
5370 if (strict_overflow_p)
5371 fold_overflow_warning (warnmsg,
5372 WARN_STRICT_OVERFLOW_COMPARISON);
5373 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5374 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5375 type, lhs, rhs);
5376 }
5377 }
5378 }
5379
5380 return 0;
5381 }
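
/* An illustrative sketch of the merge above: for a test such as

     ch >= '0' && ch <= '9'

   make_range yields the single range ['0', '9'] for CH, and
   build_range_check can then emit one unsigned comparison along the
   lines of

     (unsigned) (ch - '0') <= 9

   (the exact form is chosen by build_range_check).  */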
5382 \f
5383 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5384 P-bit value. Arrange things so the extra bits will be set to zero if and
5385 only if C is sign-extended to its full width. If MASK is nonzero,
5386 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5387
5388 static tree
5389 unextend (tree c, int p, int unsignedp, tree mask)
5390 {
5391 tree type = TREE_TYPE (c);
5392 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5393 tree temp;
5394
5395 if (p == modesize || unsignedp)
5396 return c;
5397
5398 /* We work by getting just the sign bit into the low-order bit, then
5399 into the high-order bit, then sign-extend. We then XOR that value
5400 with C. */
5401 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5402
5403 /* We must use a signed type in order to get an arithmetic right shift.
5404 However, we must also avoid introducing accidental overflows, so that
5405 a subsequent call to integer_zerop will work. Hence we must
5406 do the type conversion here. At this point, the constant is either
5407 zero or one, and the conversion to a signed type can never overflow.
5408 We could get an overflow if this conversion is done anywhere else. */
5409 if (TYPE_UNSIGNED (type))
5410 temp = fold_convert (signed_type_for (type), temp);
5411
5412 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5413 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5414 if (mask != 0)
5415 temp = const_binop (BIT_AND_EXPR, temp,
5416 fold_convert (TREE_TYPE (c), mask));
5417 /* If necessary, convert the type back to match the type of C. */
5418 if (TYPE_UNSIGNED (type))
5419 temp = fold_convert (type, temp);
5420
5421 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5422 }
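
/* A worked example, assuming an 8-bit mode, P == 4 and a zero MASK:
   for C == 0xFA, which is 0x0A sign-extended from 4 bits, TEMP becomes
   0xF0 and the result 0xFA ^ 0xF0 == 0x0A has all extra bits zero.
   For C == 0x0A, which is not sign-extended, the result is 0xFA and
   the extra bits are nonzero.  */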
5423 \f
5424 /* For an expression that has the form
5425 (A && B) || ~B
5426 or
5427 (A || B) && ~B,
5428 we can drop one of the inner expressions and simplify to
5429 A || ~B
5430 or
5431 A && ~B.
5432 LOC is the location of the resulting expression. OP is the inner
5433 logical operation; the left-hand side in the examples above, while CMPOP
5434 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5435 removing a condition that guards another, as in
5436 (A != NULL && A->...) || A == NULL
5437 which we must not transform. If RHS_ONLY is true, only eliminate the
5438 right-most operand of the inner logical operation. */
5439
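/* For example, with RHS_ONLY false and NaNs not honored,

     (x < 5 && y) || x >= 5

   calls this routine with OP == (x < 5 && y) and CMPOP == x >= 5;
   x < 5 is the inverse comparison of CMPOP, so RHS (here Y) is
   returned and the whole test simplifies to y || x >= 5.  */
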
5440 static tree
5441 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5442 bool rhs_only)
5443 {
5444 tree type = TREE_TYPE (cmpop);
5445 enum tree_code code = TREE_CODE (cmpop);
5446 enum tree_code truthop_code = TREE_CODE (op);
5447 tree lhs = TREE_OPERAND (op, 0);
5448 tree rhs = TREE_OPERAND (op, 1);
5449 tree orig_lhs = lhs, orig_rhs = rhs;
5450 enum tree_code rhs_code = TREE_CODE (rhs);
5451 enum tree_code lhs_code = TREE_CODE (lhs);
5452 enum tree_code inv_code;
5453
5454 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5455 return NULL_TREE;
5456
5457 if (TREE_CODE_CLASS (code) != tcc_comparison)
5458 return NULL_TREE;
5459
5460 if (rhs_code == truthop_code)
5461 {
5462 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5463 if (newrhs != NULL_TREE)
5464 {
5465 rhs = newrhs;
5466 rhs_code = TREE_CODE (rhs);
5467 }
5468 }
5469 if (lhs_code == truthop_code && !rhs_only)
5470 {
5471 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5472 if (newlhs != NULL_TREE)
5473 {
5474 lhs = newlhs;
5475 lhs_code = TREE_CODE (lhs);
5476 }
5477 }
5478
5479 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5480 if (inv_code == rhs_code
5481 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5482 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5483 return lhs;
5484 if (!rhs_only && inv_code == lhs_code
5485 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5486 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5487 return rhs;
5488 if (rhs != orig_rhs || lhs != orig_lhs)
5489 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5490 lhs, rhs);
5491 return NULL_TREE;
5492 }
5493
5494 /* Find ways of folding logical expressions of LHS and RHS:
5495 Try to merge two comparisons to the same innermost item.
5496 Look for range tests like "ch >= '0' && ch <= '9'".
5497 Look for combinations of simple terms on machines with expensive branches
5498 and evaluate the RHS unconditionally.
5499
5500 For example, if we have p->a == 2 && p->b == 4 and we can make an
5501 object large enough to span both A and B, we can do this with a comparison
5502 against the object ANDed with a mask.
5503
5504 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5505 operations to do this with one comparison.
5506
5507 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5508 function and the one above.
5509
5510 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5511 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5512
5513 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5514 two operands.
5515
5516 We return the simplified tree or 0 if no optimization is possible. */
5517
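/* A sketch of the merge performed below, assuming A and B are adjacent
   bitfields that fit in one directly loadable word:

     p->a == 2 && p->b == 4

   can become a single load of the containing word, one AND with a mask
   covering both fields, and one comparison against the constant formed
   by shifting 2 and 4 into their respective field positions.  */
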
5518 static tree
5519 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5520 tree lhs, tree rhs)
5521 {
5522 /* If this is the "or" of two comparisons, we can do something if
5523 the comparisons are NE_EXPR. If this is the "and", we can do something
5524 if the comparisons are EQ_EXPR. I.e.,
5525 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5526
5527 WANTED_CODE is this operation code. For single bit fields, we can
5528 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5529 comparison for one-bit fields. */
5530
5531 enum tree_code wanted_code;
5532 enum tree_code lcode, rcode;
5533 tree ll_arg, lr_arg, rl_arg, rr_arg;
5534 tree ll_inner, lr_inner, rl_inner, rr_inner;
5535 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5536 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5537 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5538 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5539 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5540 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5541 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5542 machine_mode lnmode, rnmode;
5543 tree ll_mask, lr_mask, rl_mask, rr_mask;
5544 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5545 tree l_const, r_const;
5546 tree lntype, rntype, result;
5547 HOST_WIDE_INT first_bit, end_bit;
5548 int volatilep;
5549
5550 /* Start by getting the comparison codes. Fail if anything is volatile.
5551 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5552 it were surrounded with a NE_EXPR. */
5553
5554 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5555 return 0;
5556
5557 lcode = TREE_CODE (lhs);
5558 rcode = TREE_CODE (rhs);
5559
5560 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5561 {
5562 lhs = build2 (NE_EXPR, truth_type, lhs,
5563 build_int_cst (TREE_TYPE (lhs), 0));
5564 lcode = NE_EXPR;
5565 }
5566
5567 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5568 {
5569 rhs = build2 (NE_EXPR, truth_type, rhs,
5570 build_int_cst (TREE_TYPE (rhs), 0));
5571 rcode = NE_EXPR;
5572 }
5573
5574 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5575 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5576 return 0;
5577
5578 ll_arg = TREE_OPERAND (lhs, 0);
5579 lr_arg = TREE_OPERAND (lhs, 1);
5580 rl_arg = TREE_OPERAND (rhs, 0);
5581 rr_arg = TREE_OPERAND (rhs, 1);
5582
5583 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5584 if (simple_operand_p (ll_arg)
5585 && simple_operand_p (lr_arg))
5586 {
5587 if (operand_equal_p (ll_arg, rl_arg, 0)
5588 && operand_equal_p (lr_arg, rr_arg, 0))
5589 {
5590 result = combine_comparisons (loc, code, lcode, rcode,
5591 truth_type, ll_arg, lr_arg);
5592 if (result)
5593 return result;
5594 }
5595 else if (operand_equal_p (ll_arg, rr_arg, 0)
5596 && operand_equal_p (lr_arg, rl_arg, 0))
5597 {
5598 result = combine_comparisons (loc, code, lcode,
5599 swap_tree_comparison (rcode),
5600 truth_type, ll_arg, lr_arg);
5601 if (result)
5602 return result;
5603 }
5604 }
5605
5606 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5607 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5608
5609 /* If the RHS can be evaluated unconditionally and its operands are
5610 simple, it wins to evaluate the RHS unconditionally on machines
5611 with expensive branches. In this case, this isn't a comparison
5612 that can be merged. */
5613
5614 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5615 false) >= 2
5616 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5617 && simple_operand_p (rl_arg)
5618 && simple_operand_p (rr_arg))
5619 {
5620 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5621 if (code == TRUTH_OR_EXPR
5622 && lcode == NE_EXPR && integer_zerop (lr_arg)
5623 && rcode == NE_EXPR && integer_zerop (rr_arg)
5624 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5625 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5626 return build2_loc (loc, NE_EXPR, truth_type,
5627 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5628 ll_arg, rl_arg),
5629 build_int_cst (TREE_TYPE (ll_arg), 0));
5630
5631 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5632 if (code == TRUTH_AND_EXPR
5633 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5634 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5635 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5636 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5637 return build2_loc (loc, EQ_EXPR, truth_type,
5638 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5639 ll_arg, rl_arg),
5640 build_int_cst (TREE_TYPE (ll_arg), 0));
5641 }
5642
5643 /* See if the comparisons can be merged. Then get all the parameters for
5644 each side. */
5645
5646 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5647 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5648 return 0;
5649
5650 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5651 volatilep = 0;
5652 ll_inner = decode_field_reference (loc, ll_arg,
5653 &ll_bitsize, &ll_bitpos, &ll_mode,
5654 &ll_unsignedp, &ll_reversep, &volatilep,
5655 &ll_mask, &ll_and_mask);
5656 lr_inner = decode_field_reference (loc, lr_arg,
5657 &lr_bitsize, &lr_bitpos, &lr_mode,
5658 &lr_unsignedp, &lr_reversep, &volatilep,
5659 &lr_mask, &lr_and_mask);
5660 rl_inner = decode_field_reference (loc, rl_arg,
5661 &rl_bitsize, &rl_bitpos, &rl_mode,
5662 &rl_unsignedp, &rl_reversep, &volatilep,
5663 &rl_mask, &rl_and_mask);
5664 rr_inner = decode_field_reference (loc, rr_arg,
5665 &rr_bitsize, &rr_bitpos, &rr_mode,
5666 &rr_unsignedp, &rr_reversep, &volatilep,
5667 &rr_mask, &rr_and_mask);
5668
5669 /* The inner operation on the lhs of each comparison must be the
5670 same if we are to be able to do anything.
5671 Then see if we have constants. If not, the same must be true for
5672 the rhs's. */
5673 if (volatilep
5674 || ll_reversep != rl_reversep
5675 || ll_inner == 0 || rl_inner == 0
5676 || ! operand_equal_p (ll_inner, rl_inner, 0))
5677 return 0;
5678
5679 if (TREE_CODE (lr_arg) == INTEGER_CST
5680 && TREE_CODE (rr_arg) == INTEGER_CST)
5681 {
5682 l_const = lr_arg, r_const = rr_arg;
5683 lr_reversep = ll_reversep;
5684 }
5685 else if (lr_reversep != rr_reversep
5686 || lr_inner == 0 || rr_inner == 0
5687 || ! operand_equal_p (lr_inner, rr_inner, 0))
5688 return 0;
5689 else
5690 l_const = r_const = 0;
5691
5692 /* If either comparison code is not correct for our logical operation,
5693 fail. However, we can convert a one-bit comparison against zero into
5694 the opposite comparison against that bit being set in the field. */
5695
5696 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5697 if (lcode != wanted_code)
5698 {
5699 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5700 {
5701 /* Make the left operand unsigned, since we are only interested
5702 in the value of one bit. Otherwise we are doing the wrong
5703 thing below. */
5704 ll_unsignedp = 1;
5705 l_const = ll_mask;
5706 }
5707 else
5708 return 0;
5709 }
5710
5711 /* This is analogous to the code for l_const above. */
5712 if (rcode != wanted_code)
5713 {
5714 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5715 {
5716 rl_unsignedp = 1;
5717 r_const = rl_mask;
5718 }
5719 else
5720 return 0;
5721 }
5722
5723 /* See if we can find a mode that contains both fields being compared on
5724 the left. If we can't, fail. Otherwise, update all constants and masks
5725 to be relative to a field of that size. */
5726 first_bit = MIN (ll_bitpos, rl_bitpos);
5727 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5728 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5729 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5730 volatilep);
5731 if (lnmode == VOIDmode)
5732 return 0;
5733
5734 lnbitsize = GET_MODE_BITSIZE (lnmode);
5735 lnbitpos = first_bit & ~ (lnbitsize - 1);
5736 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5737 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5738
5739 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5740 {
5741 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5742 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5743 }
5744
5745 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5746 size_int (xll_bitpos));
5747 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5748 size_int (xrl_bitpos));
5749
5750 if (l_const)
5751 {
5752 l_const = fold_convert_loc (loc, lntype, l_const);
5753 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5754 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5755 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5756 fold_build1_loc (loc, BIT_NOT_EXPR,
5757 lntype, ll_mask))))
5758 {
5759 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5760
5761 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5762 }
5763 }
5764 if (r_const)
5765 {
5766 r_const = fold_convert_loc (loc, lntype, r_const);
5767 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5768 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5769 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5770 fold_build1_loc (loc, BIT_NOT_EXPR,
5771 lntype, rl_mask))))
5772 {
5773 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5774
5775 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5776 }
5777 }
5778
5779 /* If the right sides are not constant, do the same for them. Also,
5780 disallow this optimization if a size or signedness mismatch occurs
5781 between the left and right sides. */
5782 if (l_const == 0)
5783 {
5784 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5785 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5786 /* Make sure the two fields on the right
5787 correspond to the left without being swapped. */
5788 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5789 return 0;
5790
5791 first_bit = MIN (lr_bitpos, rr_bitpos);
5792 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5793 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5794 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5795 volatilep);
5796 if (rnmode == VOIDmode)
5797 return 0;
5798
5799 rnbitsize = GET_MODE_BITSIZE (rnmode);
5800 rnbitpos = first_bit & ~ (rnbitsize - 1);
5801 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5802 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5803
5804 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5805 {
5806 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5807 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5808 }
5809
5810 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5811 rntype, lr_mask),
5812 size_int (xlr_bitpos));
5813 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5814 rntype, rr_mask),
5815 size_int (xrr_bitpos));
5816
5817 /* Make a mask that corresponds to both fields being compared.
5818 Do this for both items being compared. If the operands are the
5819 same size and the bits being compared are in the same position
5820 then we can do this by masking both and comparing the masked
5821 results. */
5822 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5823 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5824 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5825 {
5826 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5827 ll_unsignedp || rl_unsignedp, ll_reversep);
5828 if (! all_ones_mask_p (ll_mask, lnbitsize))
5829 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5830
5831 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5832 lr_unsignedp || rr_unsignedp, lr_reversep);
5833 if (! all_ones_mask_p (lr_mask, rnbitsize))
5834 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5835
5836 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5837 }
5838
5839 /* There is still another way we can do something: If both pairs of
5840 fields being compared are adjacent, we may be able to make a wider
5841 field containing them both.
5842
5843 Note that we still must mask the lhs/rhs expressions. Furthermore,
5844 the mask must be shifted to account for the shift done by
5845 make_bit_field_ref. */
5846 if ((ll_bitsize + ll_bitpos == rl_bitpos
5847 && lr_bitsize + lr_bitpos == rr_bitpos)
5848 || (ll_bitpos == rl_bitpos + rl_bitsize
5849 && lr_bitpos == rr_bitpos + rr_bitsize))
5850 {
5851 tree type;
5852
5853 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5854 ll_bitsize + rl_bitsize,
5855 MIN (ll_bitpos, rl_bitpos),
5856 ll_unsignedp, ll_reversep);
5857 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5858 lr_bitsize + rr_bitsize,
5859 MIN (lr_bitpos, rr_bitpos),
5860 lr_unsignedp, lr_reversep);
5861
5862 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5863 size_int (MIN (xll_bitpos, xrl_bitpos)));
5864 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5865 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5866
5867 /* Convert to the smaller type before masking out unwanted bits. */
5868 type = lntype;
5869 if (lntype != rntype)
5870 {
5871 if (lnbitsize > rnbitsize)
5872 {
5873 lhs = fold_convert_loc (loc, rntype, lhs);
5874 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5875 type = rntype;
5876 }
5877 else if (lnbitsize < rnbitsize)
5878 {
5879 rhs = fold_convert_loc (loc, lntype, rhs);
5880 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5881 type = lntype;
5882 }
5883 }
5884
5885 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5886 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5887
5888 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5889 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5890
5891 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5892 }
5893
5894 return 0;
5895 }
5896
5897 /* Handle the case of comparisons with constants. If there is something in
5898 common between the masks, those bits of the constants must be the same.
5899 If not, the condition is always false. Test for this to avoid generating
5900 incorrect code below. */
5901 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5902 if (! integer_zerop (result)
5903 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5904 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5905 {
5906 if (wanted_code == NE_EXPR)
5907 {
5908 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5909 return constant_boolean_node (true, truth_type);
5910 }
5911 else
5912 {
5913 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5914 return constant_boolean_node (false, truth_type);
5915 }
5916 }
5917
5918 /* Construct the expression we will return. First get the component
5919 reference we will make. Unless the mask is all ones the width of
5920 that field, perform the mask operation. Then compare with the
5921 merged constant. */
5922 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5923 ll_unsignedp || rl_unsignedp, ll_reversep);
5924
5925 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5926 if (! all_ones_mask_p (ll_mask, lnbitsize))
5927 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5928
5929 return build2_loc (loc, wanted_code, truth_type, result,
5930 const_binop (BIT_IOR_EXPR, l_const, r_const));
5931 }
5932 \f
5933 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5934 constant. */
5935
5936 static tree
5937 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5938 tree op0, tree op1)
5939 {
5940 tree arg0 = op0;
5941 enum tree_code op_code;
5942 tree comp_const;
5943 tree minmax_const;
5944 int consts_equal, consts_lt;
5945 tree inner;
5946
5947 STRIP_SIGN_NOPS (arg0);
5948
5949 op_code = TREE_CODE (arg0);
5950 minmax_const = TREE_OPERAND (arg0, 1);
5951 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5952 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5953 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5954 inner = TREE_OPERAND (arg0, 0);
5955
5956 /* If something does not permit us to optimize, return NULL_TREE. */
5957 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5958 || TREE_CODE (comp_const) != INTEGER_CST
5959 || TREE_OVERFLOW (comp_const)
5960 || TREE_CODE (minmax_const) != INTEGER_CST
5961 || TREE_OVERFLOW (minmax_const))
5962 return NULL_TREE;
5963
5964 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5965 and GT_EXPR, doing the rest with recursive calls using logical
5966 simplifications. */
5967 switch (code)
5968 {
5969 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5970 {
5971 tree tem
5972 = optimize_minmax_comparison (loc,
5973 invert_tree_comparison (code, false),
5974 type, op0, op1);
5975 if (tem)
5976 return invert_truthvalue_loc (loc, tem);
5977 return NULL_TREE;
5978 }
5979
5980 case GE_EXPR:
5981 return
5982 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5983 optimize_minmax_comparison
5984 (loc, EQ_EXPR, type, arg0, comp_const),
5985 optimize_minmax_comparison
5986 (loc, GT_EXPR, type, arg0, comp_const));
5987
5988 case EQ_EXPR:
5989 if (op_code == MAX_EXPR && consts_equal)
5990 /* MAX (X, 0) == 0 -> X <= 0 */
5991 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5992
5993 else if (op_code == MAX_EXPR && consts_lt)
5994 /* MAX (X, 0) == 5 -> X == 5 */
5995 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5996
5997 else if (op_code == MAX_EXPR)
5998 /* MAX (X, 0) == -1 -> false */
5999 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6000
6001 else if (consts_equal)
6002 /* MIN (X, 0) == 0 -> X >= 0 */
6003 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6004
6005 else if (consts_lt)
6006 /* MIN (X, 0) == 5 -> false */
6007 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6008
6009 else
6010 /* MIN (X, 0) == -1 -> X == -1 */
6011 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6012
6013 case GT_EXPR:
6014 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6015 /* MAX (X, 0) > 0 -> X > 0
6016 MAX (X, 0) > 5 -> X > 5 */
6017 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6018
6019 else if (op_code == MAX_EXPR)
6020 /* MAX (X, 0) > -1 -> true */
6021 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6022
6023 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6024 /* MIN (X, 0) > 0 -> false
6025 MIN (X, 0) > 5 -> false */
6026 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6027
6028 else
6029 /* MIN (X, 0) > -1 -> X > -1 */
6030 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6031
6032 default:
6033 return NULL_TREE;
6034 }
6035 }
6036 \f
6037 /* T is an integer expression that is being multiplied, divided, or taken a
6038 modulus (CODE says which and what kind of divide or modulus) by a
6039 constant C. See if we can eliminate that operation by folding it with
6040 other operations already in T. WIDE_TYPE, if non-null, is a type that
6041 should be used for the computation if wider than our type.
6042
6043 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6044 (X * 2) + (Y * 4). We must, however, be assured that either the original
6045 expression would not overflow or that overflow is undefined for the type
6046 in the language in question.
6047
6048 If we return a non-null expression, it is an equivalent form of the
6049 original computation, but need not be in the original type.
6050
6051 We set *STRICT_OVERFLOW_P to true if the return value depends on
6052 signed overflow being undefined. Otherwise we do not change
6053 *STRICT_OVERFLOW_P. */
6054
6055 static tree
6056 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6057 bool *strict_overflow_p)
6058 {
6059 /* To avoid exponential search depth, refuse to allow recursion past
6060 three levels. Beyond that (1) it's highly unlikely that we'll find
6061 something interesting and (2) we've probably processed it before
6062 when we built the inner expression. */
6063
6064 static int depth;
6065 tree ret;
6066
6067 if (depth > 3)
6068 return NULL;
6069
6070 depth++;
6071 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6072 depth--;
6073
6074 return ret;
6075 }
6076
6077 static tree
6078 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6079 bool *strict_overflow_p)
6080 {
6081 tree type = TREE_TYPE (t);
6082 enum tree_code tcode = TREE_CODE (t);
6083 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6084 > GET_MODE_SIZE (TYPE_MODE (type)))
6085 ? wide_type : type);
6086 tree t1, t2;
6087 int same_p = tcode == code;
6088 tree op0 = NULL_TREE, op1 = NULL_TREE;
6089 bool sub_strict_overflow_p;
6090
6091 /* Don't deal with constants of zero here; they confuse the code below. */
6092 if (integer_zerop (c))
6093 return NULL_TREE;
6094
6095 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6096 op0 = TREE_OPERAND (t, 0);
6097
6098 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6099 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6100
6101 /* Note that we need not handle conditional operations here since fold
6102 already handles those cases. So just do arithmetic here. */
6103 switch (tcode)
6104 {
6105 case INTEGER_CST:
6106 /* For a constant, we can always simplify if we are a multiply
6107 or (for divide and modulus) if it is a multiple of our constant. */
6108 if (code == MULT_EXPR
6109 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6110 {
6111 tree tem = const_binop (code, fold_convert (ctype, t),
6112 fold_convert (ctype, c));
6113 /* If the multiplication overflowed to INT_MIN then we lost sign
6114 information on it and a subsequent multiplication might
6115 spuriously overflow. See PR68142. */
6116 if (TREE_OVERFLOW (tem)
6117 && wi::eq_p (tem, wi::min_value (TYPE_PRECISION (ctype), SIGNED)))
6118 return NULL_TREE;
6119 return tem;
6120 }
6121 break;
6122
6123 CASE_CONVERT: case NON_LVALUE_EXPR:
6124 /* If op0 is an expression ... */
6125 if ((COMPARISON_CLASS_P (op0)
6126 || UNARY_CLASS_P (op0)
6127 || BINARY_CLASS_P (op0)
6128 || VL_EXP_CLASS_P (op0)
6129 || EXPRESSION_CLASS_P (op0))
6130 /* ... and has wrapping overflow, and its type is smaller
6131 than ctype, then we cannot pass through as widening. */
6132 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6133 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6134 && (TYPE_PRECISION (ctype)
6135 > TYPE_PRECISION (TREE_TYPE (op0))))
6136 /* ... or this is a truncation (t is narrower than op0),
6137 then we cannot pass through this narrowing. */
6138 || (TYPE_PRECISION (type)
6139 < TYPE_PRECISION (TREE_TYPE (op0)))
6140 /* ... or signedness changes for division or modulus,
6141 then we cannot pass through this conversion. */
6142 || (code != MULT_EXPR
6143 && (TYPE_UNSIGNED (ctype)
6144 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6145 /* ... or has undefined overflow while the converted to
6146 type has not, we cannot do the operation in the inner type
6147 as that would introduce undefined overflow. */
6148 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6149 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6150 && !TYPE_OVERFLOW_UNDEFINED (type))))
6151 break;
6152
6153 /* Pass the constant down and see if we can make a simplification. If
6154 we can, replace this expression with the inner simplification for
6155 possible later conversion to our or some other type. */
6156 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6157 && TREE_CODE (t2) == INTEGER_CST
6158 && !TREE_OVERFLOW (t2)
6159 && (0 != (t1 = extract_muldiv (op0, t2, code,
6160 code == MULT_EXPR
6161 ? ctype : NULL_TREE,
6162 strict_overflow_p))))
6163 return t1;
6164 break;
6165
6166 case ABS_EXPR:
6167 /* If widening the type changes it from signed to unsigned, then we
6168 must avoid building ABS_EXPR itself as unsigned. */
6169 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6170 {
6171 tree cstype = (*signed_type_for) (ctype);
6172 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6173 != 0)
6174 {
6175 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6176 return fold_convert (ctype, t1);
6177 }
6178 break;
6179 }
6180 /* If the constant is negative, we cannot simplify this. */
6181 if (tree_int_cst_sgn (c) == -1)
6182 break;
6183 /* FALLTHROUGH */
6184 case NEGATE_EXPR:
6185 /* For division and modulus, type can't be unsigned, as e.g.
6186 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6187 For signed types, even with wrapping overflow, this is fine. */
6188 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6189 break;
6190 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6191 != 0)
6192 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6193 break;
6194
6195 case MIN_EXPR: case MAX_EXPR:
6196 /* If widening the type changes the signedness, then we can't perform
6197 this optimization as that changes the result. */
6198 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6199 break;
6200
6201 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6202 sub_strict_overflow_p = false;
6203 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6204 &sub_strict_overflow_p)) != 0
6205 && (t2 = extract_muldiv (op1, c, code, wide_type,
6206 &sub_strict_overflow_p)) != 0)
6207 {
6208 if (tree_int_cst_sgn (c) < 0)
6209 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6210 if (sub_strict_overflow_p)
6211 *strict_overflow_p = true;
6212 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6213 fold_convert (ctype, t2));
6214 }
6215 break;
6216
6217 case LSHIFT_EXPR: case RSHIFT_EXPR:
6218 /* If the second operand is constant, this is a multiplication
6219 or floor division, by a power of two, so we can treat it that
6220 way unless the multiplier or divisor overflows. Signed
6221 left-shift overflow is implementation-defined rather than
6222 undefined in C90, so do not convert signed left shift into
6223 multiplication. */
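      /* For example, an unsigned X << 3 is re-expressed here as X * 8,
	 and X >> 2 as the floor division X / 4, before recursing.  */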
6224 if (TREE_CODE (op1) == INTEGER_CST
6225 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6226 /* const_binop may not detect overflow correctly,
6227 so check for it explicitly here. */
6228 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6229 && 0 != (t1 = fold_convert (ctype,
6230 const_binop (LSHIFT_EXPR,
6231 size_one_node,
6232 op1)))
6233 && !TREE_OVERFLOW (t1))
6234 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6235 ? MULT_EXPR : FLOOR_DIV_EXPR,
6236 ctype,
6237 fold_convert (ctype, op0),
6238 t1),
6239 c, code, wide_type, strict_overflow_p);
6240 break;
6241
6242 case PLUS_EXPR: case MINUS_EXPR:
6243 /* See if we can eliminate the operation on both sides. If we can, we
6244 can return a new PLUS or MINUS. If we can't, the only remaining
6245 cases where we can do anything are if the second operand is a
6246 constant. */
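      /* This is the case behind the example in the function comment:
	 dividing (X * 8) + (Y * 16) by 4 eliminates the division on both
	 operands, giving (X * 2) + (Y * 4).  */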
6247 sub_strict_overflow_p = false;
6248 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6249 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6250 if (t1 != 0 && t2 != 0
6251 && (code == MULT_EXPR
6252 /* If not multiplication, we can only do this if both operands
6253 are divisible by c. */
6254 || (multiple_of_p (ctype, op0, c)
6255 && multiple_of_p (ctype, op1, c))))
6256 {
6257 if (sub_strict_overflow_p)
6258 *strict_overflow_p = true;
6259 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6260 fold_convert (ctype, t2));
6261 }
6262
6263 /* If this was a subtraction, negate OP1 and set it to be an addition.
6264 This simplifies the logic below. */
6265 if (tcode == MINUS_EXPR)
6266 {
6267 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6268 /* If OP1 was not easily negatable, the constant may be OP0. */
6269 if (TREE_CODE (op0) == INTEGER_CST)
6270 {
6271 std::swap (op0, op1);
6272 std::swap (t1, t2);
6273 }
6274 }
6275
6276 if (TREE_CODE (op1) != INTEGER_CST)
6277 break;
6278
6279 /* If either OP1 or C are negative, this optimization is not safe for
6280 some of the division and remainder types while for others we need
6281 to change the code. */
6282 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6283 {
6284 if (code == CEIL_DIV_EXPR)
6285 code = FLOOR_DIV_EXPR;
6286 else if (code == FLOOR_DIV_EXPR)
6287 code = CEIL_DIV_EXPR;
6288 else if (code != MULT_EXPR
6289 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6290 break;
6291 }
6292
6293 /* If it's a multiply or a division/modulus operation of a multiple
6294 of our constant, do the operation and verify it doesn't overflow. */
6295 if (code == MULT_EXPR
6296 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6297 {
6298 op1 = const_binop (code, fold_convert (ctype, op1),
6299 fold_convert (ctype, c));
6300 /* We allow the constant to overflow with wrapping semantics. */
6301 if (op1 == 0
6302 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6303 break;
6304 }
6305 else
6306 break;
6307
6308 /* If we have an unsigned type, we cannot widen the operation since it
6309 will change the result if the original computation overflowed. */
6310 if (TYPE_UNSIGNED (ctype) && ctype != type)
6311 break;
6312
6313 /* If we were able to eliminate our operation from the first side,
6314 apply our operation to the second side and reform the PLUS. */
6315 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6316 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6317
6318 /* The last case is if we are a multiply. In that case, we can
6319 apply the distributive law to commute the multiply and addition
6320 if the multiplication of the constants doesn't overflow
6321 and overflow is defined. With undefined overflow
6322 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6323 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6324 return fold_build2 (tcode, ctype,
6325 fold_build2 (code, ctype,
6326 fold_convert (ctype, op0),
6327 fold_convert (ctype, c)),
6328 op1);
6329
6330 break;
6331
6332 case MULT_EXPR:
6333 /* We have a special case here if we are doing something like
6334 (C * 8) % 4 since we know that's zero. */
6335 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6336 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6337 /* If the multiplication can overflow we cannot optimize this. */
6338 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6339 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6340 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6341 {
6342 *strict_overflow_p = true;
6343 return omit_one_operand (type, integer_zero_node, op0);
6344 }
6345
6346 /* ... fall through ... */
6347
6348 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6349 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6350 /* If we can extract our operation from the LHS, do so and return a
6351 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6352 do something only if the second operand is a constant. */
6353 if (same_p
6354 && (t1 = extract_muldiv (op0, c, code, wide_type,
6355 strict_overflow_p)) != 0)
6356 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6357 fold_convert (ctype, op1));
6358 else if (tcode == MULT_EXPR && code == MULT_EXPR
6359 && (t1 = extract_muldiv (op1, c, code, wide_type,
6360 strict_overflow_p)) != 0)
6361 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6362 fold_convert (ctype, t1));
6363 else if (TREE_CODE (op1) != INTEGER_CST)
6364 return 0;
6365
6366 /* If these are the same operation types, we can associate them
6367 assuming no overflow. */
6368 if (tcode == code)
6369 {
6370 bool overflow_p = false;
6371 bool overflow_mul_p;
6372 signop sign = TYPE_SIGN (ctype);
6373 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6374 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6375 if (overflow_mul_p
6376 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6377 overflow_p = true;
6378 if (!overflow_p)
6379 {
6380 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6381 TYPE_SIGN (TREE_TYPE (op1)));
6382 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6383 wide_int_to_tree (ctype, mul));
6384 }
6385 }
6386
6387 /* If these operations "cancel" each other, we have the main
6388 optimizations of this pass, which occur when either constant is a
6389 multiple of the other, in which case we replace this with an
6390 operation of either CODE or TCODE.
6391
6392 If we have an unsigned type, we cannot do this since it will change
6393 the result if the original computation overflowed. */
6394 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6395 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6396 || (tcode == MULT_EXPR
6397 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6398 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6399 && code != MULT_EXPR)))
6400 {
6401 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6402 {
6403 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6404 *strict_overflow_p = true;
6405 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6406 fold_convert (ctype,
6407 const_binop (TRUNC_DIV_EXPR,
6408 op1, c)));
6409 }
6410 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6411 {
6412 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6413 *strict_overflow_p = true;
6414 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6415 fold_convert (ctype,
6416 const_binop (TRUNC_DIV_EXPR,
6417 c, op1)));
6418 }
6419 }
6420 break;
6421
6422 default:
6423 break;
6424 }
6425
6426 return 0;
6427 }
6428 \f
6429 /* Return a node which has the indicated constant VALUE (either 0 or
6430 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6431 and is of the indicated TYPE. */
6432
6433 tree
6434 constant_boolean_node (bool value, tree type)
6435 {
6436 if (type == integer_type_node)
6437 return value ? integer_one_node : integer_zero_node;
6438 else if (type == boolean_type_node)
6439 return value ? boolean_true_node : boolean_false_node;
6440 else if (TREE_CODE (type) == VECTOR_TYPE)
6441 return build_vector_from_val (type,
6442 build_int_cst (TREE_TYPE (type),
6443 value ? -1 : 0));
6444 else
6445 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6446 }
6447
6448
6449 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6450 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6451 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6452 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6453 COND is the first argument to CODE; otherwise (as in the example
6454 given here), it is the second argument. TYPE is the type of the
6455 original expression. Return NULL_TREE if no simplification is
6456 possible. */
6457
6458 static tree
6459 fold_binary_op_with_conditional_arg (location_t loc,
6460 enum tree_code code,
6461 tree type, tree op0, tree op1,
6462 tree cond, tree arg, int cond_first_p)
6463 {
6464 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6465 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6466 tree test, true_value, false_value;
6467 tree lhs = NULL_TREE;
6468 tree rhs = NULL_TREE;
6469 enum tree_code cond_code = COND_EXPR;
6470
6471 if (TREE_CODE (cond) == COND_EXPR
6472 || TREE_CODE (cond) == VEC_COND_EXPR)
6473 {
6474 test = TREE_OPERAND (cond, 0);
6475 true_value = TREE_OPERAND (cond, 1);
6476 false_value = TREE_OPERAND (cond, 2);
6477 /* If this operand throws an exception, then it does not make
6478 sense to try to perform a logical or arithmetic operation
6479 involving it. */
6480 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6481 lhs = true_value;
6482 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6483 rhs = false_value;
6484 }
6485 else if (!(TREE_CODE (type) != VECTOR_TYPE
6486 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6487 {
6488 tree testtype = TREE_TYPE (cond);
6489 test = cond;
6490 true_value = constant_boolean_node (true, testtype);
6491 false_value = constant_boolean_node (false, testtype);
6492 }
6493 else
6494 /* Detect the case of mixing vector and scalar types - bail out. */
6495 return NULL_TREE;
6496
6497 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6498 cond_code = VEC_COND_EXPR;
6499
6500 /* This transformation is only worthwhile if we don't have to wrap ARG
6501 in a SAVE_EXPR and the operation can be simplified without recursing
6502 on at least one of the branches once it is pushed inside the COND_EXPR. */
6503 if (!TREE_CONSTANT (arg)
6504 && (TREE_SIDE_EFFECTS (arg)
6505 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6506 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6507 return NULL_TREE;
6508
6509 arg = fold_convert_loc (loc, arg_type, arg);
6510 if (lhs == 0)
6511 {
6512 true_value = fold_convert_loc (loc, cond_type, true_value);
6513 if (cond_first_p)
6514 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6515 else
6516 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6517 }
6518 if (rhs == 0)
6519 {
6520 false_value = fold_convert_loc (loc, cond_type, false_value);
6521 if (cond_first_p)
6522 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6523 else
6524 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6525 }
6526
6527 /* Check that we have simplified at least one of the branches. */
6528 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6529 return NULL_TREE;
6530
6531 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6532 }
6533
6534 \f
6535 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6536
6537 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6538 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6539 ADDEND is the same as X.
6540
6541 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6542 and finite. The problematic cases are when X is zero, and its mode
6543 has signed zeros. In the case of rounding towards -infinity,
6544 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6545 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6546
6547 bool
6548 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6549 {
6550 if (!real_zerop (addend))
6551 return false;
6552
6553 /* Don't allow the fold with -fsignaling-nans. */
6554 if (HONOR_SNANS (element_mode (type)))
6555 return false;
6556
6557 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6558 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6559 return true;
6560
6561 /* In a vector or complex, we would need to check the sign of all zeros. */
6562 if (TREE_CODE (addend) != REAL_CST)
6563 return false;
6564
6565 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6566 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6567 negate = !negate;
6568
6569 /* The mode has signed zeros, and we have to honor their sign.
6570 In this situation, there is only one case we can return true for.
6571 X - 0 is the same as X unless rounding towards -infinity is
6572 supported. */
6573 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6574 }
6575
6576 /* Subroutine of fold() that optimizes comparisons of a division by
6577 a nonzero integer constant against an integer constant, i.e.
6578 X/C1 op C2.
6579
6580 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6581 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6582 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6583
6584 The function returns the constant folded tree if a simplification
6585 can be made, and NULL_TREE otherwise. */
6586
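/* For example, with unsigned operands, X / 3 == 4 holds exactly for
   X in [12, 14]: LO is 3 * 4 == 12, HI is LO + (3 - 1) == 14, and the
   EQ_EXPR case below emits the corresponding range check.  */
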
6587 static tree
6588 fold_div_compare (location_t loc,
6589 enum tree_code code, tree type, tree arg0, tree arg1)
6590 {
6591 tree prod, tmp, hi, lo;
6592 tree arg00 = TREE_OPERAND (arg0, 0);
6593 tree arg01 = TREE_OPERAND (arg0, 1);
6594 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6595 bool neg_overflow = false;
6596 bool overflow;
6597
6598 /* We have to do this the hard way to detect unsigned overflow.
6599 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6600 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6601 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6602 neg_overflow = false;
6603
6604 if (sign == UNSIGNED)
6605 {
6606 tmp = int_const_binop (MINUS_EXPR, arg01,
6607 build_int_cst (TREE_TYPE (arg01), 1));
6608 lo = prod;
6609
6610 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6611 val = wi::add (prod, tmp, sign, &overflow);
6612 hi = force_fit_type (TREE_TYPE (arg00), val,
6613 -1, overflow | TREE_OVERFLOW (prod));
6614 }
6615 else if (tree_int_cst_sgn (arg01) >= 0)
6616 {
6617 tmp = int_const_binop (MINUS_EXPR, arg01,
6618 build_int_cst (TREE_TYPE (arg01), 1));
6619 switch (tree_int_cst_sgn (arg1))
6620 {
6621 case -1:
6622 neg_overflow = true;
6623 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6624 hi = prod;
6625 break;
6626
6627 case 0:
6628 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6629 hi = tmp;
6630 break;
6631
6632 case 1:
6633 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6634 lo = prod;
6635 break;
6636
6637 default:
6638 gcc_unreachable ();
6639 }
6640 }
6641 else
6642 {
6643 /* A negative divisor reverses the relational operators. */
6644 code = swap_tree_comparison (code);
6645
6646 tmp = int_const_binop (PLUS_EXPR, arg01,
6647 build_int_cst (TREE_TYPE (arg01), 1));
6648 switch (tree_int_cst_sgn (arg1))
6649 {
6650 case -1:
6651 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6652 lo = prod;
6653 break;
6654
6655 case 0:
6656 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6657 lo = tmp;
6658 break;
6659
6660 case 1:
6661 neg_overflow = true;
6662 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6663 hi = prod;
6664 break;
6665
6666 default:
6667 gcc_unreachable ();
6668 }
6669 }
6670
6671 switch (code)
6672 {
6673 case EQ_EXPR:
6674 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6675 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6676 if (TREE_OVERFLOW (hi))
6677 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6678 if (TREE_OVERFLOW (lo))
6679 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6680 return build_range_check (loc, type, arg00, 1, lo, hi);
6681
6682 case NE_EXPR:
6683 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6684 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6685 if (TREE_OVERFLOW (hi))
6686 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6687 if (TREE_OVERFLOW (lo))
6688 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6689 return build_range_check (loc, type, arg00, 0, lo, hi);
6690
6691 case LT_EXPR:
6692 if (TREE_OVERFLOW (lo))
6693 {
6694 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6695 return omit_one_operand_loc (loc, type, tmp, arg00);
6696 }
6697 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6698
6699 case LE_EXPR:
6700 if (TREE_OVERFLOW (hi))
6701 {
6702 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6703 return omit_one_operand_loc (loc, type, tmp, arg00);
6704 }
6705 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6706
6707 case GT_EXPR:
6708 if (TREE_OVERFLOW (hi))
6709 {
6710 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6711 return omit_one_operand_loc (loc, type, tmp, arg00);
6712 }
6713 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6714
6715 case GE_EXPR:
6716 if (TREE_OVERFLOW (lo))
6717 {
6718 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6719 return omit_one_operand_loc (loc, type, tmp, arg00);
6720 }
6721 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6722
6723 default:
6724 break;
6725 }
6726
6727 return NULL_TREE;
6728 }
6729
6730
6731 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6732 equality/inequality test, then return a simplified form of the test
6733 using a sign testing. Otherwise return NULL. TYPE is the desired
6734 result type. */
6735
6736 static tree
6737 fold_single_bit_test_into_sign_test (location_t loc,
6738 enum tree_code code, tree arg0, tree arg1,
6739 tree result_type)
6740 {
6741 /* If this is testing a single bit, we can optimize the test. */
6742 if ((code == NE_EXPR || code == EQ_EXPR)
6743 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6744 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6745 {
6746 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6747 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6748 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6749
6750 if (arg00 != NULL_TREE
6751 /* This is only a win if casting to a signed type is cheap,
6752 i.e. when arg00's type is not a partial mode. */
6753 && TYPE_PRECISION (TREE_TYPE (arg00))
6754 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6755 {
6756 tree stype = signed_type_for (TREE_TYPE (arg00));
6757 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6758 result_type,
6759 fold_convert_loc (loc, stype, arg00),
6760 build_int_cst (stype, 0));
6761 }
6762 }
6763
6764 return NULL_TREE;
6765 }
6766
6767 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6768 equality/inequality test, then return a simplified form of
6769 the test using shifts and logical operations. Otherwise return
6770 NULL. TYPE is the desired result type. */
6771
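/* For example, when the sign-test fold above does not apply,

     (x & 8) != 0   becomes   (x >> 3) & 1
     (x & 8) == 0   becomes   ((x >> 3) ^ 1) & 1

   modulo the conversions to the intermediate type chosen below.  */
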
6772 tree
6773 fold_single_bit_test (location_t loc, enum tree_code code,
6774 tree arg0, tree arg1, tree result_type)
6775 {
6776 /* If this is testing a single bit, we can optimize the test. */
6777 if ((code == NE_EXPR || code == EQ_EXPR)
6778 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6779 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6780 {
6781 tree inner = TREE_OPERAND (arg0, 0);
6782 tree type = TREE_TYPE (arg0);
6783 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6784 machine_mode operand_mode = TYPE_MODE (type);
6785 int ops_unsigned;
6786 tree signed_type, unsigned_type, intermediate_type;
6787 tree tem, one;
6788
6789 /* First, see if we can fold the single bit test into a sign-bit
6790 test. */
6791 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6792 result_type);
6793 if (tem)
6794 return tem;
6795
6796 /* Otherwise we have (A & C) != 0 where C is a single bit,
6797 convert that into ((A >> C2) & 1), where C2 = log2(C).
6798 Similarly for (A & C) == 0. */
6799
6800 /* If INNER is a right shift by a constant and it plus BITNUM does
6801 not overflow, adjust BITNUM and INNER. */
6802 if (TREE_CODE (inner) == RSHIFT_EXPR
6803 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6804 && bitnum < TYPE_PRECISION (type)
6805 && wi::ltu_p (TREE_OPERAND (inner, 1),
6806 TYPE_PRECISION (type) - bitnum))
6807 {
6808 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6809 inner = TREE_OPERAND (inner, 0);
6810 }
6811
6812 /* If we are going to be able to omit the AND below, we must do our
6813 operations as unsigned. If we must use the AND, we have a choice.
6814 Normally unsigned is faster, but for some machines signed is. */
6815 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6816 && !flag_syntax_only) ? 0 : 1;
6817
6818 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6819 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6820 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6821 inner = fold_convert_loc (loc, intermediate_type, inner);
6822
6823 if (bitnum != 0)
6824 inner = build2 (RSHIFT_EXPR, intermediate_type,
6825 inner, size_int (bitnum));
6826
6827 one = build_int_cst (intermediate_type, 1);
6828
6829 if (code == EQ_EXPR)
6830 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6831
6832 /* Put the AND last so it can combine with more things. */
6833 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6834
6835 /* Make sure to return the proper type. */
6836 inner = fold_convert_loc (loc, result_type, inner);
6837
6838 return inner;
6839 }
6840 return NULL_TREE;
6841 }
6842
6843 /* Check whether we are allowed to reorder operands arg0 and arg1,
6844 such that the evaluation of arg1 occurs before arg0. */
6845
6846 static bool
6847 reorder_operands_p (const_tree arg0, const_tree arg1)
6848 {
6849 if (! flag_evaluation_order)
6850 return true;
6851 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6852 return true;
6853 return ! TREE_SIDE_EFFECTS (arg0)
6854 && ! TREE_SIDE_EFFECTS (arg1);
6855 }
6856
6857 /* Test whether it is preferable to swap two operands, ARG0 and
6858 ARG1, for example because ARG0 is an integer constant and ARG1
6859 isn't. If REORDER is true, only recommend swapping if we can
6860 evaluate the operands in reverse order. */
6861
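/* For example, for 5 < x this returns true, since ARG0 is a constant
   and ARG1 is not; callers then canonicalize the test as x > 5,
   keeping constants as the second operand.  */
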
6862 bool
6863 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6864 {
6865 if (CONSTANT_CLASS_P (arg1))
6866 return 0;
6867 if (CONSTANT_CLASS_P (arg0))
6868 return 1;
6869
6870 STRIP_NOPS (arg0);
6871 STRIP_NOPS (arg1);
6872
6873 if (TREE_CONSTANT (arg1))
6874 return 0;
6875 if (TREE_CONSTANT (arg0))
6876 return 1;
6877
6878 if (reorder && flag_evaluation_order
6879 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6880 return 0;
6881
6882 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6883 for commutative and comparison operators. Ensuring a canonical
6884 form allows the optimizers to find additional redundancies without
6885 having to explicitly check for both orderings. */
6886 if (TREE_CODE (arg0) == SSA_NAME
6887 && TREE_CODE (arg1) == SSA_NAME
6888 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6889 return 1;
6890
6891 /* Put SSA_NAMEs last. */
6892 if (TREE_CODE (arg1) == SSA_NAME)
6893 return 0;
6894 if (TREE_CODE (arg0) == SSA_NAME)
6895 return 1;
6896
6897 /* Put variables last. */
6898 if (DECL_P (arg1))
6899 return 0;
6900 if (DECL_P (arg0))
6901 return 1;
6902
6903 return 0;
6904 }
6905
6906
6907 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6908 means A >= Y && A != MAX, but in this case we know that
6909 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6910
6911 static tree
6912 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6913 {
6914 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6915
6916 if (TREE_CODE (bound) == LT_EXPR)
6917 a = TREE_OPERAND (bound, 0);
6918 else if (TREE_CODE (bound) == GT_EXPR)
6919 a = TREE_OPERAND (bound, 1);
6920 else
6921 return NULL_TREE;
6922
6923 typea = TREE_TYPE (a);
6924 if (!INTEGRAL_TYPE_P (typea)
6925 && !POINTER_TYPE_P (typea))
6926 return NULL_TREE;
6927
6928 if (TREE_CODE (ineq) == LT_EXPR)
6929 {
6930 a1 = TREE_OPERAND (ineq, 1);
6931 y = TREE_OPERAND (ineq, 0);
6932 }
6933 else if (TREE_CODE (ineq) == GT_EXPR)
6934 {
6935 a1 = TREE_OPERAND (ineq, 0);
6936 y = TREE_OPERAND (ineq, 1);
6937 }
6938 else
6939 return NULL_TREE;
6940
6941 if (TREE_TYPE (a1) != typea)
6942 return NULL_TREE;
6943
6944 if (POINTER_TYPE_P (typea))
6945 {
6946 /* Convert the pointers to integers before taking the difference. */
6947 tree ta = fold_convert_loc (loc, ssizetype, a);
6948 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6949 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6950 }
6951 else
6952 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6953
6954 if (!diff || !integer_onep (diff))
6955 return NULL_TREE;
6956
6957 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6958 }
6959
6960 /* Fold a sum or difference of at least one multiplication.
6961 Returns the folded tree or NULL if no simplification could be made. */
6962
6963 static tree
6964 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6965 tree arg0, tree arg1)
6966 {
6967 tree arg00, arg01, arg10, arg11;
6968 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6969
6970 /* (A * C) +- (B * C) -> (A+-B) * C.
6971 (A * C) +- A -> A * (C+-1).
6972 We are most concerned about the case where C is a constant,
6973 but other combinations show up during loop reduction. Since
6974 it is not difficult, try all four possibilities. */
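/* For instance, x*4 + y*4 folds to (x + y)*4 through the
   identical-multiplicand cases, and i*12 + j*4 folds to (i*3 + j)*4
   through the common power-of-two factor case further below.  */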
6975
6976 if (TREE_CODE (arg0) == MULT_EXPR)
6977 {
6978 arg00 = TREE_OPERAND (arg0, 0);
6979 arg01 = TREE_OPERAND (arg0, 1);
6980 }
6981 else if (TREE_CODE (arg0) == INTEGER_CST)
6982 {
6983 arg00 = build_one_cst (type);
6984 arg01 = arg0;
6985 }
6986 else
6987 {
6988 /* We cannot generate constant 1 for fract. */
6989 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6990 return NULL_TREE;
6991 arg00 = arg0;
6992 arg01 = build_one_cst (type);
6993 }
6994 if (TREE_CODE (arg1) == MULT_EXPR)
6995 {
6996 arg10 = TREE_OPERAND (arg1, 0);
6997 arg11 = TREE_OPERAND (arg1, 1);
6998 }
6999 else if (TREE_CODE (arg1) == INTEGER_CST)
7000 {
7001 arg10 = build_one_cst (type);
7002 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7003 the purpose of this canonicalization. */
7004 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7005 && negate_expr_p (arg1)
7006 && code == PLUS_EXPR)
7007 {
7008 arg11 = negate_expr (arg1);
7009 code = MINUS_EXPR;
7010 }
7011 else
7012 arg11 = arg1;
7013 }
7014 else
7015 {
7016 /* We cannot generate constant 1 for fract. */
7017 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7018 return NULL_TREE;
7019 arg10 = arg1;
7020 arg11 = build_one_cst (type);
7021 }
7022 same = NULL_TREE;
7023
7024 if (operand_equal_p (arg01, arg11, 0))
7025 same = arg01, alt0 = arg00, alt1 = arg10;
7026 else if (operand_equal_p (arg00, arg10, 0))
7027 same = arg00, alt0 = arg01, alt1 = arg11;
7028 else if (operand_equal_p (arg00, arg11, 0))
7029 same = arg00, alt0 = arg01, alt1 = arg10;
7030 else if (operand_equal_p (arg01, arg10, 0))
7031 same = arg01, alt0 = arg00, alt1 = arg11;
7032
7033 /* No identical multiplicands; see if we can find a common
7034 power-of-two factor in non-power-of-two multiplies. This
7035 can help in multi-dimensional array access. */
7036 else if (tree_fits_shwi_p (arg01)
7037 && tree_fits_shwi_p (arg11))
7038 {
7039 HOST_WIDE_INT int01, int11, tmp;
7040 bool swap = false;
7041 tree maybe_same;
7042 int01 = tree_to_shwi (arg01);
7043 int11 = tree_to_shwi (arg11);
7044
7045 /* Move min of absolute values to int11. */
7046 if (absu_hwi (int01) < absu_hwi (int11))
7047 {
7048 tmp = int01, int01 = int11, int11 = tmp;
7049 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7050 maybe_same = arg01;
7051 swap = true;
7052 }
7053 else
7054 maybe_same = arg11;
7055
7056 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7057 /* The remainder should not be a constant, otherwise we
7058 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7059 increase the number of multiplications needed. */
7060 && TREE_CODE (arg10) != INTEGER_CST)
7061 {
7062 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7063 build_int_cst (TREE_TYPE (arg00),
7064 int01 / int11));
7065 alt1 = arg10;
7066 same = maybe_same;
7067 if (swap)
7068 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7069 }
7070 }
7071
7072 if (same)
7073 return fold_build2_loc (loc, MULT_EXPR, type,
7074 fold_build2_loc (loc, code, type,
7075 fold_convert_loc (loc, type, alt0),
7076 fold_convert_loc (loc, type, alt1)),
7077 fold_convert_loc (loc, type, same));
7078
7079 return NULL_TREE;
7080 }
7081
7082 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7083 specified by EXPR into the buffer PTR of length LEN bytes.
7084 Return the number of bytes placed in the buffer, or zero
7085 upon failure. */
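/* As an illustration, on a 32-bit little-endian target the INTEGER_CST
   0x01020304 is emitted as the bytes 04 03 02 01; the endianness
   handling below also copes with big-endian and mixed word orders.  */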
7086
7087 static int
7088 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7089 {
7090 tree type = TREE_TYPE (expr);
7091 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7092 int byte, offset, word, words;
7093 unsigned char value;
7094
7095 if ((off == -1 && total_bytes > len)
7096 || off >= total_bytes)
7097 return 0;
7098 if (off == -1)
7099 off = 0;
7100 words = total_bytes / UNITS_PER_WORD;
7101
7102 for (byte = 0; byte < total_bytes; byte++)
7103 {
7104 int bitpos = byte * BITS_PER_UNIT;
7105 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7106 number of bytes. */
7107 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7108
7109 if (total_bytes > UNITS_PER_WORD)
7110 {
7111 word = byte / UNITS_PER_WORD;
7112 if (WORDS_BIG_ENDIAN)
7113 word = (words - 1) - word;
7114 offset = word * UNITS_PER_WORD;
7115 if (BYTES_BIG_ENDIAN)
7116 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7117 else
7118 offset += byte % UNITS_PER_WORD;
7119 }
7120 else
7121 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7122 if (offset >= off
7123 && offset - off < len)
7124 ptr[offset - off] = value;
7125 }
7126 return MIN (len, total_bytes - off);
7127 }
7128
7129
7130 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7131 specified by EXPR into the buffer PTR of length LEN bytes.
7132 Return the number of bytes placed in the buffer, or zero
7133 upon failure. */
7134
7135 static int
7136 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7137 {
7138 tree type = TREE_TYPE (expr);
7139 machine_mode mode = TYPE_MODE (type);
7140 int total_bytes = GET_MODE_SIZE (mode);
7141 FIXED_VALUE_TYPE value;
7142 tree i_value, i_type;
7143
7144 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7145 return 0;
7146
7147 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7148
7149 if (NULL_TREE == i_type
7150 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7151 return 0;
7152
7153 value = TREE_FIXED_CST (expr);
7154 i_value = double_int_to_tree (i_type, value.data);
7155
7156 return native_encode_int (i_value, ptr, len, off);
7157 }
7158
7159
7160 /* Subroutine of native_encode_expr. Encode the REAL_CST
7161 specified by EXPR into the buffer PTR of length LEN bytes.
7162 Return the number of bytes placed in the buffer, or zero
7163 upon failure. */
7164
7165 static int
7166 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7167 {
7168 tree type = TREE_TYPE (expr);
7169 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7170 int byte, offset, word, words, bitpos;
7171 unsigned char value;
7172
7173 /* There are always 32 bits in each long, no matter the size of
7174 the host's long. We handle floating point representations with
7175 up to 192 bits. */
7176 long tmp[6];
7177
7178 if ((off == -1 && total_bytes > len)
7179 || off >= total_bytes)
7180 return 0;
7181 if (off == -1)
7182 off = 0;
7183 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7184
7185 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7186
7187 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7188 bitpos += BITS_PER_UNIT)
7189 {
7190 byte = (bitpos / BITS_PER_UNIT) & 3;
7191 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7192
7193 if (UNITS_PER_WORD < 4)
7194 {
7195 word = byte / UNITS_PER_WORD;
7196 if (WORDS_BIG_ENDIAN)
7197 word = (words - 1) - word;
7198 offset = word * UNITS_PER_WORD;
7199 if (BYTES_BIG_ENDIAN)
7200 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7201 else
7202 offset += byte % UNITS_PER_WORD;
7203 }
7204 else
7205 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7206 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7207 if (offset >= off
7208 && offset - off < len)
7209 ptr[offset - off] = value;
7210 }
7211 return MIN (len, total_bytes - off);
7212 }
7213
7214 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7215 specified by EXPR into the buffer PTR of length LEN bytes.
7216 Return the number of bytes placed in the buffer, or zero
7217 upon failure. */
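/* The encoding is the real part immediately followed by the imaginary
   part; when OFF is not -1 it is shifted past the real part before
   the imaginary half is encoded.  */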
7218
7219 static int
7220 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7221 {
7222 int rsize, isize;
7223 tree part;
7224
7225 part = TREE_REALPART (expr);
7226 rsize = native_encode_expr (part, ptr, len, off);
7227 if (off == -1
7228 && rsize == 0)
7229 return 0;
7230 part = TREE_IMAGPART (expr);
7231 if (off != -1)
7232 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7233 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7234 if (off == -1
7235 && isize != rsize)
7236 return 0;
7237 return rsize + isize;
7238 }
7239
7240
7241 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7242 specified by EXPR into the buffer PTR of length LEN bytes.
7243 Return the number of bytes placed in the buffer, or zero
7244 upon failure. */
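/* Elements are encoded in memory order; a nonnegative OFF skips whole
   leading elements, applies within the first element actually encoded,
   and is then zeroed so the remaining elements are encoded in full.  */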
7245
7246 static int
7247 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7248 {
7249 unsigned i, count;
7250 int size, offset;
7251 tree itype, elem;
7252
7253 offset = 0;
7254 count = VECTOR_CST_NELTS (expr);
7255 itype = TREE_TYPE (TREE_TYPE (expr));
7256 size = GET_MODE_SIZE (TYPE_MODE (itype));
7257 for (i = 0; i < count; i++)
7258 {
7259 if (off >= size)
7260 {
7261 off -= size;
7262 continue;
7263 }
7264 elem = VECTOR_CST_ELT (expr, i);
7265 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7266 if ((off == -1 && res != size)
7267 || res == 0)
7268 return 0;
7269 offset += res;
7270 if (offset >= len)
7271 return offset;
7272 if (off != -1)
7273 off = 0;
7274 }
7275 return offset;
7276 }
7277
7278
7279 /* Subroutine of native_encode_expr. Encode the STRING_CST
7280 specified by EXPR into the buffer PTR of length LEN bytes.
7281 Return the number of bytes placed in the buffer, or zero
7282 upon failure. */
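/* Bytes beyond TREE_STRING_LENGTH are implicit zeros: for
   char a[8] = "hi" only the three bytes 'h', 'i' and '\0' are stored
   in the STRING_CST, and the remaining five are zero-filled below.  */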
7283
7284 static int
7285 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7286 {
7287 tree type = TREE_TYPE (expr);
7288 HOST_WIDE_INT total_bytes;
7289
7290 if (TREE_CODE (type) != ARRAY_TYPE
7291 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7292 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7293 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7294 return 0;
7295 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7296 if ((off == -1 && total_bytes > len)
7297 || off >= total_bytes)
7298 return 0;
7299 if (off == -1)
7300 off = 0;
7301 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7302 {
7303 int written = 0;
7304 if (off < TREE_STRING_LENGTH (expr))
7305 {
7306 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7307 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7308 }
7309 memset (ptr + written, 0,
7310 MIN (total_bytes - written, len - written));
7311 }
7312 else
7313 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7314 return MIN (total_bytes - off, len);
7315 }
7316
7317
7318 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7319 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7320 buffer PTR of length LEN bytes. If OFF is not -1 then start
7321 the encoding at byte offset OFF and encode at most LEN bytes.
7322 Return the number of bytes placed in the buffer, or zero upon failure. */
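/* A nonnegative OFF selects a sub-range of the byte image, so callers
   can fold partial reads from a large constant without encoding the
   whole object first.  */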
7323
7324 int
7325 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7326 {
7327 /* We don't support starting at negative offset and -1 is special. */
7328 if (off < -1)
7329 return 0;
7330
7331 switch (TREE_CODE (expr))
7332 {
7333 case INTEGER_CST:
7334 return native_encode_int (expr, ptr, len, off);
7335
7336 case REAL_CST:
7337 return native_encode_real (expr, ptr, len, off);
7338
7339 case FIXED_CST:
7340 return native_encode_fixed (expr, ptr, len, off);
7341
7342 case COMPLEX_CST:
7343 return native_encode_complex (expr, ptr, len, off);
7344
7345 case VECTOR_CST:
7346 return native_encode_vector (expr, ptr, len, off);
7347
7348 case STRING_CST:
7349 return native_encode_string (expr, ptr, len, off);
7350
7351 default:
7352 return 0;
7353 }
7354 }
7355
7356
7357 /* Subroutine of native_interpret_expr. Interpret the contents of
7358 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7359 If the buffer cannot be interpreted, return NULL_TREE. */
7360
7361 static tree
7362 native_interpret_int (tree type, const unsigned char *ptr, int len)
7363 {
7364 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7365
7366 if (total_bytes > len
7367 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7368 return NULL_TREE;
7369
7370 wide_int result = wi::from_buffer (ptr, total_bytes);
7371
7372 return wide_int_to_tree (type, result);
7373 }
7374
7375
7376 /* Subroutine of native_interpret_expr. Interpret the contents of
7377 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7378 If the buffer cannot be interpreted, return NULL_TREE. */
7379
7380 static tree
7381 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7382 {
7383 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7384 double_int result;
7385 FIXED_VALUE_TYPE fixed_value;
7386
7387 if (total_bytes > len
7388 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7389 return NULL_TREE;
7390
7391 result = double_int::from_buffer (ptr, total_bytes);
7392 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7393
7394 return build_fixed (type, fixed_value);
7395 }
7396
7397
7398 /* Subroutine of native_interpret_expr. Interpret the contents of
7399 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7400 If the buffer cannot be interpreted, return NULL_TREE. */
7401
7402 static tree
7403 native_interpret_real (tree type, const unsigned char *ptr, int len)
7404 {
7405 machine_mode mode = TYPE_MODE (type);
7406 int total_bytes = GET_MODE_SIZE (mode);
7407 unsigned char value;
7408 /* There are always 32 bits in each long, no matter the size of
7409 the host's long. We handle floating point representations with
7410 up to 192 bits. */
7411 REAL_VALUE_TYPE r;
7412 long tmp[6];
7413
7415 if (total_bytes > len || total_bytes > 24)
7416 return NULL_TREE;
7417 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7418
7419 memset (tmp, 0, sizeof (tmp));
7420 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7421 bitpos += BITS_PER_UNIT)
7422 {
7423 /* Both OFFSET and BYTE index within a long;
7424 bitpos indexes the whole float. */
7425 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7426 if (UNITS_PER_WORD < 4)
7427 {
7428 int word = byte / UNITS_PER_WORD;
7429 if (WORDS_BIG_ENDIAN)
7430 word = (words - 1) - word;
7431 offset = word * UNITS_PER_WORD;
7432 if (BYTES_BIG_ENDIAN)
7433 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7434 else
7435 offset += byte % UNITS_PER_WORD;
7436 }
7437 else
7438 {
7439 offset = byte;
7440 if (BYTES_BIG_ENDIAN)
7441 {
7442 /* Reverse bytes within each long, or within the entire float
7443 if it's smaller than a long (for HFmode). */
7444 offset = MIN (3, total_bytes - 1) - offset;
7445 gcc_assert (offset >= 0);
7446 }
7447 }
7448 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7449
7450 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7451 }
7452
7453 real_from_target (&r, tmp, mode);
7454 return build_real (type, r);
7455 }
7456
7457
7458 /* Subroutine of native_interpret_expr. Interpret the contents of
7459 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7460 If the buffer cannot be interpreted, return NULL_TREE. */
7461
7462 static tree
7463 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7464 {
7465 tree etype, rpart, ipart;
7466 int size;
7467
7468 etype = TREE_TYPE (type);
7469 size = GET_MODE_SIZE (TYPE_MODE (etype));
7470 if (size * 2 > len)
7471 return NULL_TREE;
7472 rpart = native_interpret_expr (etype, ptr, size);
7473 if (!rpart)
7474 return NULL_TREE;
7475 ipart = native_interpret_expr (etype, ptr+size, size);
7476 if (!ipart)
7477 return NULL_TREE;
7478 return build_complex (type, rpart, ipart);
7479 }
7480
7481
7482 /* Subroutine of native_interpret_expr. Interpret the contents of
7483 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7484 If the buffer cannot be interpreted, return NULL_TREE. */
7485
7486 static tree
7487 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7488 {
7489 tree etype, elem;
7490 int i, size, count;
7491 tree *elements;
7492
7493 etype = TREE_TYPE (type);
7494 size = GET_MODE_SIZE (TYPE_MODE (etype));
7495 count = TYPE_VECTOR_SUBPARTS (type);
7496 if (size * count > len)
7497 return NULL_TREE;
7498
7499 elements = XALLOCAVEC (tree, count);
7500 for (i = count - 1; i >= 0; i--)
7501 {
7502 elem = native_interpret_expr (etype, ptr+(i*size), size);
7503 if (!elem)
7504 return NULL_TREE;
7505 elements[i] = elem;
7506 }
7507 return build_vector (type, elements);
7508 }
7509
7510
7511 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7512 the buffer PTR of length LEN as a constant of type TYPE. For
7513 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7514 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7515 return NULL_TREE. */
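/* This is the logical inverse of native_encode_expr: re-interpreting
   the bytes that encode a constant of the same type should yield an
   equal constant, which is what makes fold_view_convert_expr work.  */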
7516
7517 tree
7518 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7519 {
7520 switch (TREE_CODE (type))
7521 {
7522 case INTEGER_TYPE:
7523 case ENUMERAL_TYPE:
7524 case BOOLEAN_TYPE:
7525 case POINTER_TYPE:
7526 case REFERENCE_TYPE:
7527 return native_interpret_int (type, ptr, len);
7528
7529 case REAL_TYPE:
7530 return native_interpret_real (type, ptr, len);
7531
7532 case FIXED_POINT_TYPE:
7533 return native_interpret_fixed (type, ptr, len);
7534
7535 case COMPLEX_TYPE:
7536 return native_interpret_complex (type, ptr, len);
7537
7538 case VECTOR_TYPE:
7539 return native_interpret_vector (type, ptr, len);
7540
7541 default:
7542 return NULL_TREE;
7543 }
7544 }
7545
7546 /* Returns true if we can interpret the contents of a native encoding
7547 as TYPE. */
7548
7549 static bool
7550 can_native_interpret_type_p (tree type)
7551 {
7552 switch (TREE_CODE (type))
7553 {
7554 case INTEGER_TYPE:
7555 case ENUMERAL_TYPE:
7556 case BOOLEAN_TYPE:
7557 case POINTER_TYPE:
7558 case REFERENCE_TYPE:
7559 case FIXED_POINT_TYPE:
7560 case REAL_TYPE:
7561 case COMPLEX_TYPE:
7562 case VECTOR_TYPE:
7563 return true;
7564 default:
7565 return false;
7566 }
7567 }
7568
7569 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7570 TYPE at compile-time. If we're unable to perform the conversion
7571 return NULL_TREE. */
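/* For instance, on a little-endian target with IEEE single precision
   floats, VIEW_CONVERT_EXPR<int>(1.0f) folds to 1065353216
   (0x3f800000).  */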
7572
7573 static tree
7574 fold_view_convert_expr (tree type, tree expr)
7575 {
7576 /* We support up to 512-bit values (for V8DFmode). */
7577 unsigned char buffer[64];
7578 int len;
7579
7580 /* Check that the host and target are sane. */
7581 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7582 return NULL_TREE;
7583
7584 len = native_encode_expr (expr, buffer, sizeof (buffer));
7585 if (len == 0)
7586 return NULL_TREE;
7587
7588 return native_interpret_expr (type, buffer, len);
7589 }
7590
7591 /* Build an expression for the address of T. Folds away INDIRECT_REF
7592 to avoid confusing the gimplify process. */
7593
7594 tree
7595 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7596 {
7597 /* The size of the object is not relevant when talking about its address. */
7598 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7599 t = TREE_OPERAND (t, 0);
7600
7601 if (TREE_CODE (t) == INDIRECT_REF)
7602 {
7603 t = TREE_OPERAND (t, 0);
7604
7605 if (TREE_TYPE (t) != ptrtype)
7606 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7607 }
7608 else if (TREE_CODE (t) == MEM_REF
7609 && integer_zerop (TREE_OPERAND (t, 1)))
7610 return TREE_OPERAND (t, 0);
7611 else if (TREE_CODE (t) == MEM_REF
7612 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7613 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7614 TREE_OPERAND (t, 0),
7615 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7616 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7617 {
7618 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7619
7620 if (TREE_TYPE (t) != ptrtype)
7621 t = fold_convert_loc (loc, ptrtype, t);
7622 }
7623 else
7624 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7625
7626 return t;
7627 }
7628
7629 /* Build an expression for the address of T. */
7630
7631 tree
7632 build_fold_addr_expr_loc (location_t loc, tree t)
7633 {
7634 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7635
7636 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7637 }
7638
7639 /* Fold a unary expression of code CODE and type TYPE with operand
7640 OP0. Return the folded expression if folding is successful.
7641 Otherwise, return NULL_TREE. */
7642
7643 tree
7644 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7645 {
7646 tree tem;
7647 tree arg0;
7648 enum tree_code_class kind = TREE_CODE_CLASS (code);
7649
7650 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7651 && TREE_CODE_LENGTH (code) == 1);
7652
7653 arg0 = op0;
7654 if (arg0)
7655 {
7656 if (CONVERT_EXPR_CODE_P (code)
7657 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7658 {
7659 /* Don't use STRIP_NOPS, because signedness of argument type
7660 matters. */
7661 STRIP_SIGN_NOPS (arg0);
7662 }
7663 else
7664 {
7665 /* Strip any conversions that don't change the mode. This
7666 is safe for every expression, except for a comparison
7667 expression because its signedness is derived from its
7668 operands.
7669
7670 Note that this is done as an internal manipulation within
7671 the constant folder, in order to find the simplest
7672 representation of the arguments so that their form can be
7673 studied. In any case, the appropriate type conversions
7674 should be put back in the tree that will get out of the
7675 constant folder. */
7676 STRIP_NOPS (arg0);
7677 }
7678
7679 if (CONSTANT_CLASS_P (arg0))
7680 {
7681 tree tem = const_unop (code, type, arg0);
7682 if (tem)
7683 {
7684 if (TREE_TYPE (tem) != type)
7685 tem = fold_convert_loc (loc, type, tem);
7686 return tem;
7687 }
7688 }
7689 }
7690
7691 tem = generic_simplify (loc, code, type, op0);
7692 if (tem)
7693 return tem;
7694
7695 if (TREE_CODE_CLASS (code) == tcc_unary)
7696 {
7697 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7698 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7699 fold_build1_loc (loc, code, type,
7700 fold_convert_loc (loc, TREE_TYPE (op0),
7701 TREE_OPERAND (arg0, 1))));
7702 else if (TREE_CODE (arg0) == COND_EXPR)
7703 {
7704 tree arg01 = TREE_OPERAND (arg0, 1);
7705 tree arg02 = TREE_OPERAND (arg0, 2);
7706 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7707 arg01 = fold_build1_loc (loc, code, type,
7708 fold_convert_loc (loc,
7709 TREE_TYPE (op0), arg01));
7710 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7711 arg02 = fold_build1_loc (loc, code, type,
7712 fold_convert_loc (loc,
7713 TREE_TYPE (op0), arg02));
7714 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7715 arg01, arg02);
7716
7717 /* If this was a conversion, and all we did was to move it
7718 inside the COND_EXPR, bring it back out. But leave it if
7719 it is a conversion from integer to integer and the
7720 result precision is no wider than a word since such a
7721 conversion is cheap and may be optimized away by combine,
7722 while it couldn't if it were outside the COND_EXPR. Then return
7723 so we don't get into an infinite recursion loop taking the
7724 conversion out and then back in. */
7725
7726 if ((CONVERT_EXPR_CODE_P (code)
7727 || code == NON_LVALUE_EXPR)
7728 && TREE_CODE (tem) == COND_EXPR
7729 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7730 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7731 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7732 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7733 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7734 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7735 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7736 && (INTEGRAL_TYPE_P
7737 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7738 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7739 || flag_syntax_only))
7740 tem = build1_loc (loc, code, type,
7741 build3 (COND_EXPR,
7742 TREE_TYPE (TREE_OPERAND
7743 (TREE_OPERAND (tem, 1), 0)),
7744 TREE_OPERAND (tem, 0),
7745 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7746 TREE_OPERAND (TREE_OPERAND (tem, 2),
7747 0)));
7748 return tem;
7749 }
7750 }
7751
7752 switch (code)
7753 {
7754 case NON_LVALUE_EXPR:
7755 if (!maybe_lvalue_p (op0))
7756 return fold_convert_loc (loc, type, op0);
7757 return NULL_TREE;
7758
7759 CASE_CONVERT:
7760 case FLOAT_EXPR:
7761 case FIX_TRUNC_EXPR:
7762 if (COMPARISON_CLASS_P (op0))
7763 {
7764 /* If we have (type) (a CMP b) and type is an integral type, return
7765 new expression involving the new type. Canonicalize
7766 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7767 non-integral type.
7768 Do not fold the result, as that would not simplify further;
7769 folding again would also lead to recursion. */
7770 if (TREE_CODE (type) == BOOLEAN_TYPE)
7771 return build2_loc (loc, TREE_CODE (op0), type,
7772 TREE_OPERAND (op0, 0),
7773 TREE_OPERAND (op0, 1));
7774 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7775 && TREE_CODE (type) != VECTOR_TYPE)
7776 return build3_loc (loc, COND_EXPR, type, op0,
7777 constant_boolean_node (true, type),
7778 constant_boolean_node (false, type));
7779 }
7780
7781 /* Handle (T *)&A.B.C for A being of type T and B and C
7782 living at offset zero. This occurs frequently in
7783 C++ upcasting and then accessing the base. */
7784 if (TREE_CODE (op0) == ADDR_EXPR
7785 && POINTER_TYPE_P (type)
7786 && handled_component_p (TREE_OPERAND (op0, 0)))
7787 {
7788 HOST_WIDE_INT bitsize, bitpos;
7789 tree offset;
7790 machine_mode mode;
7791 int unsignedp, reversep, volatilep;
7792 tree base
7793 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7794 &offset, &mode, &unsignedp, &reversep,
7795 &volatilep, false);
7796 /* If the reference was to a (constant) zero offset, we can use
7797 the address of the base if it has the same base type
7798 as the result type and the pointer type is unqualified. */
7799 if (! offset && bitpos == 0
7800 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7801 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7802 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7803 return fold_convert_loc (loc, type,
7804 build_fold_addr_expr_loc (loc, base));
7805 }
7806
7807 if (TREE_CODE (op0) == MODIFY_EXPR
7808 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7809 /* Detect assigning a bitfield. */
7810 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7811 && DECL_BIT_FIELD
7812 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7813 {
7814 /* Don't leave an assignment inside a conversion
7815 unless assigning a bitfield. */
7816 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7817 /* First do the assignment, then return converted constant. */
7818 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7819 TREE_NO_WARNING (tem) = 1;
7820 TREE_USED (tem) = 1;
7821 return tem;
7822 }
7823
7824 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7825 constant (if x has signed type, the sign bit cannot be set
7826 in c). This folds extension into the BIT_AND_EXPR.
7827 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7828 very likely don't have maximal range for their precision and this
7829 transformation effectively doesn't preserve non-maximal ranges. */
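/* For example, (long) (c & 0x7f) with c of type signed char becomes
   (long) c & 0x7f: the mask clears the sign bit, so extending before
   masking is equivalent.  */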
7830 if (TREE_CODE (type) == INTEGER_TYPE
7831 && TREE_CODE (op0) == BIT_AND_EXPR
7832 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7833 {
7834 tree and_expr = op0;
7835 tree and0 = TREE_OPERAND (and_expr, 0);
7836 tree and1 = TREE_OPERAND (and_expr, 1);
7837 int change = 0;
7838
7839 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7840 || (TYPE_PRECISION (type)
7841 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7842 change = 1;
7843 else if (TYPE_PRECISION (TREE_TYPE (and1))
7844 <= HOST_BITS_PER_WIDE_INT
7845 && tree_fits_uhwi_p (and1))
7846 {
7847 unsigned HOST_WIDE_INT cst;
7848
7849 cst = tree_to_uhwi (and1);
7850 cst &= HOST_WIDE_INT_M1U
7851 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7852 change = (cst == 0);
7853 if (change
7854 && !flag_syntax_only
7855 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7856 == ZERO_EXTEND))
7857 {
7858 tree uns = unsigned_type_for (TREE_TYPE (and0));
7859 and0 = fold_convert_loc (loc, uns, and0);
7860 and1 = fold_convert_loc (loc, uns, and1);
7861 }
7862 }
7863 if (change)
7864 {
7865 tem = force_fit_type (type, wi::to_widest (and1), 0,
7866 TREE_OVERFLOW (and1));
7867 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7868 fold_convert_loc (loc, type, and0), tem);
7869 }
7870 }
7871
7872 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7873 cast (T1)X will fold away. We assume that this happens when X itself
7874 is a cast. */
7875 if (POINTER_TYPE_P (type)
7876 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7877 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7878 {
7879 tree arg00 = TREE_OPERAND (arg0, 0);
7880 tree arg01 = TREE_OPERAND (arg0, 1);
7881
7882 return fold_build_pointer_plus_loc
7883 (loc, fold_convert_loc (loc, type, arg00), arg01);
7884 }
7885
7886 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7887 of the same precision, and X is an integer type not narrower than
7888 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7889 if (INTEGRAL_TYPE_P (type)
7890 && TREE_CODE (op0) == BIT_NOT_EXPR
7891 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7892 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7893 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7894 {
7895 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7896 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7897 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7898 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7899 fold_convert_loc (loc, type, tem));
7900 }
7901
7902 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7903 type of X and Y (integer types only). */
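/* E.g. (short) (x * y) with int operands becomes, when short does not
   wrap on overflow, (short) ((unsigned short) x * (unsigned short) y),
   so that the narrower multiplication cannot introduce new overflow.  */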
7904 if (INTEGRAL_TYPE_P (type)
7905 && TREE_CODE (op0) == MULT_EXPR
7906 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7907 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7908 {
7909 /* Be careful not to introduce new overflows. */
7910 tree mult_type;
7911 if (TYPE_OVERFLOW_WRAPS (type))
7912 mult_type = type;
7913 else
7914 mult_type = unsigned_type_for (type);
7915
7916 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7917 {
7918 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7919 fold_convert_loc (loc, mult_type,
7920 TREE_OPERAND (op0, 0)),
7921 fold_convert_loc (loc, mult_type,
7922 TREE_OPERAND (op0, 1)));
7923 return fold_convert_loc (loc, type, tem);
7924 }
7925 }
7926
7927 return NULL_TREE;
7928
7929 case VIEW_CONVERT_EXPR:
7930 if (TREE_CODE (op0) == MEM_REF)
7931 {
7932 tem = fold_build2_loc (loc, MEM_REF, type,
7933 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7934 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7935 return tem;
7936 }
7937
7938 return NULL_TREE;
7939
7940 case NEGATE_EXPR:
7941 tem = fold_negate_expr (loc, arg0);
7942 if (tem)
7943 return fold_convert_loc (loc, type, tem);
7944 return NULL_TREE;
7945
7946 case ABS_EXPR:
7947 /* Convert fabs((double)float) into (double)fabsf(float). */
7948 if (TREE_CODE (arg0) == NOP_EXPR
7949 && TREE_CODE (type) == REAL_TYPE)
7950 {
7951 tree targ0 = strip_float_extensions (arg0);
7952 if (targ0 != arg0)
7953 return fold_convert_loc (loc, type,
7954 fold_build1_loc (loc, ABS_EXPR,
7955 TREE_TYPE (targ0),
7956 targ0));
7957 }
7958 return NULL_TREE;
7959
7960 case BIT_NOT_EXPR:
7961 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7962 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7963 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7964 fold_convert_loc (loc, type,
7965 TREE_OPERAND (arg0, 0)))))
7966 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7967 fold_convert_loc (loc, type,
7968 TREE_OPERAND (arg0, 1)));
7969 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7970 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7971 fold_convert_loc (loc, type,
7972 TREE_OPERAND (arg0, 1)))))
7973 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7974 fold_convert_loc (loc, type,
7975 TREE_OPERAND (arg0, 0)), tem);
7976
7977 return NULL_TREE;
7978
7979 case TRUTH_NOT_EXPR:
7980 /* Note that the operand of this must be an int
7981 and its values must be 0 or 1.
7982 ("true" is a fixed value perhaps depending on the language,
7983 but we don't handle values other than 1 correctly yet.) */
7984 tem = fold_truth_not_expr (loc, arg0);
7985 if (!tem)
7986 return NULL_TREE;
7987 return fold_convert_loc (loc, type, tem);
7988
7989 case INDIRECT_REF:
7990 /* Fold *&X to X if X is an lvalue. */
7991 if (TREE_CODE (op0) == ADDR_EXPR)
7992 {
7993 tree op00 = TREE_OPERAND (op0, 0);
7994 if ((TREE_CODE (op00) == VAR_DECL
7995 || TREE_CODE (op00) == PARM_DECL
7996 || TREE_CODE (op00) == RESULT_DECL)
7997 && !TREE_READONLY (op00))
7998 return op00;
7999 }
8000 return NULL_TREE;
8001
8002 default:
8003 return NULL_TREE;
8004 } /* switch (code) */
8005 }
8006
8007
8008 /* If the operation was a conversion do _not_ mark a resulting constant
8009 with TREE_OVERFLOW if the original constant was not. These conversions
8010 have implementation defined behavior and retaining the TREE_OVERFLOW
8011 flag here would confuse later passes such as VRP. */
8012 tree
8013 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8014 tree type, tree op0)
8015 {
8016 tree res = fold_unary_loc (loc, code, type, op0);
8017 if (res
8018 && TREE_CODE (res) == INTEGER_CST
8019 && TREE_CODE (op0) == INTEGER_CST
8020 && CONVERT_EXPR_CODE_P (code))
8021 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8022
8023 return res;
8024 }
8025
8026 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8027 operands OP0 and OP1. LOC is the location of the resulting expression.
8028 ARG0 and ARG1 are the results of STRIP_NOPS on OP0 and OP1.
8029 Return the folded expression if folding is successful. Otherwise,
8030 return NULL_TREE. */
8031 static tree
8032 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8033 tree arg0, tree arg1, tree op0, tree op1)
8034 {
8035 tree tem;
8036
8037 /* We only do these simplifications if we are optimizing. */
8038 if (!optimize)
8039 return NULL_TREE;
8040
8041 /* Check for things like (A || B) && (A || C). We can convert this
8042 to A || (B && C). Note that either operator can be any of the four
8043 truth and/or operations and the transformation will still be
8044 valid. Also note that we only care about order for the
8045 ANDIF and ORIF operators. If B contains side effects, this
8046 might change the truth-value of A. */
8047 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8048 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8049 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8050 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8051 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8052 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8053 {
8054 tree a00 = TREE_OPERAND (arg0, 0);
8055 tree a01 = TREE_OPERAND (arg0, 1);
8056 tree a10 = TREE_OPERAND (arg1, 0);
8057 tree a11 = TREE_OPERAND (arg1, 1);
8058 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8059 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8060 && (code == TRUTH_AND_EXPR
8061 || code == TRUTH_OR_EXPR));
8062
8063 if (operand_equal_p (a00, a10, 0))
8064 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8065 fold_build2_loc (loc, code, type, a01, a11));
8066 else if (commutative && operand_equal_p (a00, a11, 0))
8067 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8068 fold_build2_loc (loc, code, type, a01, a10));
8069 else if (commutative && operand_equal_p (a01, a10, 0))
8070 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8071 fold_build2_loc (loc, code, type, a00, a11));
8072
8073 /* This case is tricky because we must either have commutative
8074 operators or else A10 must not have side-effects. */
8075
8076 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8077 && operand_equal_p (a01, a11, 0))
8078 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8079 fold_build2_loc (loc, code, type, a00, a10),
8080 a01);
8081 }
8082
8083 /* See if we can build a range comparison. */
8084 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8085 return tem;
8086
8087 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8088 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8089 {
8090 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8091 if (tem)
8092 return fold_build2_loc (loc, code, type, tem, arg1);
8093 }
8094
8095 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8096 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8097 {
8098 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8099 if (tem)
8100 return fold_build2_loc (loc, code, type, arg0, tem);
8101 }
8102
8103 /* Check for the possibility of merging component references. If our
8104 lhs is another similar operation, try to merge its rhs with our
8105 rhs. Then try to merge our lhs and rhs. */
8106 if (TREE_CODE (arg0) == code
8107 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8108 TREE_OPERAND (arg0, 1), arg1)))
8109 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8110
8111 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8112 return tem;
8113
8114 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8115 && (code == TRUTH_AND_EXPR
8116 || code == TRUTH_ANDIF_EXPR
8117 || code == TRUTH_OR_EXPR
8118 || code == TRUTH_ORIF_EXPR))
8119 {
8120 enum tree_code ncode, icode;
8121
8122 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8123 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8124 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8125
8126 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8127 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8128 We don't want to pack more than two leaves into a non-IF AND/OR
8129 expression.
8130 If the tree code of the left-hand operand isn't an AND/OR-IF code
8131 and isn't equal to IF-CODE, then we don't want to add the
8132 right-hand operand. If the inner right-hand side of the left-hand
8133 operand has side effects, or isn't simple, then we can't add to
8134 it, as otherwise we might destroy the if-sequence. */
8135 if (TREE_CODE (arg0) == icode
8136 && simple_operand_p_2 (arg1)
8137 /* Needed for sequence points to handle trapping and
8138 side effects. */
8139 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8140 {
8141 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8142 arg1);
8143 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8144 tem);
8145 }
8146 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8147 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8148 else if (TREE_CODE (arg1) == icode
8149 && simple_operand_p_2 (arg0)
8150 /* Needed for sequence points to handle trapping and
8151 side effects. */
8152 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8153 {
8154 tem = fold_build2_loc (loc, ncode, type,
8155 arg0, TREE_OPERAND (arg1, 0));
8156 return fold_build2_loc (loc, icode, type, tem,
8157 TREE_OPERAND (arg1, 1));
8158 }
8159 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8160 into (A OR B).
8161 For sequence point consistency, we need to check for trapping
8162 and side effects. */
8163 else if (code == icode && simple_operand_p_2 (arg0)
8164 && simple_operand_p_2 (arg1))
8165 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8166 }
8167
8168 return NULL_TREE;
8169 }
8170
8171 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8172 by changing CODE to reduce the magnitude of constants involved in
8173 ARG0 of the comparison.
8174 Returns a canonicalized comparison tree if a simplification was
8175 possible, otherwise returns NULL_TREE.
8176 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8177 valid if signed overflow is undefined. */
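/* For example, with signed X, X - 5 < Y canonicalizes to X - 4 <= Y:
   the comparison code absorbs one unit of the constant, reducing its
   magnitude.  */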
8178
8179 static tree
8180 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8181 tree arg0, tree arg1,
8182 bool *strict_overflow_p)
8183 {
8184 enum tree_code code0 = TREE_CODE (arg0);
8185 tree t, cst0 = NULL_TREE;
8186 int sgn0;
8187
8188 /* Match A +- CST code arg1. We can change this only if overflow
8189 is undefined. */
8190 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8191 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8192 /* In principle pointers also have undefined overflow behavior,
8193 but that causes problems elsewhere. */
8194 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8195 && (code0 == MINUS_EXPR
8196 || code0 == PLUS_EXPR)
8197 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8198 return NULL_TREE;
8199
8200 /* Identify the constant in arg0 and its sign. */
8201 cst0 = TREE_OPERAND (arg0, 1);
8202 sgn0 = tree_int_cst_sgn (cst0);
8203
8204 /* Overflowed constants and zero will cause problems. */
8205 if (integer_zerop (cst0)
8206 || TREE_OVERFLOW (cst0))
8207 return NULL_TREE;
8208
8209 /* See if we can reduce the magnitude of the constant in
8210 arg0 by changing the comparison code. */
8211 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8212 if (code == LT_EXPR
8213 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8214 code = LE_EXPR;
8215 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8216 else if (code == GT_EXPR
8217 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8218 code = GE_EXPR;
8219 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8220 else if (code == LE_EXPR
8221 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8222 code = LT_EXPR;
8223 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8224 else if (code == GE_EXPR
8225 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8226 code = GT_EXPR;
8227 else
8228 return NULL_TREE;
8229 *strict_overflow_p = true;
8230
8231 /* Now build the constant reduced in magnitude. But not if that
8232 would produce one outside of its type's range. */
8233 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8234 && ((sgn0 == 1
8235 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8236 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8237 || (sgn0 == -1
8238 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8239 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8240 return NULL_TREE;
8241
8242 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8243 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8244 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8245 t = fold_convert (TREE_TYPE (arg1), t);
8246
8247 return fold_build2_loc (loc, code, type, t, arg1);
8248 }
8249
8250 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8251 overflow further. Try to decrease the magnitude of constants involved
8252 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8253 and put sole constants at the second argument position.
8254 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8255
8256 static tree
8257 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8258 tree arg0, tree arg1)
8259 {
8260 tree t;
8261 bool strict_overflow_p;
8262 const char * const warnmsg = G_("assuming signed overflow does not occur "
8263 "when reducing constant in comparison");
8264
8265 /* Try canonicalization by simplifying arg0. */
8266 strict_overflow_p = false;
8267 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8268 &strict_overflow_p);
8269 if (t)
8270 {
8271 if (strict_overflow_p)
8272 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8273 return t;
8274 }
8275
8276 /* Try canonicalization by simplifying arg1 using the swapped
8277 comparison. */
8278 code = swap_tree_comparison (code);
8279 strict_overflow_p = false;
8280 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8281 &strict_overflow_p);
8282 if (t && strict_overflow_p)
8283 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8284 return t;
8285 }
8286
8287 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8288 space. This is used to avoid issuing overflow warnings for
8289 expressions like &p->x which cannot wrap. */
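/* The test is conservative: a symbolic or overflowed constant OFFSET
   is assumed to possibly wrap, and otherwise the total byte offset is
   compared against the size of the object BASE points to.  */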
8290
8291 static bool
8292 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8293 {
8294 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8295 return true;
8296
8297 if (bitpos < 0)
8298 return true;
8299
8300 wide_int wi_offset;
8301 int precision = TYPE_PRECISION (TREE_TYPE (base));
8302 if (offset == NULL_TREE)
8303 wi_offset = wi::zero (precision);
8304 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8305 return true;
8306 else
8307 wi_offset = offset;
8308
8309 bool overflow;
8310 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8311 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8312 if (overflow)
8313 return true;
8314
8315 if (!wi::fits_uhwi_p (total))
8316 return true;
8317
8318 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8319 if (size <= 0)
8320 return true;
8321
8322 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8323 array. */
8324 if (TREE_CODE (base) == ADDR_EXPR)
8325 {
8326 HOST_WIDE_INT base_size;
8327
8328 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8329 if (base_size > 0 && size < base_size)
8330 size = base_size;
8331 }
8332
8333 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8334 }
8335
8336 /* Subroutine of fold_binary. This routine performs all of the
8337 transformations that are common to the equality/inequality
8338 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8339 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8340 fold_binary should call fold_binary instead. Fold a comparison with
8341 tree code CODE and type TYPE with operands OP0 and OP1. Return
8342 the folded comparison or NULL_TREE. */
8343
8344 static tree
8345 fold_comparison (location_t loc, enum tree_code code, tree type,
8346 tree op0, tree op1)
8347 {
8348 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8349 tree arg0, arg1, tem;
8350
8351 arg0 = op0;
8352 arg1 = op1;
8353
8354 STRIP_SIGN_NOPS (arg0);
8355 STRIP_SIGN_NOPS (arg1);
8356
8357 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
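/* For instance, X + 4 < 10 becomes X < 6. For the ordering operators
   this relies on signed overflow being undefined, hence the
   -Wstrict-overflow warning issued below when it applies.  */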
8358 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8359 && (equality_code
8360 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8361 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8362 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8363 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8364 && TREE_CODE (arg1) == INTEGER_CST
8365 && !TREE_OVERFLOW (arg1))
8366 {
8367 const enum tree_code
8368 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8369 tree const1 = TREE_OPERAND (arg0, 1);
8370 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8371 tree variable = TREE_OPERAND (arg0, 0);
8372 tree new_const = int_const_binop (reverse_op, const2, const1);
8373
8374 /* If the constant operation overflowed this can be
8375 simplified as a comparison against INT_MAX/INT_MIN. */
8376 if (TREE_OVERFLOW (new_const)
8377 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8378 {
8379 int const1_sgn = tree_int_cst_sgn (const1);
8380 enum tree_code code2 = code;
8381
8382 /* Get the sign of the constant on the lhs if the
8383 operation were VARIABLE + CONST1. */
8384 if (TREE_CODE (arg0) == MINUS_EXPR)
8385 const1_sgn = -const1_sgn;
8386
8387 /* The sign of the constant determines if we overflowed
8388 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8389 Canonicalize to the INT_MIN overflow by swapping the comparison
8390 if necessary. */
8391 if (const1_sgn == -1)
8392 code2 = swap_tree_comparison (code);
8393
8394 /* We now can look at the canonicalized case
8395 VARIABLE + 1 CODE2 INT_MIN
8396 and decide on the result. */
8397 switch (code2)
8398 {
8399 case EQ_EXPR:
8400 case LT_EXPR:
8401 case LE_EXPR:
8402 return
8403 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8404
8405 case NE_EXPR:
8406 case GE_EXPR:
8407 case GT_EXPR:
8408 return
8409 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8410
8411 default:
8412 gcc_unreachable ();
8413 }
8414 }
8415 else
8416 {
8417 if (!equality_code)
8418 fold_overflow_warning ("assuming signed overflow does not occur "
8419 "when changing X +- C1 cmp C2 to "
8420 "X cmp C2 -+ C1",
8421 WARN_STRICT_OVERFLOW_COMPARISON);
8422 return fold_build2_loc (loc, code, type, variable, new_const);
8423 }
8424 }
8425
8426 /* For a comparison of pointers we can decompose it to a compile time
8427 comparison of the base objects and the offsets into the object.
8428 This requires at least one operand being an ADDR_EXPR or a
8429 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8430 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8431 && (TREE_CODE (arg0) == ADDR_EXPR
8432 || TREE_CODE (arg1) == ADDR_EXPR
8433 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8434 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8435 {
8436 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8437 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8438 machine_mode mode;
8439 int volatilep, reversep, unsignedp;
8440 bool indirect_base0 = false, indirect_base1 = false;
8441
8442 /* Get base and offset for the access. Strip ADDR_EXPR for
8443 get_inner_reference, but put it back by stripping INDIRECT_REF
8444 off the base object if possible. indirect_baseN will be true
8445 if baseN is not an address but refers to the object itself. */
8446 base0 = arg0;
8447 if (TREE_CODE (arg0) == ADDR_EXPR)
8448 {
8449 base0
8450 = get_inner_reference (TREE_OPERAND (arg0, 0),
8451 &bitsize, &bitpos0, &offset0, &mode,
8452 &unsignedp, &reversep, &volatilep, false);
8453 if (TREE_CODE (base0) == INDIRECT_REF)
8454 base0 = TREE_OPERAND (base0, 0);
8455 else
8456 indirect_base0 = true;
8457 }
8458 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8459 {
8460 base0 = TREE_OPERAND (arg0, 0);
8461 STRIP_SIGN_NOPS (base0);
8462 if (TREE_CODE (base0) == ADDR_EXPR)
8463 {
8464 base0
8465 = get_inner_reference (TREE_OPERAND (base0, 0),
8466 &bitsize, &bitpos0, &offset0, &mode,
8467 &unsignedp, &reversep, &volatilep,
8468 false);
8469 if (TREE_CODE (base0) == INDIRECT_REF)
8470 base0 = TREE_OPERAND (base0, 0);
8471 else
8472 indirect_base0 = true;
8473 }
8474 if (offset0 == NULL_TREE || integer_zerop (offset0))
8475 offset0 = TREE_OPERAND (arg0, 1);
8476 else
8477 offset0 = size_binop (PLUS_EXPR, offset0,
8478 TREE_OPERAND (arg0, 1));
8479 if (TREE_CODE (offset0) == INTEGER_CST)
8480 {
8481 offset_int tem = wi::sext (wi::to_offset (offset0),
8482 TYPE_PRECISION (sizetype));
8483 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8484 tem += bitpos0;
8485 if (wi::fits_shwi_p (tem))
8486 {
8487 bitpos0 = tem.to_shwi ();
8488 offset0 = NULL_TREE;
8489 }
8490 }
8491 }
8492
8493 base1 = arg1;
8494 if (TREE_CODE (arg1) == ADDR_EXPR)
8495 {
8496 base1
8497 = get_inner_reference (TREE_OPERAND (arg1, 0),
8498 &bitsize, &bitpos1, &offset1, &mode,
8499 &unsignedp, &reversep, &volatilep, false);
8500 if (TREE_CODE (base1) == INDIRECT_REF)
8501 base1 = TREE_OPERAND (base1, 0);
8502 else
8503 indirect_base1 = true;
8504 }
8505 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8506 {
8507 base1 = TREE_OPERAND (arg1, 0);
8508 STRIP_SIGN_NOPS (base1);
8509 if (TREE_CODE (base1) == ADDR_EXPR)
8510 {
8511 base1
8512 = get_inner_reference (TREE_OPERAND (base1, 0),
8513 &bitsize, &bitpos1, &offset1, &mode,
8514 &unsignedp, &reversep, &volatilep,
8515 false);
8516 if (TREE_CODE (base1) == INDIRECT_REF)
8517 base1 = TREE_OPERAND (base1, 0);
8518 else
8519 indirect_base1 = true;
8520 }
8521 if (offset1 == NULL_TREE || integer_zerop (offset1))
8522 offset1 = TREE_OPERAND (arg1, 1);
8523 else
8524 offset1 = size_binop (PLUS_EXPR, offset1,
8525 TREE_OPERAND (arg1, 1));
8526 if (TREE_CODE (offset1) == INTEGER_CST)
8527 {
8528 offset_int tem = wi::sext (wi::to_offset (offset1),
8529 TYPE_PRECISION (sizetype));
8530 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8531 tem += bitpos1;
8532 if (wi::fits_shwi_p (tem))
8533 {
8534 bitpos1 = tem.to_shwi ();
8535 offset1 = NULL_TREE;
8536 }
8537 }
8538 }
8539
8540 /* If we have equivalent bases we might be able to simplify. */
8541 if (indirect_base0 == indirect_base1
8542 && operand_equal_p (base0, base1,
8543 indirect_base0 ? OEP_ADDRESS_OF : 0))
8544 {
8545 /* We can fold this expression to a constant if the non-constant
8546 offset parts are equal. */
8547 if ((offset0 == offset1
8548 || (offset0 && offset1
8549 && operand_equal_p (offset0, offset1, 0)))
8550 && (code == EQ_EXPR
8551 || code == NE_EXPR
8552 || (indirect_base0 && DECL_P (base0))
8553 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8555 {
8556 if (!equality_code
8557 && bitpos0 != bitpos1
8558 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8559 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8560 fold_overflow_warning (("assuming pointer wraparound does not "
8561 "occur when comparing P +- C1 with "
8562 "P +- C2"),
8563 WARN_STRICT_OVERFLOW_CONDITIONAL);
8564
8565 switch (code)
8566 {
8567 case EQ_EXPR:
8568 return constant_boolean_node (bitpos0 == bitpos1, type);
8569 case NE_EXPR:
8570 return constant_boolean_node (bitpos0 != bitpos1, type);
8571 case LT_EXPR:
8572 return constant_boolean_node (bitpos0 < bitpos1, type);
8573 case LE_EXPR:
8574 return constant_boolean_node (bitpos0 <= bitpos1, type);
8575 case GE_EXPR:
8576 return constant_boolean_node (bitpos0 >= bitpos1, type);
8577 case GT_EXPR:
8578 return constant_boolean_node (bitpos0 > bitpos1, type);
8579 default:;
8580 }
8581 }
8582 /* We can simplify the comparison to a comparison of the variable
8583 offset parts if the constant offset parts are equal.
8584 Be careful to use signed sizetype here because otherwise we
8585 mess with array offsets in the wrong way. This is possible
8586 because pointer arithmetic is restricted to remain within an
8587 object and overflow on pointer differences is undefined as of
8588 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8589 else if (bitpos0 == bitpos1
8590 && (equality_code
8591 || (indirect_base0 && DECL_P (base0))
8592 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8593 {
8594 /* By converting to signed sizetype we cover middle-end pointer
8595 arithmetic which operates on unsigned pointer types of size
8596 type size and ARRAY_REF offsets which are properly sign or
8597 zero extended from their type in case it is narrower than
8598 sizetype. */
8599 if (offset0 == NULL_TREE)
8600 offset0 = build_int_cst (ssizetype, 0);
8601 else
8602 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8603 if (offset1 == NULL_TREE)
8604 offset1 = build_int_cst (ssizetype, 0);
8605 else
8606 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8607
8608 if (!equality_code
8609 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8610 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8611 fold_overflow_warning (("assuming pointer wraparound does not "
8612 "occur when comparing P +- C1 with "
8613 "P +- C2"),
8614 WARN_STRICT_OVERFLOW_COMPARISON);
8615
8616 return fold_build2_loc (loc, code, type, offset0, offset1);
8617 }
8618 }
8619 /* For equal offsets we can simplify to a comparison of the
8620 base addresses. */
8621 else if (bitpos0 == bitpos1
8622 && (indirect_base0
8623 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8624 && (indirect_base1
8625 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8626 && ((offset0 == offset1)
8627 || (offset0 && offset1
8628 && operand_equal_p (offset0, offset1, 0))))
8629 {
8630 if (indirect_base0)
8631 base0 = build_fold_addr_expr_loc (loc, base0);
8632 if (indirect_base1)
8633 base1 = build_fold_addr_expr_loc (loc, base1);
8634 return fold_build2_loc (loc, code, type, base0, base1);
8635 }
8636 }
8637
8638 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8639 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8640 the resulting offset is smaller in absolute value than the
8641 original one and has the same sign. */
8642 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8643 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8644 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8645 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8646 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8647 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8648 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8649 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8650 {
8651 tree const1 = TREE_OPERAND (arg0, 1);
8652 tree const2 = TREE_OPERAND (arg1, 1);
8653 tree variable1 = TREE_OPERAND (arg0, 0);
8654 tree variable2 = TREE_OPERAND (arg1, 0);
8655 tree cst;
8656 const char * const warnmsg = G_("assuming signed overflow does not "
8657 "occur when combining constants around "
8658 "a comparison");
8659
8660 /* Put the constant on the side where it doesn't overflow and is
8661 	 of lower absolute value and of the same sign as before.  */
8662 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8663 ? MINUS_EXPR : PLUS_EXPR,
8664 const2, const1);
8665 if (!TREE_OVERFLOW (cst)
8666 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8667 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8668 {
8669 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8670 return fold_build2_loc (loc, code, type,
8671 variable1,
8672 fold_build2_loc (loc, TREE_CODE (arg1),
8673 TREE_TYPE (arg1),
8674 variable2, cst));
8675 }
8676
8677 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8678 ? MINUS_EXPR : PLUS_EXPR,
8679 const1, const2);
8680 if (!TREE_OVERFLOW (cst)
8681 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8682 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8683 {
8684 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8685 return fold_build2_loc (loc, code, type,
8686 fold_build2_loc (loc, TREE_CODE (arg0),
8687 TREE_TYPE (arg0),
8688 variable1, cst),
8689 variable2);
8690 }
8691 }
8692
8693 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8694 if (tem)
8695 return tem;
8696
8697 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8698 constant, we can simplify it. */
8699 if (TREE_CODE (arg1) == INTEGER_CST
8700 && (TREE_CODE (arg0) == MIN_EXPR
8701 || TREE_CODE (arg0) == MAX_EXPR)
8702 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8703 {
8704 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8705 if (tem)
8706 return tem;
8707 }
8708
8709 /* If we are comparing an expression that just has comparisons
8710 of two integer values, arithmetic expressions of those comparisons,
8711 and constants, we can simplify it. There are only three cases
8712 to check: the two values can either be equal, the first can be
8713 greater, or the second can be greater. Fold the expression for
8714 those three values. Since each value must be 0 or 1, we have
8715 eight possibilities, each of which corresponds to the constant 0
8716 or 1 or one of the six possible comparisons.
8717
8718 This handles common cases like (a > b) == 0 but also handles
8719 expressions like ((x > y) - (y > x)) > 0, which supposedly
8720 occur in macroized code. */
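  /* Worked example: for ((x > y) - (y > x)) > 0 the three substitutions
     below give high_result = 1, equal_result = 0 and low_result = 0,
     i.e. mask 4, so the whole expression folds to x > y.  */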
8721
8722 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8723 {
8724 tree cval1 = 0, cval2 = 0;
8725 int save_p = 0;
8726
8727 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8728 /* Don't handle degenerate cases here; they should already
8729 have been handled anyway. */
8730 && cval1 != 0 && cval2 != 0
8731 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8732 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8733 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8734 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8735 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8736 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8737 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8738 {
8739 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8740 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8741
8742 /* We can't just pass T to eval_subst in case cval1 or cval2
8743 was the same as ARG1. */
8744
8745 tree high_result
8746 = fold_build2_loc (loc, code, type,
8747 eval_subst (loc, arg0, cval1, maxval,
8748 cval2, minval),
8749 arg1);
8750 tree equal_result
8751 = fold_build2_loc (loc, code, type,
8752 eval_subst (loc, arg0, cval1, maxval,
8753 cval2, maxval),
8754 arg1);
8755 tree low_result
8756 = fold_build2_loc (loc, code, type,
8757 eval_subst (loc, arg0, cval1, minval,
8758 cval2, maxval),
8759 arg1);
8760
8761 /* All three of these results should be 0 or 1. Confirm they are.
8762 Then use those values to select the proper code to use. */
8763
8764 if (TREE_CODE (high_result) == INTEGER_CST
8765 && TREE_CODE (equal_result) == INTEGER_CST
8766 && TREE_CODE (low_result) == INTEGER_CST)
8767 {
8768 /* Make a 3-bit mask with the high-order bit being the
8769 value for `>', the next for '=', and the low for '<'. */
8770 switch ((integer_onep (high_result) * 4)
8771 + (integer_onep (equal_result) * 2)
8772 + integer_onep (low_result))
8773 {
8774 case 0:
8775 /* Always false. */
8776 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8777 case 1:
8778 code = LT_EXPR;
8779 break;
8780 case 2:
8781 code = EQ_EXPR;
8782 break;
8783 case 3:
8784 code = LE_EXPR;
8785 break;
8786 case 4:
8787 code = GT_EXPR;
8788 break;
8789 case 5:
8790 code = NE_EXPR;
8791 break;
8792 case 6:
8793 code = GE_EXPR;
8794 break;
8795 case 7:
8796 /* Always true. */
8797 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8798 }
8799
8800 if (save_p)
8801 {
8802 tem = save_expr (build2 (code, type, cval1, cval2));
8803 SET_EXPR_LOCATION (tem, loc);
8804 return tem;
8805 }
8806 return fold_build2_loc (loc, code, type, cval1, cval2);
8807 }
8808 }
8809 }
8810
8811 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8812 into a single range test. */
8813 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8814 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8815 && TREE_CODE (arg1) == INTEGER_CST
8816 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8817 && !integer_zerop (TREE_OPERAND (arg0, 1))
8818 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8819 && !TREE_OVERFLOW (arg1))
8820 {
8821 tem = fold_div_compare (loc, code, type, arg0, arg1);
8822 if (tem != NULL_TREE)
8823 return tem;
8824 }
8825
8826 return NULL_TREE;
8827 }
8828
8829
8830 /* Subroutine of fold_binary.  Optimize complex multiplications of the
8831 form z * conj(z) into pow(realpart(z),2) + pow(imagpart(z),2).  The
8832 argument EXPR represents the expression "z" of type TYPE. */
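/* For z = a + b*i this computes (a + b*i) * (a - b*i)
   = a*a + b*b + 0*i, which is exactly the COMPLEX_EXPR built below.  */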
8833
8834 static tree
8835 fold_mult_zconjz (location_t loc, tree type, tree expr)
8836 {
8837 tree itype = TREE_TYPE (type);
8838 tree rpart, ipart, tem;
8839
8840 if (TREE_CODE (expr) == COMPLEX_EXPR)
8841 {
8842 rpart = TREE_OPERAND (expr, 0);
8843 ipart = TREE_OPERAND (expr, 1);
8844 }
8845 else if (TREE_CODE (expr) == COMPLEX_CST)
8846 {
8847 rpart = TREE_REALPART (expr);
8848 ipart = TREE_IMAGPART (expr);
8849 }
8850 else
8851 {
8852 expr = save_expr (expr);
8853 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8854 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8855 }
8856
8857 rpart = save_expr (rpart);
8858 ipart = save_expr (ipart);
8859 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8860 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8861 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8862 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8863 build_zero_cst (itype));
8864 }
8865
8866
8867 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8868 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8869
8870 static bool
8871 vec_cst_ctor_to_array (tree arg, tree *elts)
8872 {
8873 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8874
8875 if (TREE_CODE (arg) == VECTOR_CST)
8876 {
8877 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8878 elts[i] = VECTOR_CST_ELT (arg, i);
8879 }
8880 else if (TREE_CODE (arg) == CONSTRUCTOR)
8881 {
8882 constructor_elt *elt;
8883
8884 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8885 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8886 return false;
8887 else
8888 elts[i] = elt->value;
8889 }
8890 else
8891 return false;
8892 for (; i < nelts; i++)
8893 elts[i]
8894 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8895 return true;
8896 }
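
/* E.g. a CONSTRUCTOR for a 4-element vector with initializers {1, 2}
   fills ELTS with {1, 2, 0, 0}: missing trailing elements are
   zero-filled by the loop above.  */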
8897
8898 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8899 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8900 NULL_TREE otherwise. */
8901
8902 static tree
8903 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8904 {
8905 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8906 tree *elts;
8907 bool need_ctor = false;
8908
8909 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8910 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8911 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8912 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8913 return NULL_TREE;
8914
8915 elts = XALLOCAVEC (tree, nelts * 3);
8916 if (!vec_cst_ctor_to_array (arg0, elts)
8917 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8918 return NULL_TREE;
8919
8920 for (i = 0; i < nelts; i++)
8921 {
8922 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8923 need_ctor = true;
8924 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8925 }
8926
8927 if (need_ctor)
8928 {
8929 vec<constructor_elt, va_gc> *v;
8930 vec_alloc (v, nelts);
8931 for (i = 0; i < nelts; i++)
8932 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8933 return build_constructor (type, v);
8934 }
8935 else
8936 return build_vector (type, &elts[2 * nelts]);
8937 }
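
/* E.g. with nelts == 4, ARG0 = {a0,a1,a2,a3}, ARG1 = {b0,b1,b2,b3} and
   SEL = {0,4,1,5}, the scratch array holds a0..a3 followed by b0..b3
   and the result is {a0,b0,a1,b1}, an interleave of the two low
   halves.  */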
8938
8939 /* Try to fold a pointer difference of type TYPE between two address
8940 expressions of array references AREF0 and AREF1 using location LOC.  Return a
8941 simplified expression for the difference or NULL_TREE. */
8942
8943 static tree
8944 fold_addr_of_array_ref_difference (location_t loc, tree type,
8945 tree aref0, tree aref1)
8946 {
8947 tree base0 = TREE_OPERAND (aref0, 0);
8948 tree base1 = TREE_OPERAND (aref1, 0);
8949 tree base_offset = build_int_cst (type, 0);
8950
8951 /* If the bases are array references as well, recurse. If the bases
8952 are pointer indirections compute the difference of the pointers.
8953 If the bases are equal, we are set. */
8954 if ((TREE_CODE (base0) == ARRAY_REF
8955 && TREE_CODE (base1) == ARRAY_REF
8956 && (base_offset
8957 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8958 || (INDIRECT_REF_P (base0)
8959 && INDIRECT_REF_P (base1)
8960 && (base_offset
8961 = fold_binary_loc (loc, MINUS_EXPR, type,
8962 fold_convert (type, TREE_OPERAND (base0, 0)),
8963 fold_convert (type,
8964 TREE_OPERAND (base1, 0)))))
8965 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8966 {
8967 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8968 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8969 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8970 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8971 return fold_build2_loc (loc, PLUS_EXPR, type,
8972 base_offset,
8973 fold_build2_loc (loc, MULT_EXPR, type,
8974 diff, esz));
8975 }
8976 return NULL_TREE;
8977 }
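
/* E.g. for &a[i] - &a[j] the bases compare equal, BASE_OFFSET is zero
   and the result is the byte difference (i - j) * sizeof (a[0]); any
   division by the element size is left to the surrounding
   expression.  */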
8978
8979 /* If the real or vector real constant CST of type TYPE has an exact
8980 inverse, return it, else return NULL. */
8981
8982 tree
8983 exact_inverse (tree type, tree cst)
8984 {
8985 REAL_VALUE_TYPE r;
8986 tree unit_type, *elts;
8987 machine_mode mode;
8988 unsigned vec_nelts, i;
8989
8990 switch (TREE_CODE (cst))
8991 {
8992 case REAL_CST:
8993 r = TREE_REAL_CST (cst);
8994
8995 if (exact_real_inverse (TYPE_MODE (type), &r))
8996 return build_real (type, r);
8997
8998 return NULL_TREE;
8999
9000 case VECTOR_CST:
9001 vec_nelts = VECTOR_CST_NELTS (cst);
9002 elts = XALLOCAVEC (tree, vec_nelts);
9003 unit_type = TREE_TYPE (type);
9004 mode = TYPE_MODE (unit_type);
9005
9006 for (i = 0; i < vec_nelts; i++)
9007 {
9008 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9009 if (!exact_real_inverse (mode, &r))
9010 return NULL_TREE;
9011 elts[i] = build_real (unit_type, r);
9012 }
9013
9014 return build_vector (type, elts);
9015
9016 default:
9017 return NULL_TREE;
9018 }
9019 }
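
/* E.g. 4.0 yields 0.25, which is exact in binary floating point,
   while 3.0 yields NULL_TREE because 1/3 is not exactly
   representable.  */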
9020
9021 /* Mask out the tz least significant bits of X of type TYPE where
9022 tz is the number of trailing zeroes in Y. */
9023 static wide_int
9024 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9025 {
9026 int tz = wi::ctz (y);
9027 if (tz > 0)
9028 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9029 return x;
9030 }
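
/* E.g. with an 8-bit TYPE, X = 0b0111 and Y = 0b1100 (two trailing
   zeroes), the result is 0b0100: the two low bits of X are cleared.  */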
9031
9032 /* Return true when T is an address and is known to be nonzero.
9033 For floating point we further ensure that T is not denormal.
9034 Similar logic is present in nonzero_address in rtlanal.h.
9035
9036 If the return value is based on the assumption that signed overflow
9037 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9038 change *STRICT_OVERFLOW_P. */
9039
9040 static bool
9041 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9042 {
9043 tree type = TREE_TYPE (t);
9044 enum tree_code code;
9045
9046 /* Doing something useful for floating point would need more work. */
9047 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9048 return false;
9049
9050 code = TREE_CODE (t);
9051 switch (TREE_CODE_CLASS (code))
9052 {
9053 case tcc_unary:
9054 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9055 strict_overflow_p);
9056 case tcc_binary:
9057 case tcc_comparison:
9058 return tree_binary_nonzero_warnv_p (code, type,
9059 TREE_OPERAND (t, 0),
9060 TREE_OPERAND (t, 1),
9061 strict_overflow_p);
9062 case tcc_constant:
9063 case tcc_declaration:
9064 case tcc_reference:
9065 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9066
9067 default:
9068 break;
9069 }
9070
9071 switch (code)
9072 {
9073 case TRUTH_NOT_EXPR:
9074 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9075 strict_overflow_p);
9076
9077 case TRUTH_AND_EXPR:
9078 case TRUTH_OR_EXPR:
9079 case TRUTH_XOR_EXPR:
9080 return tree_binary_nonzero_warnv_p (code, type,
9081 TREE_OPERAND (t, 0),
9082 TREE_OPERAND (t, 1),
9083 strict_overflow_p);
9084
9085 case COND_EXPR:
9086 case CONSTRUCTOR:
9087 case OBJ_TYPE_REF:
9088 case ASSERT_EXPR:
9089 case ADDR_EXPR:
9090 case WITH_SIZE_EXPR:
9091 case SSA_NAME:
9092 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9093
9094 case COMPOUND_EXPR:
9095 case MODIFY_EXPR:
9096 case BIND_EXPR:
9097 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9098 strict_overflow_p);
9099
9100 case SAVE_EXPR:
9101 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9102 strict_overflow_p);
9103
9104 case CALL_EXPR:
9105 {
9106 tree fndecl = get_callee_fndecl (t);
9107 if (!fndecl) return false;
9108 if (flag_delete_null_pointer_checks && !flag_check_new
9109 && DECL_IS_OPERATOR_NEW (fndecl)
9110 && !TREE_NOTHROW (fndecl))
9111 return true;
9112 if (flag_delete_null_pointer_checks
9113 && lookup_attribute ("returns_nonnull",
9114 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9115 return true;
9116 return alloca_call_p (t);
9117 }
9118
9119 default:
9120 break;
9121 }
9122 return false;
9123 }
9124
9125 /* Return true when T is an address and is known to be nonzero.
9126 Handle warnings about undefined signed overflow. */
9127
9128 static bool
9129 tree_expr_nonzero_p (tree t)
9130 {
9131 bool ret, strict_overflow_p;
9132
9133 strict_overflow_p = false;
9134 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9135 if (strict_overflow_p)
9136 fold_overflow_warning (("assuming signed overflow does not occur when "
9137 "determining that expression is always "
9138 "non-zero"),
9139 WARN_STRICT_OVERFLOW_MISC);
9140 return ret;
9141 }
9142
9143 /* Return true if T is known not to be equal to an integer W. */
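/* E.g. an SSA_NAME with recorded value range [0, 5] is known not to
   equal 7, and a value whose known nonzero bits are 0x3 cannot
   equal 4.  */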
9144
9145 bool
9146 expr_not_equal_to (tree t, const wide_int &w)
9147 {
9148 wide_int min, max, nz;
9149 value_range_type rtype;
9150 switch (TREE_CODE (t))
9151 {
9152 case INTEGER_CST:
9153 return wi::ne_p (t, w);
9154
9155 case SSA_NAME:
9156 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9157 return false;
9158 rtype = get_range_info (t, &min, &max);
9159 if (rtype == VR_RANGE)
9160 {
9161 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9162 return true;
9163 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9164 return true;
9165 }
9166 else if (rtype == VR_ANTI_RANGE
9167 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9168 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9169 return true;
9170 /* If T has some known zero bits and W has any of those bits set,
9171 then T is known not to be equal to W. */
9172 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9173 TYPE_PRECISION (TREE_TYPE (t))), 0))
9174 return true;
9175 return false;
9176
9177 default:
9178 return false;
9179 }
9180 }
9181
9182 /* Fold a binary expression of code CODE and type TYPE with operands
9183 OP0 and OP1. LOC is the location of the resulting expression.
9184 Return the folded expression if folding is successful. Otherwise,
9185 return NULL_TREE. */
9186
9187 tree
9188 fold_binary_loc (location_t loc,
9189 enum tree_code code, tree type, tree op0, tree op1)
9190 {
9191 enum tree_code_class kind = TREE_CODE_CLASS (code);
9192 tree arg0, arg1, tem;
9193 tree t1 = NULL_TREE;
9194 bool strict_overflow_p;
9195 unsigned int prec;
9196
9197 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9198 && TREE_CODE_LENGTH (code) == 2
9199 && op0 != NULL_TREE
9200 && op1 != NULL_TREE);
9201
9202 arg0 = op0;
9203 arg1 = op1;
9204
9205 /* Strip any conversions that don't change the mode. This is
9206 safe for every expression, except for a comparison expression
9207 because its signedness is derived from its operands. So, in
9208 the latter case, only strip conversions that don't change the
9209 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9210 preserved.
9211
9212 Note that this is done as an internal manipulation within the
9213 constant folder, in order to find the simplest representation
9214 of the arguments so that their form can be studied. In any
9215 case, the appropriate type conversions should be put back in
9216 the tree that will get out of the constant folder. */
9217
9218 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9219 {
9220 STRIP_SIGN_NOPS (arg0);
9221 STRIP_SIGN_NOPS (arg1);
9222 }
9223 else
9224 {
9225 STRIP_NOPS (arg0);
9226 STRIP_NOPS (arg1);
9227 }
9228
9229 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9230 constant but we can't do arithmetic on them. */
9231 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9232 {
9233 tem = const_binop (code, type, arg0, arg1);
9234 if (tem != NULL_TREE)
9235 {
9236 if (TREE_TYPE (tem) != type)
9237 tem = fold_convert_loc (loc, type, tem);
9238 return tem;
9239 }
9240 }
9241
9242 /* If this is a commutative operation, and ARG0 is a constant, move it
9243 to ARG1 to reduce the number of tests below. */
9244 if (commutative_tree_code (code)
9245 && tree_swap_operands_p (arg0, arg1, true))
9246 return fold_build2_loc (loc, code, type, op1, op0);
9247
9248 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9249 to ARG1 to reduce the number of tests below. */
9250 if (kind == tcc_comparison
9251 && tree_swap_operands_p (arg0, arg1, true))
9252 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9253
9254 tem = generic_simplify (loc, code, type, op0, op1);
9255 if (tem)
9256 return tem;
9257
9258 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9259
9260 First check for cases where an arithmetic operation is applied to a
9261 compound, conditional, or comparison operation. Push the arithmetic
9262 operation inside the compound or conditional to see if any folding
9263 can then be done. Convert comparison to conditional for this purpose.
9264 This also optimizes non-constant cases that used to be done in
9265 expand_expr.
9266
9267 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9268 where one of the operands is a truth value and the other is either a
9269 truth value or a BIT_AND_EXPR with the constant 1.  In that case, the
9270 code below would make the expression more complex. Change it to a
9271 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9272 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9273
9274 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9275 || code == EQ_EXPR || code == NE_EXPR)
9276 && TREE_CODE (type) != VECTOR_TYPE
9277 && ((truth_value_p (TREE_CODE (arg0))
9278 && (truth_value_p (TREE_CODE (arg1))
9279 || (TREE_CODE (arg1) == BIT_AND_EXPR
9280 && integer_onep (TREE_OPERAND (arg1, 1)))))
9281 || (truth_value_p (TREE_CODE (arg1))
9282 && (truth_value_p (TREE_CODE (arg0))
9283 || (TREE_CODE (arg0) == BIT_AND_EXPR
9284 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9285 {
9286 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9287 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9288 : TRUTH_XOR_EXPR,
9289 boolean_type_node,
9290 fold_convert_loc (loc, boolean_type_node, arg0),
9291 fold_convert_loc (loc, boolean_type_node, arg1));
9292
9293 if (code == EQ_EXPR)
9294 tem = invert_truthvalue_loc (loc, tem);
9295
9296 return fold_convert_loc (loc, type, tem);
9297 }
9298
9299 if (TREE_CODE_CLASS (code) == tcc_binary
9300 || TREE_CODE_CLASS (code) == tcc_comparison)
9301 {
9302 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9303 {
9304 tem = fold_build2_loc (loc, code, type,
9305 fold_convert_loc (loc, TREE_TYPE (op0),
9306 TREE_OPERAND (arg0, 1)), op1);
9307 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9308 tem);
9309 }
9310 if (TREE_CODE (arg1) == COMPOUND_EXPR
9311 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9312 {
9313 tem = fold_build2_loc (loc, code, type, op0,
9314 fold_convert_loc (loc, TREE_TYPE (op1),
9315 TREE_OPERAND (arg1, 1)));
9316 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9317 tem);
9318 }
9319
9320 if (TREE_CODE (arg0) == COND_EXPR
9321 || TREE_CODE (arg0) == VEC_COND_EXPR
9322 || COMPARISON_CLASS_P (arg0))
9323 {
9324 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9325 arg0, arg1,
9326 /*cond_first_p=*/1);
9327 if (tem != NULL_TREE)
9328 return tem;
9329 }
9330
9331 if (TREE_CODE (arg1) == COND_EXPR
9332 || TREE_CODE (arg1) == VEC_COND_EXPR
9333 || COMPARISON_CLASS_P (arg1))
9334 {
9335 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9336 arg1, arg0,
9337 /*cond_first_p=*/0);
9338 if (tem != NULL_TREE)
9339 return tem;
9340 }
9341 }
9342
9343 switch (code)
9344 {
9345 case MEM_REF:
9346 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9347 if (TREE_CODE (arg0) == ADDR_EXPR
9348 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9349 {
9350 tree iref = TREE_OPERAND (arg0, 0);
9351 return fold_build2 (MEM_REF, type,
9352 TREE_OPERAND (iref, 0),
9353 int_const_binop (PLUS_EXPR, arg1,
9354 TREE_OPERAND (iref, 1)));
9355 }
9356
9357 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9358 if (TREE_CODE (arg0) == ADDR_EXPR
9359 && handled_component_p (TREE_OPERAND (arg0, 0)))
9360 {
9361 tree base;
9362 HOST_WIDE_INT coffset;
9363 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9364 &coffset);
9365 if (!base)
9366 return NULL_TREE;
9367 return fold_build2 (MEM_REF, type,
9368 build_fold_addr_expr (base),
9369 int_const_binop (PLUS_EXPR, arg1,
9370 size_int (coffset)));
9371 }
9372
9373 return NULL_TREE;
9374
9375 case POINTER_PLUS_EXPR:
9376 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9377 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9378 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9379 return fold_convert_loc (loc, type,
9380 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9381 fold_convert_loc (loc, sizetype,
9382 arg1),
9383 fold_convert_loc (loc, sizetype,
9384 arg0)));
9385
9386 return NULL_TREE;
9387
9388 case PLUS_EXPR:
9389 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9390 {
9391 /* X + (X / CST) * -CST is X % CST. */
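	  /* This follows from X % C == X - (X / C) * C for truncating
	     division; the check below verifies that the two constants
	     sum to zero, i.e. that the multiplier really is -CST.  */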
9392 if (TREE_CODE (arg1) == MULT_EXPR
9393 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9394 && operand_equal_p (arg0,
9395 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9396 {
9397 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9398 tree cst1 = TREE_OPERAND (arg1, 1);
9399 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9400 cst1, cst0);
9401 if (sum && integer_zerop (sum))
9402 return fold_convert_loc (loc, type,
9403 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9404 TREE_TYPE (arg0), arg0,
9405 cst0));
9406 }
9407 }
9408
9409 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9410 one. Make sure the type is not saturating and has the signedness of
9411 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9412 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9413 if ((TREE_CODE (arg0) == MULT_EXPR
9414 || TREE_CODE (arg1) == MULT_EXPR)
9415 && !TYPE_SATURATING (type)
9416 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9417 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9418 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9419 {
9420 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9421 if (tem)
9422 return tem;
9423 }
9424
9425 if (! FLOAT_TYPE_P (type))
9426 {
9427 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9428 (plus (plus (mult) (mult)) (foo)) so that we can
9429 take advantage of the factoring cases below. */
9430 if (ANY_INTEGRAL_TYPE_P (type)
9431 && TYPE_OVERFLOW_WRAPS (type)
9432 && (((TREE_CODE (arg0) == PLUS_EXPR
9433 || TREE_CODE (arg0) == MINUS_EXPR)
9434 && TREE_CODE (arg1) == MULT_EXPR)
9435 || ((TREE_CODE (arg1) == PLUS_EXPR
9436 || TREE_CODE (arg1) == MINUS_EXPR)
9437 && TREE_CODE (arg0) == MULT_EXPR)))
9438 {
9439 tree parg0, parg1, parg, marg;
9440 enum tree_code pcode;
9441
9442 if (TREE_CODE (arg1) == MULT_EXPR)
9443 parg = arg0, marg = arg1;
9444 else
9445 parg = arg1, marg = arg0;
9446 pcode = TREE_CODE (parg);
9447 parg0 = TREE_OPERAND (parg, 0);
9448 parg1 = TREE_OPERAND (parg, 1);
9449 STRIP_NOPS (parg0);
9450 STRIP_NOPS (parg1);
9451
9452 if (TREE_CODE (parg0) == MULT_EXPR
9453 && TREE_CODE (parg1) != MULT_EXPR)
9454 return fold_build2_loc (loc, pcode, type,
9455 fold_build2_loc (loc, PLUS_EXPR, type,
9456 fold_convert_loc (loc, type,
9457 parg0),
9458 fold_convert_loc (loc, type,
9459 marg)),
9460 fold_convert_loc (loc, type, parg1));
9461 if (TREE_CODE (parg0) != MULT_EXPR
9462 && TREE_CODE (parg1) == MULT_EXPR)
9463 return
9464 fold_build2_loc (loc, PLUS_EXPR, type,
9465 fold_convert_loc (loc, type, parg0),
9466 fold_build2_loc (loc, pcode, type,
9467 fold_convert_loc (loc, type, marg),
9468 fold_convert_loc (loc, type,
9469 parg1)));
9470 }
9471 }
9472 else
9473 {
9474 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9475 to __complex__ ( x, y ). This is not the same for SNaNs or
9476 if signed zeros are involved. */
9477 if (!HONOR_SNANS (element_mode (arg0))
9478 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9479 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9480 {
9481 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9482 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9483 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9484 bool arg0rz = false, arg0iz = false;
9485 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9486 || (arg0i && (arg0iz = real_zerop (arg0i))))
9487 {
9488 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9489 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9490 if (arg0rz && arg1i && real_zerop (arg1i))
9491 {
9492 tree rp = arg1r ? arg1r
9493 : build1 (REALPART_EXPR, rtype, arg1);
9494 tree ip = arg0i ? arg0i
9495 : build1 (IMAGPART_EXPR, rtype, arg0);
9496 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9497 }
9498 else if (arg0iz && arg1r && real_zerop (arg1r))
9499 {
9500 tree rp = arg0r ? arg0r
9501 : build1 (REALPART_EXPR, rtype, arg0);
9502 tree ip = arg1i ? arg1i
9503 : build1 (IMAGPART_EXPR, rtype, arg1);
9504 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9505 }
9506 }
9507 }
9508
9509 if (flag_unsafe_math_optimizations
9510 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9511 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9512 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9513 return tem;
9514
9515 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9516 We associate floats only if the user has specified
9517 -fassociative-math. */
9518 if (flag_associative_math
9519 && TREE_CODE (arg1) == PLUS_EXPR
9520 && TREE_CODE (arg0) != MULT_EXPR)
9521 {
9522 tree tree10 = TREE_OPERAND (arg1, 0);
9523 tree tree11 = TREE_OPERAND (arg1, 1);
9524 if (TREE_CODE (tree11) == MULT_EXPR
9525 && TREE_CODE (tree10) == MULT_EXPR)
9526 {
9527 tree tree0;
9528 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9529 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9530 }
9531 }
9532 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9533 We associate floats only if the user has specified
9534 -fassociative-math. */
9535 if (flag_associative_math
9536 && TREE_CODE (arg0) == PLUS_EXPR
9537 && TREE_CODE (arg1) != MULT_EXPR)
9538 {
9539 tree tree00 = TREE_OPERAND (arg0, 0);
9540 tree tree01 = TREE_OPERAND (arg0, 1);
9541 if (TREE_CODE (tree01) == MULT_EXPR
9542 && TREE_CODE (tree00) == MULT_EXPR)
9543 {
9544 tree tree0;
9545 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9546 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9547 }
9548 }
9549 }
9550
9551 bit_rotate:
9552 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9553 is a rotate of A by C1 bits. */
9554 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9555 is a rotate of A by B bits. */
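      /* E.g. for a 32-bit unsigned A, (A << 3) + (A >> 29) becomes an
         LROTATE_EXPR of A by 3 bits.  */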
9556 {
9557 enum tree_code code0, code1;
9558 tree rtype;
9559 code0 = TREE_CODE (arg0);
9560 code1 = TREE_CODE (arg1);
9561 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9562 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9563 && operand_equal_p (TREE_OPERAND (arg0, 0),
9564 TREE_OPERAND (arg1, 0), 0)
9565 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9566 TYPE_UNSIGNED (rtype))
9567 /* Only create rotates in complete modes. Other cases are not
9568 expanded properly. */
9569 && (element_precision (rtype)
9570 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9571 {
9572 tree tree01, tree11;
9573 enum tree_code code01, code11;
9574
9575 tree01 = TREE_OPERAND (arg0, 1);
9576 tree11 = TREE_OPERAND (arg1, 1);
9577 STRIP_NOPS (tree01);
9578 STRIP_NOPS (tree11);
9579 code01 = TREE_CODE (tree01);
9580 code11 = TREE_CODE (tree11);
9581 if (code01 == INTEGER_CST
9582 && code11 == INTEGER_CST
9583 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9584 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9585 {
9586 tem = build2_loc (loc, LROTATE_EXPR,
9587 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9588 TREE_OPERAND (arg0, 0),
9589 code0 == LSHIFT_EXPR
9590 ? TREE_OPERAND (arg0, 1)
9591 : TREE_OPERAND (arg1, 1));
9592 return fold_convert_loc (loc, type, tem);
9593 }
9594 else if (code11 == MINUS_EXPR)
9595 {
9596 tree tree110, tree111;
9597 tree110 = TREE_OPERAND (tree11, 0);
9598 tree111 = TREE_OPERAND (tree11, 1);
9599 STRIP_NOPS (tree110);
9600 STRIP_NOPS (tree111);
9601 if (TREE_CODE (tree110) == INTEGER_CST
9602 && 0 == compare_tree_int (tree110,
9603 element_precision
9604 (TREE_TYPE (TREE_OPERAND
9605 (arg0, 0))))
9606 && operand_equal_p (tree01, tree111, 0))
9607 return
9608 fold_convert_loc (loc, type,
9609 build2 ((code0 == LSHIFT_EXPR
9610 ? LROTATE_EXPR
9611 : RROTATE_EXPR),
9612 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9613 TREE_OPERAND (arg0, 0),
9614 TREE_OPERAND (arg0, 1)));
9615 }
9616 else if (code01 == MINUS_EXPR)
9617 {
9618 tree tree010, tree011;
9619 tree010 = TREE_OPERAND (tree01, 0);
9620 tree011 = TREE_OPERAND (tree01, 1);
9621 STRIP_NOPS (tree010);
9622 STRIP_NOPS (tree011);
9623 if (TREE_CODE (tree010) == INTEGER_CST
9624 && 0 == compare_tree_int (tree010,
9625 element_precision
9626 (TREE_TYPE (TREE_OPERAND
9627 (arg0, 0))))
9628 && operand_equal_p (tree11, tree011, 0))
9629 return fold_convert_loc
9630 (loc, type,
9631 build2 ((code0 != LSHIFT_EXPR
9632 ? LROTATE_EXPR
9633 : RROTATE_EXPR),
9634 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9635 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9636 }
9637 }
9638 }
9639
9640 associate:
9641 /* In most languages, we can't associate operations on floats through
9642 parentheses. Rather than remember where the parentheses were, we
9643 don't associate floats at all, unless the user has specified
9644 -fassociative-math.
9645 And, we need to make sure type is not saturating. */
9646
9647 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9648 && !TYPE_SATURATING (type))
9649 {
9650 tree var0, con0, lit0, minus_lit0;
9651 tree var1, con1, lit1, minus_lit1;
9652 tree atype = type;
9653 bool ok = true;
9654
9655 /* Split both trees into variables, constants, and literals. Then
9656 associate each group together, the constants with literals,
9657 then the result with variables. This increases the chances of
9658 literals being recombined later and of generating relocatable
9659 expressions for the sum of a constant and literal. */
9660 var0 = split_tree (loc, arg0, type, code,
9661 &con0, &lit0, &minus_lit0, 0);
9662 var1 = split_tree (loc, arg1, type, code,
9663 &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9664
9665 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9666 if (code == MINUS_EXPR)
9667 code = PLUS_EXPR;
9668
9669 /* With undefined overflow prefer doing association in a type
9670 which wraps on overflow, if that is one of the operand types. */
9671 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9672 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9673 {
9674 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9675 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9676 atype = TREE_TYPE (arg0);
9677 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9678 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9679 atype = TREE_TYPE (arg1);
9680 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9681 }
9682
9683 /* With undefined overflow we can only associate constants with one
9684 variable, and constants whose association doesn't overflow. */
9685 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9686 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9687 {
9688 if (var0 && var1)
9689 {
9690 tree tmp0 = var0;
9691 tree tmp1 = var1;
9692 bool one_neg = false;
9693
9694 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9695 {
9696 tmp0 = TREE_OPERAND (tmp0, 0);
9697 one_neg = !one_neg;
9698 }
9699 if (CONVERT_EXPR_P (tmp0)
9700 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9701 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9702 <= TYPE_PRECISION (atype)))
9703 tmp0 = TREE_OPERAND (tmp0, 0);
9704 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9705 {
9706 tmp1 = TREE_OPERAND (tmp1, 0);
9707 one_neg = !one_neg;
9708 }
9709 if (CONVERT_EXPR_P (tmp1)
9710 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9711 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9712 <= TYPE_PRECISION (atype)))
9713 tmp1 = TREE_OPERAND (tmp1, 0);
9714 /* The only case we can still associate with two variables
9715 is if they cancel out. */
9716 if (!one_neg
9717 || !operand_equal_p (tmp0, tmp1, 0))
9718 ok = false;
9719 }
9720 }
9721
9722 /* Only do something if we found more than two objects. Otherwise,
9723 nothing has changed and we risk infinite recursion. */
9724 if (ok
9725 && (2 < ((var0 != 0) + (var1 != 0)
9726 + (con0 != 0) + (con1 != 0)
9727 + (lit0 != 0) + (lit1 != 0)
9728 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9729 {
9730 bool any_overflows = false;
9731 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9732 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9733 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9734 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9735 var0 = associate_trees (loc, var0, var1, code, atype);
9736 con0 = associate_trees (loc, con0, con1, code, atype);
9737 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9738 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9739 code, atype);
9740
9741 /* Preserve the MINUS_EXPR if the negative part of the literal is
9742 greater than the positive part. Otherwise, the multiplicative
9743 folding code (i.e. extract_muldiv) may be fooled in case
9744 unsigned constants are subtracted, like in the following
9745 example: ((X*2 + 4) - 8U)/2. */
9746 if (minus_lit0 && lit0)
9747 {
9748 if (TREE_CODE (lit0) == INTEGER_CST
9749 && TREE_CODE (minus_lit0) == INTEGER_CST
9750 && tree_int_cst_lt (lit0, minus_lit0))
9751 {
9752 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9753 MINUS_EXPR, atype);
9754 lit0 = 0;
9755 }
9756 else
9757 {
9758 lit0 = associate_trees (loc, lit0, minus_lit0,
9759 MINUS_EXPR, atype);
9760 minus_lit0 = 0;
9761 }
9762 }
9763
9764 /* Don't introduce overflows through reassociation. */
9765 if (!any_overflows
9766 && ((lit0 && TREE_OVERFLOW_P (lit0))
9767 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9768 return NULL_TREE;
9769
9770 if (minus_lit0)
9771 {
9772 if (con0 == 0)
9773 return
9774 fold_convert_loc (loc, type,
9775 associate_trees (loc, var0, minus_lit0,
9776 MINUS_EXPR, atype));
9777 else
9778 {
9779 con0 = associate_trees (loc, con0, minus_lit0,
9780 MINUS_EXPR, atype);
9781 return
9782 fold_convert_loc (loc, type,
9783 associate_trees (loc, var0, con0,
9784 PLUS_EXPR, atype));
9785 }
9786 }
9787
9788 con0 = associate_trees (loc, con0, lit0, code, atype);
9789 return
9790 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9791 code, atype));
9792 }
9793 }
9794
9795 return NULL_TREE;
9796
9797 case MINUS_EXPR:
9798 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9799 if (TREE_CODE (arg0) == NEGATE_EXPR
9800 && negate_expr_p (op1)
9801 && reorder_operands_p (arg0, arg1))
9802 return fold_build2_loc (loc, MINUS_EXPR, type,
9803 negate_expr (op1),
9804 fold_convert_loc (loc, type,
9805 TREE_OPERAND (arg0, 0)));
9806
9807 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9808 __complex__ ( x, -y ). This is not the same for SNaNs or if
9809 signed zeros are involved. */
9810 if (!HONOR_SNANS (element_mode (arg0))
9811 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9812 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9813 {
9814 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9815 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9816 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9817 bool arg0rz = false, arg0iz = false;
9818 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9819 || (arg0i && (arg0iz = real_zerop (arg0i))))
9820 {
9821 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9822 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9823 if (arg0rz && arg1i && real_zerop (arg1i))
9824 {
9825 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9826 arg1r ? arg1r
9827 : build1 (REALPART_EXPR, rtype, arg1));
9828 tree ip = arg0i ? arg0i
9829 : build1 (IMAGPART_EXPR, rtype, arg0);
9830 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9831 }
9832 else if (arg0iz && arg1r && real_zerop (arg1r))
9833 {
9834 tree rp = arg0r ? arg0r
9835 : build1 (REALPART_EXPR, rtype, arg0);
9836 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9837 arg1i ? arg1i
9838 : build1 (IMAGPART_EXPR, rtype, arg1));
9839 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9840 }
9841 }
9842 }
9843
9844 /* A - B -> A + (-B) if B is easily negatable. */
9845 if (negate_expr_p (op1)
9846 && ! TYPE_OVERFLOW_SANITIZED (type)
9847 && ((FLOAT_TYPE_P (type)
9848 /* Avoid this transformation if B is a positive REAL_CST. */
9849 && (TREE_CODE (op1) != REAL_CST
9850 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9851 || INTEGRAL_TYPE_P (type)))
9852 return fold_build2_loc (loc, PLUS_EXPR, type,
9853 fold_convert_loc (loc, type, arg0),
9854 negate_expr (op1));
9855
9856 /* Fold &a[i] - &a[j] to i-j. */
9857 if (TREE_CODE (arg0) == ADDR_EXPR
9858 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9859 && TREE_CODE (arg1) == ADDR_EXPR
9860 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9861 {
9862 tree tem = fold_addr_of_array_ref_difference (loc, type,
9863 TREE_OPERAND (arg0, 0),
9864 TREE_OPERAND (arg1, 0));
9865 if (tem)
9866 return tem;
9867 }
9868
9869 if (FLOAT_TYPE_P (type)
9870 && flag_unsafe_math_optimizations
9871 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9872 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9873 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9874 return tem;
9875
9876 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9877 one. Make sure the type is not saturating and has the signedness of
9878 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9879 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9880 if ((TREE_CODE (arg0) == MULT_EXPR
9881 || TREE_CODE (arg1) == MULT_EXPR)
9882 && !TYPE_SATURATING (type)
9883 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9884 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9885 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9886 {
9887 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9888 if (tem)
9889 return tem;
9890 }
9891
9892 goto associate;
9893
9894 case MULT_EXPR:
9895 if (! FLOAT_TYPE_P (type))
9896 {
9897 /* Transform x * -C into -x * C if x is easily negatable. */
9898 if (TREE_CODE (op1) == INTEGER_CST
9899 && tree_int_cst_sgn (op1) == -1
9900 && negate_expr_p (op0)
9901 && (tem = negate_expr (op1)) != op1
9902 && ! TREE_OVERFLOW (tem))
9903 return fold_build2_loc (loc, MULT_EXPR, type,
9904 fold_convert_loc (loc, type,
9905 negate_expr (op0)), tem);
9906
9907 /* (A + A) * C -> A * 2 * C */
9908 if (TREE_CODE (arg0) == PLUS_EXPR
9909 && TREE_CODE (arg1) == INTEGER_CST
9910 && operand_equal_p (TREE_OPERAND (arg0, 0),
9911 TREE_OPERAND (arg0, 1), 0))
9912 return fold_build2_loc (loc, MULT_EXPR, type,
9913 omit_one_operand_loc (loc, type,
9914 TREE_OPERAND (arg0, 0),
9915 TREE_OPERAND (arg0, 1)),
9916 fold_build2_loc (loc, MULT_EXPR, type,
9917 build_int_cst (type, 2), arg1));
9918
9919 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9920 sign-changing only. */
9921 if (TREE_CODE (arg1) == INTEGER_CST
9922 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9923 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9924 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9925
9926 strict_overflow_p = false;
9927 if (TREE_CODE (arg1) == INTEGER_CST
9928 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9929 &strict_overflow_p)))
9930 {
9931 if (strict_overflow_p)
9932 fold_overflow_warning (("assuming signed overflow does not "
9933 "occur when simplifying "
9934 "multiplication"),
9935 WARN_STRICT_OVERFLOW_MISC);
9936 return fold_convert_loc (loc, type, tem);
9937 }
9938
9939 /* Optimize z * conj(z) for integer complex numbers. */
9940 if (TREE_CODE (arg0) == CONJ_EXPR
9941 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9942 return fold_mult_zconjz (loc, type, arg1);
9943 if (TREE_CODE (arg1) == CONJ_EXPR
9944 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9945 return fold_mult_zconjz (loc, type, arg0);
9946 }
9947 else
9948 {
9949 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9950 This is not the same for NaNs or if signed zeros are
9951 involved. */
9952 if (!HONOR_NANS (arg0)
9953 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9954 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9955 && TREE_CODE (arg1) == COMPLEX_CST
9956 && real_zerop (TREE_REALPART (arg1)))
9957 {
9958 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9959 if (real_onep (TREE_IMAGPART (arg1)))
9960 return
9961 fold_build2_loc (loc, COMPLEX_EXPR, type,
9962 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9963 rtype, arg0)),
9964 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9965 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9966 return
9967 fold_build2_loc (loc, COMPLEX_EXPR, type,
9968 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9969 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9970 rtype, arg0)));
9971 }
9972
9973 /* Optimize z * conj(z) for floating point complex numbers.
9974 Guarded by flag_unsafe_math_optimizations as non-finite
9975 imaginary components don't produce scalar results. */
9976 if (flag_unsafe_math_optimizations
9977 && TREE_CODE (arg0) == CONJ_EXPR
9978 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9979 return fold_mult_zconjz (loc, type, arg1);
9980 if (flag_unsafe_math_optimizations
9981 && TREE_CODE (arg1) == CONJ_EXPR
9982 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9983 return fold_mult_zconjz (loc, type, arg0);
9984
9985 if (flag_unsafe_math_optimizations)
9986 {
9988 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9989 if (!in_gimple_form
9990 && optimize
9991 && operand_equal_p (arg0, arg1, 0))
9992 {
9993 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9994
9995 if (powfn)
9996 {
9997 tree arg = build_real (type, dconst2);
9998 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9999 }
10000 }
10001 }
10002 }
10003 goto associate;
10004
10005 case BIT_IOR_EXPR:
10006 /* Canonicalize (X & C1) | C2. */
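      /* E.g. (X & 0x0F) | 0xFF folds to 0xFF, while
         (X & 0xF0F0) | 0x00FF drops the C1 bits covered by C2 and
         becomes (X & 0xF000) | 0x00FF.  */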
10007 if (TREE_CODE (arg0) == BIT_AND_EXPR
10008 && TREE_CODE (arg1) == INTEGER_CST
10009 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10010 {
10011 int width = TYPE_PRECISION (type), w;
10012 wide_int c1 = TREE_OPERAND (arg0, 1);
10013 wide_int c2 = arg1;
10014
10015 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10016 if ((c1 & c2) == c1)
10017 return omit_one_operand_loc (loc, type, arg1,
10018 TREE_OPERAND (arg0, 0));
10019
10020 wide_int msk = wi::mask (width, false,
10021 TYPE_PRECISION (TREE_TYPE (arg1)));
10022
10023 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10024 if (msk.and_not (c1 | c2) == 0)
10025 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10026 TREE_OPERAND (arg0, 0), arg1);
10027
10028 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10029 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10030 mode which allows further optimizations. */
10031 c1 &= msk;
10032 c2 &= msk;
10033 wide_int c3 = c1.and_not (c2);
10034 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10035 {
10036 wide_int mask = wi::mask (w, false,
10037 TYPE_PRECISION (type));
10038 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10039 {
10040 c3 = mask;
10041 break;
10042 }
10043 }
10044
10045 if (c3 != c1)
10046 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10047 fold_build2_loc (loc, BIT_AND_EXPR, type,
10048 TREE_OPERAND (arg0, 0),
10049 wide_int_to_tree (type,
10050 c3)),
10051 arg1);
10052 }
10053
10054 /* See if this can be simplified into a rotate first. If that
10055 is unsuccessful continue in the association code. */
10056 goto bit_rotate;
10057
10058 case BIT_XOR_EXPR:
10059 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10060 if (TREE_CODE (arg0) == BIT_AND_EXPR
10061 && INTEGRAL_TYPE_P (type)
10062 && integer_onep (TREE_OPERAND (arg0, 1))
10063 && integer_onep (arg1))
10064 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10065 build_zero_cst (TREE_TYPE (arg0)));
10066
10067 /* See if this can be simplified into a rotate first. If that
10068 is unsuccessful continue in the association code. */
10069 goto bit_rotate;
10070
10071 case BIT_AND_EXPR:
10072 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10073 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10074 && INTEGRAL_TYPE_P (type)
10075 && integer_onep (TREE_OPERAND (arg0, 1))
10076 && integer_onep (arg1))
10077 {
10078 tree tem2;
10079 tem = TREE_OPERAND (arg0, 0);
10080 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10081 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10082 tem, tem2);
10083 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10084 build_zero_cst (TREE_TYPE (tem)));
10085 }
10086 /* Fold ~X & 1 as (X & 1) == 0. */
10087 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10088 && INTEGRAL_TYPE_P (type)
10089 && integer_onep (arg1))
10090 {
10091 tree tem2;
10092 tem = TREE_OPERAND (arg0, 0);
10093 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10094 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10095 tem, tem2);
10096 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10097 build_zero_cst (TREE_TYPE (tem)));
10098 }
10099 /* Fold !X & 1 as X == 0. */
10100 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10101 && integer_onep (arg1))
10102 {
10103 tem = TREE_OPERAND (arg0, 0);
10104 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10105 build_zero_cst (TREE_TYPE (tem)));
10106 }
10107
10108 /* Fold (X ^ Y) & Y as ~X & Y. */
10109 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10110 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10111 {
10112 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10113 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10114 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10115 fold_convert_loc (loc, type, arg1));
10116 }
10117 /* Fold (X ^ Y) & X as ~Y & X. */
10118 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10119 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10120 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10121 {
10122 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10123 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10124 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10125 fold_convert_loc (loc, type, arg1));
10126 }
10127 /* Fold X & (X ^ Y) as X & ~Y. */
10128 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10129 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10130 {
10131 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10132 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10133 fold_convert_loc (loc, type, arg0),
10134 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10135 }
10136 /* Fold X & (Y ^ X) as ~Y & X. */
10137 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10138 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10139 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10140 {
10141 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10142 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10143 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10144 fold_convert_loc (loc, type, arg0));
10145 }
10146
10147 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10148 multiple of 1 << CST. */
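      /* E.g. (X * 8) & -8 folds to X * 8: X * 8 is always a multiple
         of 8, so -8 only clears bits that are already zero.  */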
10149 if (TREE_CODE (arg1) == INTEGER_CST)
10150 {
10151 wide_int cst1 = arg1;
10152 wide_int ncst1 = -cst1;
10153 if ((cst1 & ncst1) == ncst1
10154 && multiple_of_p (type, arg0,
10155 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10156 return fold_convert_loc (loc, type, arg0);
10157 }
10158
10159 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10160 bits from CST2. */
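      /* E.g. (X * 4) & 7 becomes (X * 4) & 4, since the two low bits
         of X * 4 are known to be zero, and (X * 4) & 3 folds to 0.  */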
10161 if (TREE_CODE (arg1) == INTEGER_CST
10162 && TREE_CODE (arg0) == MULT_EXPR
10163 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10164 {
10165 wide_int warg1 = arg1;
10166 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10167
10168 if (masked == 0)
10169 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10170 arg0, arg1);
10171 else if (masked != warg1)
10172 {
10173 /* Avoid the transform if arg1 is a mask of some
10174 mode which allows further optimizations. */
10175 int pop = wi::popcount (warg1);
10176 if (!(pop >= BITS_PER_UNIT
10177 && exact_log2 (pop) != -1
10178 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10179 return fold_build2_loc (loc, code, type, op0,
10180 wide_int_to_tree (type, masked));
10181 }
10182 }
10183
10184 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10185 ((A & N) + B) & M -> (A + B) & M
10186 Similarly if (N & M) == 0,
10187 ((A | N) + B) & M -> (A + B) & M
10188 and for - instead of + (or unary - instead of +)
10189 and/or ^ instead of |.
10190 If B is constant and (B & M) == 0, fold into A & M. */
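      /* E.g. with M == 3 and N == 7, ((A & 7) + B) & 3 simplifies to
         (A + B) & 3: since 7 & 3 == 3, the inner mask cannot change
         the two low bits of the sum.  */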
10191 if (TREE_CODE (arg1) == INTEGER_CST)
10192 {
10193 wide_int cst1 = arg1;
10194 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10195 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10196 && (TREE_CODE (arg0) == PLUS_EXPR
10197 || TREE_CODE (arg0) == MINUS_EXPR
10198 || TREE_CODE (arg0) == NEGATE_EXPR)
10199 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10200 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10201 {
10202 tree pmop[2];
10203 int which = 0;
10204 wide_int cst0;
10205
10206 /* Now we know that arg0 is (C + D) or (C - D) or
10207 -C and arg1 (M) is == (1LL << cst) - 1.
10208 Store C into PMOP[0] and D into PMOP[1]. */
10209 pmop[0] = TREE_OPERAND (arg0, 0);
10210 pmop[1] = NULL;
10211 if (TREE_CODE (arg0) != NEGATE_EXPR)
10212 {
10213 pmop[1] = TREE_OPERAND (arg0, 1);
10214 which = 1;
10215 }
10216
10217 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10218 which = -1;
10219
10220 for (; which >= 0; which--)
10221 switch (TREE_CODE (pmop[which]))
10222 {
10223 case BIT_AND_EXPR:
10224 case BIT_IOR_EXPR:
10225 case BIT_XOR_EXPR:
10226 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10227 != INTEGER_CST)
10228 break;
10229 cst0 = TREE_OPERAND (pmop[which], 1);
10230 cst0 &= cst1;
10231 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10232 {
10233 if (cst0 != cst1)
10234 break;
10235 }
10236 else if (cst0 != 0)
10237 break;
10238 /* If C or D is of the form (A & N) where
10239 (N & M) == M, or of the form (A | N) or
10240 (A ^ N) where (N & M) == 0, replace it with A. */
10241 pmop[which] = TREE_OPERAND (pmop[which], 0);
10242 break;
10243 case INTEGER_CST:
10244 /* If C or D is an N where (N & M) == 0, it can be
10245 omitted (assumed 0). */
10246 if ((TREE_CODE (arg0) == PLUS_EXPR
10247 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10248 && (cst1 & pmop[which]) == 0)
10249 pmop[which] = NULL;
10250 break;
10251 default:
10252 break;
10253 }
10254
10255 /* Only build anything new if we optimized one or both arguments
10256 above. */
10257 if (pmop[0] != TREE_OPERAND (arg0, 0)
10258 || (TREE_CODE (arg0) != NEGATE_EXPR
10259 && pmop[1] != TREE_OPERAND (arg0, 1)))
10260 {
10261 tree utype = TREE_TYPE (arg0);
10262 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10263 {
10264 /* Perform the operations in a type that has defined
10265 overflow behavior. */
10266 utype = unsigned_type_for (TREE_TYPE (arg0));
10267 if (pmop[0] != NULL)
10268 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10269 if (pmop[1] != NULL)
10270 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10271 }
10272
10273 if (TREE_CODE (arg0) == NEGATE_EXPR)
10274 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10275 else if (TREE_CODE (arg0) == PLUS_EXPR)
10276 {
10277 if (pmop[0] != NULL && pmop[1] != NULL)
10278 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10279 pmop[0], pmop[1]);
10280 else if (pmop[0] != NULL)
10281 tem = pmop[0];
10282 else if (pmop[1] != NULL)
10283 tem = pmop[1];
10284 else
10285 return build_int_cst (type, 0);
10286 }
10287 else if (pmop[0] == NULL)
10288 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10289 else
10290 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10291 pmop[0], pmop[1]);
10292 /* TEM is now the new binary +, - or unary - replacement. */
10293 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10294 fold_convert_loc (loc, utype, arg1));
10295 return fold_convert_loc (loc, type, tem);
10296 }
10297 }
10298 }
10299
10300 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10301 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10302 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10303 {
10304 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10305
10306 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10307 if (mask == -1)
10308 return
10309 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10310 }
10311
10312 goto associate;
10313
10314 case RDIV_EXPR:
10315 /* Don't touch a floating-point divide by zero unless the mode
10316 of the constant can represent infinity. */
10317 if (TREE_CODE (arg1) == REAL_CST
10318 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10319 && real_zerop (arg1))
10320 return NULL_TREE;
10321
10322 /* (-A) / (-B) -> A / B */
10323 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10324 return fold_build2_loc (loc, RDIV_EXPR, type,
10325 TREE_OPERAND (arg0, 0),
10326 negate_expr (arg1));
10327 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10328 return fold_build2_loc (loc, RDIV_EXPR, type,
10329 negate_expr (arg0),
10330 TREE_OPERAND (arg1, 0));
10331 return NULL_TREE;
10332
10333 case TRUNC_DIV_EXPR:
10334 /* Fall through */
10335
10336 case FLOOR_DIV_EXPR:
10337 /* Simplify A / (B << N) where A and B are positive and B is
10338 a power of 2, to A >> (N + log2(B)). */
10339 strict_overflow_p = false;
10340 if (TREE_CODE (arg1) == LSHIFT_EXPR
10341 && (TYPE_UNSIGNED (type)
10342 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10343 {
10344 tree sval = TREE_OPERAND (arg1, 0);
10345 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10346 {
10347 tree sh_cnt = TREE_OPERAND (arg1, 1);
10348 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10349 wi::exact_log2 (sval));
10350
10351 if (strict_overflow_p)
10352 fold_overflow_warning (("assuming signed overflow does not "
10353 "occur when simplifying A / (B << N)"),
10354 WARN_STRICT_OVERFLOW_MISC);
10355
10356 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10357 sh_cnt, pow2);
10358 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10359 fold_convert_loc (loc, type, arg0), sh_cnt);
10360 }
10361 }
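/* Worked example, assuming unsigned a: a / (4u << n) folds to
   a >> (n + 2), since dividing by 4 << n is a right shift by
   log2 (4) + n bits.  */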
10362
10363 /* Fall through */
10364
10365 case ROUND_DIV_EXPR:
10366 case CEIL_DIV_EXPR:
10367 case EXACT_DIV_EXPR:
10368 if (integer_zerop (arg1))
10369 return NULL_TREE;
10370
10371 /* Convert -A / -B to A / B when the type is signed and overflow is
10372 undefined. */
10373 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10374 && TREE_CODE (arg0) == NEGATE_EXPR
10375 && negate_expr_p (op1))
10376 {
10377 if (INTEGRAL_TYPE_P (type))
10378 fold_overflow_warning (("assuming signed overflow does not occur "
10379 "when distributing negation across "
10380 "division"),
10381 WARN_STRICT_OVERFLOW_MISC);
10382 return fold_build2_loc (loc, code, type,
10383 fold_convert_loc (loc, type,
10384 TREE_OPERAND (arg0, 0)),
10385 negate_expr (op1));
10386 }
10387 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10388 && TREE_CODE (arg1) == NEGATE_EXPR
10389 && negate_expr_p (op0))
10390 {
10391 if (INTEGRAL_TYPE_P (type))
10392 fold_overflow_warning (("assuming signed overflow does not occur "
10393 "when distributing negation across "
10394 "division"),
10395 WARN_STRICT_OVERFLOW_MISC);
10396 return fold_build2_loc (loc, code, type,
10397 negate_expr (op0),
10398 fold_convert_loc (loc, type,
10399 TREE_OPERAND (arg1, 0)));
10400 }
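/* E.g. with undefined signed overflow, (-x) / (-y) folds straight to
   x / y, and (-x) / 16 to x / -16; the warning records that both
   rewrites assume the negations cannot wrap.  */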
10401
10402 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10403 operation, EXACT_DIV_EXPR.
10404
10405 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10406 At one time the others generated faster code, but it's not clear
10407 whether they still do after the last round of changes to the DIV code in expmed.c. */
10408 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10409 && multiple_of_p (type, arg0, arg1))
10410 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10411 fold_convert (type, arg0),
10412 fold_convert (type, arg1));
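/* E.g. if arg0 is known to be 8 * n and arg1 is 8, the division has
   no remainder, so a CEIL_DIV or FLOOR_DIV of the two is rewritten
   as EXACT_DIV_EXPR, which the back end can expand more cheaply
   (e.g. via a multiplicative inverse).  */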
10413
10414 strict_overflow_p = false;
10415 if (TREE_CODE (arg1) == INTEGER_CST
10416 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10417 &strict_overflow_p)))
10418 {
10419 if (strict_overflow_p)
10420 fold_overflow_warning (("assuming signed overflow does not occur "
10421 "when simplifying division"),
10422 WARN_STRICT_OVERFLOW_MISC);
10423 return fold_convert_loc (loc, type, tem);
10424 }
10425
10426 return NULL_TREE;
10427
10428 case CEIL_MOD_EXPR:
10429 case FLOOR_MOD_EXPR:
10430 case ROUND_MOD_EXPR:
10431 case TRUNC_MOD_EXPR:
10432 strict_overflow_p = false;
10433 if (TREE_CODE (arg1) == INTEGER_CST
10434 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10435 &strict_overflow_p)))
10436 {
10437 if (strict_overflow_p)
10438 fold_overflow_warning (("assuming signed overflow does not occur "
10439 "when simplifying modulus"),
10440 WARN_STRICT_OVERFLOW_MISC);
10441 return fold_convert_loc (loc, type, tem);
10442 }
10443
10444 return NULL_TREE;
10445
10446 case LROTATE_EXPR:
10447 case RROTATE_EXPR:
10448 case RSHIFT_EXPR:
10449 case LSHIFT_EXPR:
10450 /* Since negative shift count is not well-defined,
10451 don't try to compute it in the compiler. */
10452 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10453 return NULL_TREE;
10454
10455 prec = element_precision (type);
10456
10457 /* If we have a rotate of a bit operation with the rotate count and
10458 the second operand of the bit operation both constant,
10459 permute the two operations. */
10460 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10461 && (TREE_CODE (arg0) == BIT_AND_EXPR
10462 || TREE_CODE (arg0) == BIT_IOR_EXPR
10463 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10464 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10465 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10466 fold_build2_loc (loc, code, type,
10467 TREE_OPERAND (arg0, 0), arg1),
10468 fold_build2_loc (loc, code, type,
10469 TREE_OPERAND (arg0, 1), arg1));
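/* E.g. for an 8-bit type, (x & 0xF0) r>> 4 (rotate right) folds to
   (x r>> 4) & 0x0F: rotating the masked value equals masking the
   rotated value with the rotated mask, and the constant half folds
   away.  */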
10470
10471 /* Two consecutive rotates adding up to some integer
10472 multiple of the precision of the type can be ignored. */
10473 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10474 && TREE_CODE (arg0) == RROTATE_EXPR
10475 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10476 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10477 prec) == 0)
10478 return TREE_OPERAND (arg0, 0);
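/* E.g. for a 32-bit type, (x r>> 12) r>> 20 rotates by 32 bits in
   total, a whole multiple of the precision, so it folds back to x.  */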
10479
10480 return NULL_TREE;
10481
10482 case MIN_EXPR:
10483 case MAX_EXPR:
10484 goto associate;
10485
10486 case TRUTH_ANDIF_EXPR:
10487 /* Note that the operands of this must be ints
10488 and their values must be 0 or 1.
10489 ("true" is a fixed value perhaps depending on the language.) */
10490 /* If first arg is constant zero, return it. */
10491 if (integer_zerop (arg0))
10492 return fold_convert_loc (loc, type, arg0);
10493 case TRUTH_AND_EXPR:
10494 /* If either arg is constant true, drop it. */
10495 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10496 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10497 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10498 /* Preserve sequence points. */
10499 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10500 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10501 /* If second arg is constant zero, result is zero, but first arg
10502 must be evaluated. */
10503 if (integer_zerop (arg1))
10504 return omit_one_operand_loc (loc, type, arg1, arg0);
10505 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10506 case will be handled here. */
10507 if (integer_zerop (arg0))
10508 return omit_one_operand_loc (loc, type, arg0, arg1);
10509
10510 /* !X && X is always false. */
10511 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10512 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10513 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10514 /* X && !X is always false. */
10515 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10516 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10517 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10518
10519 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10520 means A >= Y && A != MAX, but in this case we know that
10521 A < X <= MAX. */
10522
10523 if (!TREE_SIDE_EFFECTS (arg0)
10524 && !TREE_SIDE_EFFECTS (arg1))
10525 {
10526 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10527 if (tem && !operand_equal_p (tem, arg0, 0))
10528 return fold_build2_loc (loc, code, type, tem, arg1);
10529
10530 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10531 if (tem && !operand_equal_p (tem, arg1, 0))
10532 return fold_build2_loc (loc, code, type, arg0, tem);
10533 }
10534
10535 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10536 != NULL_TREE)
10537 return tem;
10538
10539 return NULL_TREE;
10540
10541 case TRUTH_ORIF_EXPR:
10542 /* Note that the operands of this must be ints
10543 and their values must be 0 or true.
10544 ("true" is a fixed value perhaps depending on the language.) */
10545 /* If first arg is constant true, return it. */
10546 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10547 return fold_convert_loc (loc, type, arg0);
10548 case TRUTH_OR_EXPR:
10549 /* If either arg is constant zero, drop it. */
10550 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10551 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10552 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10553 /* Preserve sequence points. */
10554 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10555 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10556 /* If second arg is constant true, result is true, but we must
10557 evaluate first arg. */
10558 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10559 return omit_one_operand_loc (loc, type, arg1, arg0);
10560 /* Likewise for first arg, but note this only occurs here for
10561 TRUTH_OR_EXPR. */
10562 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10563 return omit_one_operand_loc (loc, type, arg0, arg1);
10564
10565 /* !X || X is always true. */
10566 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10567 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10568 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10569 /* X || !X is always true. */
10570 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10571 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10572 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10573
10574 /* (X && !Y) || (!X && Y) is X ^ Y */
10575 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10576 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10577 {
10578 tree a0, a1, l0, l1, n0, n1;
10579
10580 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10581 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10582
10583 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10584 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10585
10586 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10587 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10588
10589 if ((operand_equal_p (n0, a0, 0)
10590 && operand_equal_p (n1, a1, 0))
10591 || (operand_equal_p (n0, a1, 0)
10592 && operand_equal_p (n1, a0, 0)))
10593 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10594 }
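/* E.g. (p && !q) || (!p && q) matches here with l0 == p and l1 == !q:
   the negated pair {n0, n1} equals {a0, a1}, so the whole expression
   folds to the truth XOR p ^ q (built as l0 ^ n1).  */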
10595
10596 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10597 != NULL_TREE)
10598 return tem;
10599
10600 return NULL_TREE;
10601
10602 case TRUTH_XOR_EXPR:
10603 /* If the second arg is constant zero, drop it. */
10604 if (integer_zerop (arg1))
10605 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10606 /* If the second arg is constant true, this is a logical inversion. */
10607 if (integer_onep (arg1))
10608 {
10609 tem = invert_truthvalue_loc (loc, arg0);
10610 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10611 }
10612 /* Identical arguments cancel to zero. */
10613 if (operand_equal_p (arg0, arg1, 0))
10614 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10615
10616 /* !X ^ X is always true. */
10617 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10618 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10619 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10620
10621 /* X ^ !X is always true. */
10622 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10623 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10624 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10625
10626 return NULL_TREE;
10627
10628 case EQ_EXPR:
10629 case NE_EXPR:
10630 STRIP_NOPS (arg0);
10631 STRIP_NOPS (arg1);
10632
10633 tem = fold_comparison (loc, code, type, op0, op1);
10634 if (tem != NULL_TREE)
10635 return tem;
10636
10637 /* bool_var != 1 becomes !bool_var. */
10638 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10639 && code == NE_EXPR)
10640 return fold_convert_loc (loc, type,
10641 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10642 TREE_TYPE (arg0), arg0));
10643
10644 /* bool_var == 0 becomes !bool_var. */
10645 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10646 && code == EQ_EXPR)
10647 return fold_convert_loc (loc, type,
10648 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10649 TREE_TYPE (arg0), arg0));
10650
10651 /* !exp != 0 becomes !exp */
10652 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10653 && code == NE_EXPR)
10654 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10655
10656 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10657 if ((TREE_CODE (arg0) == PLUS_EXPR
10658 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10659 || TREE_CODE (arg0) == MINUS_EXPR)
10660 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10661 0)),
10662 arg1, 0)
10663 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10664 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10665 {
10666 tree val = TREE_OPERAND (arg0, 1);
10667 val = fold_build2_loc (loc, code, type, val,
10668 build_int_cst (TREE_TYPE (val), 0));
10669 return omit_two_operands_loc (loc, type, val,
10670 TREE_OPERAND (arg0, 0), arg1);
10671 }
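/* E.g. x + y == x folds to y == 0 for integral or pointer types;
   omit_two_operands still evaluates the discarded operands if they
   have side effects.  */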
10672
10673 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10674 if ((TREE_CODE (arg1) == PLUS_EXPR
10675 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10676 || TREE_CODE (arg1) == MINUS_EXPR)
10677 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10678 0)),
10679 arg0, 0)
10680 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10681 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10682 {
10683 tree val = TREE_OPERAND (arg1, 1);
10684 val = fold_build2_loc (loc, code, type, val,
10685 build_int_cst (TREE_TYPE (val), 0));
10686 return omit_two_operands_loc (loc, type, val,
10687 TREE_OPERAND (arg1, 0), arg0);
10688 }
10689
10690 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10691 if (TREE_CODE (arg0) == MINUS_EXPR
10692 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10693 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10694 1)),
10695 arg1, 0)
10696 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10697 return omit_two_operands_loc (loc, type,
10698 code == NE_EXPR
10699 ? boolean_true_node : boolean_false_node,
10700 TREE_OPERAND (arg0, 1), arg1);
10701
10702 /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */
10703 if (TREE_CODE (arg1) == MINUS_EXPR
10704 && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
10705 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10706 1)),
10707 arg0, 0)
10708 && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
10709 return omit_two_operands_loc (loc, type,
10710 code == NE_EXPR
10711 ? boolean_true_node : boolean_false_node,
10712 TREE_OPERAND (arg1, 1), arg0);
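/* Worked example: 5 - x == x would require 2 * x == 5, and twice any
   integer is even, so the equality folds to false (and 5 - x != x to
   true) no matter what x is; the parity argument holds even with
   wrapping arithmetic.  */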
10713
10714 /* If this is an EQ or NE comparison with zero and ARG0 is
10715 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10716 two operations, but the latter can be done in one less insn
10717 on machines that have only two-operand insns or on which a
10718 constant cannot be the first operand. */
10719 if (TREE_CODE (arg0) == BIT_AND_EXPR
10720 && integer_zerop (arg1))
10721 {
10722 tree arg00 = TREE_OPERAND (arg0, 0);
10723 tree arg01 = TREE_OPERAND (arg0, 1);
10724 if (TREE_CODE (arg00) == LSHIFT_EXPR
10725 && integer_onep (TREE_OPERAND (arg00, 0)))
10726 {
10727 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10728 arg01, TREE_OPERAND (arg00, 1));
10729 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10730 build_int_cst (TREE_TYPE (arg0), 1));
10731 return fold_build2_loc (loc, code, type,
10732 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10733 arg1);
10734 }
10735 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10736 && integer_onep (TREE_OPERAND (arg01, 0)))
10737 {
10738 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10739 arg00, TREE_OPERAND (arg01, 1));
10740 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10741 build_int_cst (TREE_TYPE (arg0), 1));
10742 return fold_build2_loc (loc, code, type,
10743 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10744 arg1);
10745 }
10746 }
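/* E.g. ((1 << n) & flags) != 0 folds to ((flags >> n) & 1) != 0:
   still a shift and a mask, but shifting the variable right avoids
   needing a constant as the first shift operand, which saves an insn
   on two-operand targets.  */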
10747
10748 /* If this is an NE or EQ comparison of zero against the result of a
10749 signed MOD operation whose second operand is a power of 2, make
10750 the MOD operation unsigned since it is simpler and equivalent. */
10751 if (integer_zerop (arg1)
10752 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10753 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10754 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10755 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10756 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10757 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10758 {
10759 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10760 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10761 fold_convert_loc (loc, newtype,
10762 TREE_OPERAND (arg0, 0)),
10763 fold_convert_loc (loc, newtype,
10764 TREE_OPERAND (arg0, 1)));
10765
10766 return fold_build2_loc (loc, code, type, newmod,
10767 fold_convert_loc (loc, newtype, arg1));
10768 }
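/* E.g. for signed x, x % 16 == 0 folds to (unsigned) x % 16U == 0:
   divisibility by a power of two does not depend on the sign, and
   the unsigned remainder is a simple mask (x & 15).  */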
10769
10770 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10771 C1 is a valid shift constant, and C2 is a power of two, i.e.
10772 a single bit. */
10773 if (TREE_CODE (arg0) == BIT_AND_EXPR
10774 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10775 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10776 == INTEGER_CST
10777 && integer_pow2p (TREE_OPERAND (arg0, 1))
10778 && integer_zerop (arg1))
10779 {
10780 tree itype = TREE_TYPE (arg0);
10781 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10782 prec = TYPE_PRECISION (itype);
10783
10784 /* Check for a valid shift count. */
10785 if (wi::ltu_p (arg001, prec))
10786 {
10787 tree arg01 = TREE_OPERAND (arg0, 1);
10788 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10789 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10790 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10791 can be rewritten as (X & (C2 << C1)) != 0. */
10792 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10793 {
10794 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10795 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10796 return fold_build2_loc (loc, code, type, tem,
10797 fold_convert_loc (loc, itype, arg1));
10798 }
10799 /* Otherwise, for signed (arithmetic) shifts,
10800 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10801 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10802 else if (!TYPE_UNSIGNED (itype))
10803 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10804 arg000, build_int_cst (itype, 0));
10805 /* Otherwise, for unsigned (logical) shifts,
10806 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10807 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10808 else
10809 return omit_one_operand_loc (loc, type,
10810 code == EQ_EXPR ? integer_one_node
10811 : integer_zero_node,
10812 arg000);
10813 }
10814 }
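/* Examples of the three outcomes, assuming 32-bit x:
   ((x >> 3) & 4) != 0 folds to (x & 32) != 0, since 4 << 3 still
   fits; for signed x, ((x >> 31) & 2) != 0 folds to x < 0, the
   arithmetic shift replicating the sign bit; for unsigned x that
   same test folds to constant false.  */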
10815
10816 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10817 Similarly for NE_EXPR. */
10818 if (TREE_CODE (arg0) == BIT_AND_EXPR
10819 && TREE_CODE (arg1) == INTEGER_CST
10820 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10821 {
10822 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10823 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10824 TREE_OPERAND (arg0, 1));
10825 tree dandnotc
10826 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10827 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10828 notc);
10829 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10830 if (integer_nonzerop (dandnotc))
10831 return omit_one_operand_loc (loc, type, rslt, arg0);
10832 }
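/* E.g. (x & 4) == 3 can never hold because 3 has a bit set outside
   the mask 4, so it folds to 0 (and the != form to 1), with x still
   evaluated for side effects.  */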
10833
10834 /* If this is a comparison of a field, we may be able to simplify it. */
10835 if ((TREE_CODE (arg0) == COMPONENT_REF
10836 || TREE_CODE (arg0) == BIT_FIELD_REF)
10837 /* Handle the constant case even without -O
10838 to make sure the warnings are given. */
10839 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10840 {
10841 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10842 if (t1)
10843 return t1;
10844 }
10845
10846 /* Optimize comparisons of strlen vs zero to a compare of the
10847 first character of the string vs zero. To wit,
10848 strlen(ptr) == 0 => *ptr == 0
10849 strlen(ptr) != 0 => *ptr != 0
10850 Other cases should reduce to one of these two (or a constant)
10851 due to the return value of strlen being unsigned. */
10852 if (TREE_CODE (arg0) == CALL_EXPR
10853 && integer_zerop (arg1))
10854 {
10855 tree fndecl = get_callee_fndecl (arg0);
10856
10857 if (fndecl
10858 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10859 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10860 && call_expr_nargs (arg0) == 1
10861 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10862 {
10863 tree iref = build_fold_indirect_ref_loc (loc,
10864 CALL_EXPR_ARG (arg0, 0));
10865 return fold_build2_loc (loc, code, type, iref,
10866 build_int_cst (TREE_TYPE (iref), 0));
10867 }
10868 }
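/* That is, strlen (s) == 0 folds to *s == 0: a string has length
   zero exactly when its first character is the terminating NUL, so
   the library call disappears.  */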
10869
10870 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10871 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10872 if (TREE_CODE (arg0) == RSHIFT_EXPR
10873 && integer_zerop (arg1)
10874 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10875 {
10876 tree arg00 = TREE_OPERAND (arg0, 0);
10877 tree arg01 = TREE_OPERAND (arg0, 1);
10878 tree itype = TREE_TYPE (arg00);
10879 if (wi::eq_p (arg01, element_precision (itype) - 1))
10880 {
10881 if (TYPE_UNSIGNED (itype))
10882 {
10883 itype = signed_type_for (itype);
10884 arg00 = fold_convert_loc (loc, itype, arg00);
10885 }
10886 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10887 type, arg00, build_zero_cst (itype));
10888 }
10889 }
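/* E.g. with 32-bit int x, (x >> 31) != 0 folds to x < 0 and
   (x >> 31) == 0 to x >= 0; an unsigned operand is first converted
   to the corresponding signed type so the sign test is meaningful.  */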
10890
10891 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10892 (X & C) == 0 when C is a single bit. */
10893 if (TREE_CODE (arg0) == BIT_AND_EXPR
10894 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10895 && integer_zerop (arg1)
10896 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10897 {
10898 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10899 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10900 TREE_OPERAND (arg0, 1));
10901 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10902 type, tem,
10903 fold_convert_loc (loc, TREE_TYPE (arg0),
10904 arg1));
10905 }
10906
10907 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10908 constant C is a power of two, i.e. a single bit. */
10909 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10910 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10911 && integer_zerop (arg1)
10912 && integer_pow2p (TREE_OPERAND (arg0, 1))
10913 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10914 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10915 {
10916 tree arg00 = TREE_OPERAND (arg0, 0);
10917 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10918 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10919 }
10920
10921 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10922 when C is a power of two, i.e. a single bit. */
10923 if (TREE_CODE (arg0) == BIT_AND_EXPR
10924 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10925 && integer_zerop (arg1)
10926 && integer_pow2p (TREE_OPERAND (arg0, 1))
10927 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10928 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10929 {
10930 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10931 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10932 arg000, TREE_OPERAND (arg0, 1));
10933 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10934 tem, build_int_cst (TREE_TYPE (tem), 0));
10935 }
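/* Both patterns test one bit, e.g. ((x & 4) ^ 4) == 0 and
   ((x ^ 4) & 4) == 0 each ask whether bit 2 of x is set, so both
   fold to (x & 4) != 0.  */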
10936
10937 if (integer_zerop (arg1)
10938 && tree_expr_nonzero_p (arg0))
10939 {
10940 tree res = constant_boolean_node (code == NE_EXPR, type);
10941 return omit_one_operand_loc (loc, type, res, arg0);
10942 }
10943
10944 /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries. */
10945 if (TREE_CODE (arg0) == BIT_AND_EXPR
10946 && TREE_CODE (arg1) == BIT_AND_EXPR)
10947 {
10948 tree arg00 = TREE_OPERAND (arg0, 0);
10949 tree arg01 = TREE_OPERAND (arg0, 1);
10950 tree arg10 = TREE_OPERAND (arg1, 0);
10951 tree arg11 = TREE_OPERAND (arg1, 1);
10952 tree itype = TREE_TYPE (arg0);
10953
10954 if (operand_equal_p (arg01, arg11, 0))
10955 return fold_build2_loc (loc, code, type,
10956 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10957 fold_build2_loc (loc,
10958 BIT_XOR_EXPR, itype,
10959 arg00, arg10),
10960 arg01),
10961 build_zero_cst (itype));
10962
10963 if (operand_equal_p (arg01, arg10, 0))
10964 return fold_build2_loc (loc, code, type,
10965 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10966 fold_build2_loc (loc,
10967 BIT_XOR_EXPR, itype,
10968 arg00, arg11),
10969 arg01),
10970 build_zero_cst (itype));
10971
10972 if (operand_equal_p (arg00, arg11, 0))
10973 return fold_build2_loc (loc, code, type,
10974 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10975 fold_build2_loc (loc,
10976 BIT_XOR_EXPR, itype,
10977 arg01, arg10),
10978 arg00),
10979 build_zero_cst (itype));
10980
10981 if (operand_equal_p (arg00, arg10, 0))
10982 return fold_build2_loc (loc, code, type,
10983 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10984 fold_build2_loc (loc,
10985 BIT_XOR_EXPR, itype,
10986 arg01, arg11),
10987 arg00),
10988 build_zero_cst (itype));
10989 }
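/* E.g. (x & 7) == (y & 7) folds to ((x ^ y) & 7) == 0: the masked
   values agree exactly when x and y agree on the bits the common
   operand selects.  The four cases above just locate which operand
   the two BIT_AND_EXPRs share.  */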
10990
10991 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10992 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10993 {
10994 tree arg00 = TREE_OPERAND (arg0, 0);
10995 tree arg01 = TREE_OPERAND (arg0, 1);
10996 tree arg10 = TREE_OPERAND (arg1, 0);
10997 tree arg11 = TREE_OPERAND (arg1, 1);
10998 tree itype = TREE_TYPE (arg0);
10999
11000 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11001 operand_equal_p guarantees no side-effects so we don't need
11002 to use omit_one_operand on Z. */
11003 if (operand_equal_p (arg01, arg11, 0))
11004 return fold_build2_loc (loc, code, type, arg00,
11005 fold_convert_loc (loc, TREE_TYPE (arg00),
11006 arg10));
11007 if (operand_equal_p (arg01, arg10, 0))
11008 return fold_build2_loc (loc, code, type, arg00,
11009 fold_convert_loc (loc, TREE_TYPE (arg00),
11010 arg11));
11011 if (operand_equal_p (arg00, arg11, 0))
11012 return fold_build2_loc (loc, code, type, arg01,
11013 fold_convert_loc (loc, TREE_TYPE (arg01),
11014 arg10));
11015 if (operand_equal_p (arg00, arg10, 0))
11016 return fold_build2_loc (loc, code, type, arg01,
11017 fold_convert_loc (loc, TREE_TYPE (arg01),
11018 arg11));
11019
11020 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11021 if (TREE_CODE (arg01) == INTEGER_CST
11022 && TREE_CODE (arg11) == INTEGER_CST)
11023 {
11024 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11025 fold_convert_loc (loc, itype, arg11));
11026 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11027 return fold_build2_loc (loc, code, type, tem,
11028 fold_convert_loc (loc, itype, arg10));
11029 }
11030 }
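/* E.g. (x ^ z) == (y ^ z) folds to x == y, XOR by a common value
   being an involution, and (x ^ 5) == (y ^ 3) folds to
   (x ^ 6) == y by merging the two constants (5 ^ 3 == 6).  */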
11031
11032 /* Attempt to simplify equality/inequality comparisons of complex
11033 values. Only lower the comparison if the result is known or
11034 can be simplified to a single scalar comparison. */
11035 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11036 || TREE_CODE (arg0) == COMPLEX_CST)
11037 && (TREE_CODE (arg1) == COMPLEX_EXPR
11038 || TREE_CODE (arg1) == COMPLEX_CST))
11039 {
11040 tree real0, imag0, real1, imag1;
11041 tree rcond, icond;
11042
11043 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11044 {
11045 real0 = TREE_OPERAND (arg0, 0);
11046 imag0 = TREE_OPERAND (arg0, 1);
11047 }
11048 else
11049 {
11050 real0 = TREE_REALPART (arg0);
11051 imag0 = TREE_IMAGPART (arg0);
11052 }
11053
11054 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11055 {
11056 real1 = TREE_OPERAND (arg1, 0);
11057 imag1 = TREE_OPERAND (arg1, 1);
11058 }
11059 else
11060 {
11061 real1 = TREE_REALPART (arg1);
11062 imag1 = TREE_IMAGPART (arg1);
11063 }
11064
11065 rcond = fold_binary_loc (loc, code, type, real0, real1);
11066 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11067 {
11068 if (integer_zerop (rcond))
11069 {
11070 if (code == EQ_EXPR)
11071 return omit_two_operands_loc (loc, type, boolean_false_node,
11072 imag0, imag1);
11073 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11074 }
11075 else
11076 {
11077 if (code == NE_EXPR)
11078 return omit_two_operands_loc (loc, type, boolean_true_node,
11079 imag0, imag1);
11080 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11081 }
11082 }
11083
11084 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11085 if (icond && TREE_CODE (icond) == INTEGER_CST)
11086 {
11087 if (integer_zerop (icond))
11088 {
11089 if (code == EQ_EXPR)
11090 return omit_two_operands_loc (loc, type, boolean_false_node,
11091 real0, real1);
11092 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11093 }
11094 else
11095 {
11096 if (code == NE_EXPR)
11097 return omit_two_operands_loc (loc, type, boolean_true_node,
11098 real0, real1);
11099 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11100 }
11101 }
11102 }
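/* E.g. if the real parts fold to a known "unequal", x == y collapses
   to false and x != y to true (keeping side effects of the imaginary
   parts); if they fold to a known "equal", the whole comparison
   reduces to comparing the imaginary parts alone, and symmetrically
   when the imaginary comparison is the one that folds.  */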
11103
11104 return NULL_TREE;
11105
11106 case LT_EXPR:
11107 case GT_EXPR:
11108 case LE_EXPR:
11109 case GE_EXPR:
11110 tem = fold_comparison (loc, code, type, op0, op1);
11111 if (tem != NULL_TREE)
11112 return tem;
11113
11114 /* Transform comparisons of the form X +- C CMP X. */
11115 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11116 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11117 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11118 && !HONOR_SNANS (arg0))
11119 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11120 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11121 {
11122 tree arg01 = TREE_OPERAND (arg0, 1);
11123 enum tree_code code0 = TREE_CODE (arg0);
11124 int is_positive;
11125
11126 if (TREE_CODE (arg01) == REAL_CST)
11127 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11128 else
11129 is_positive = tree_int_cst_sgn (arg01);
11130
11131 /* (X - c) > X becomes false. */
11132 if (code == GT_EXPR
11133 && ((code0 == MINUS_EXPR && is_positive >= 0)
11134 || (code0 == PLUS_EXPR && is_positive <= 0)))
11135 {
11136 if (TREE_CODE (arg01) == INTEGER_CST
11137 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11138 fold_overflow_warning (("assuming signed overflow does not "
11139 "occur when assuming that (X - c) > X "
11140 "is always false"),
11141 WARN_STRICT_OVERFLOW_ALL);
11142 return constant_boolean_node (0, type);
11143 }
11144
11145 /* Likewise (X + c) < X becomes false. */
11146 if (code == LT_EXPR
11147 && ((code0 == PLUS_EXPR && is_positive >= 0)
11148 || (code0 == MINUS_EXPR && is_positive <= 0)))
11149 {
11150 if (TREE_CODE (arg01) == INTEGER_CST
11151 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11152 fold_overflow_warning (("assuming signed overflow does not "
11153 "occur when assuming that "
11154 "(X + c) < X is always false"),
11155 WARN_STRICT_OVERFLOW_ALL);
11156 return constant_boolean_node (0, type);
11157 }
11158
11159 /* Convert (X - c) <= X to true. */
11160 if (!HONOR_NANS (arg1)
11161 && code == LE_EXPR
11162 && ((code0 == MINUS_EXPR && is_positive >= 0)
11163 || (code0 == PLUS_EXPR && is_positive <= 0)))
11164 {
11165 if (TREE_CODE (arg01) == INTEGER_CST
11166 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11167 fold_overflow_warning (("assuming signed overflow does not "
11168 "occur when assuming that "
11169 "(X - c) <= X is always true"),
11170 WARN_STRICT_OVERFLOW_ALL);
11171 return constant_boolean_node (1, type);
11172 }
11173
11174 /* Convert (X + c) >= X to true. */
11175 if (!HONOR_NANS (arg1)
11176 && code == GE_EXPR
11177 && ((code0 == PLUS_EXPR && is_positive >= 0)
11178 || (code0 == MINUS_EXPR && is_positive <= 0)))
11179 {
11180 if (TREE_CODE (arg01) == INTEGER_CST
11181 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11182 fold_overflow_warning (("assuming signed overflow does not "
11183 "occur when assuming that "
11184 "(X + c) >= X is always true"),
11185 WARN_STRICT_OVERFLOW_ALL);
11186 return constant_boolean_node (1, type);
11187 }
11188
11189 if (TREE_CODE (arg01) == INTEGER_CST)
11190 {
11191 /* Convert X + c > X and X - c < X to true for integers. */
11192 if (code == GT_EXPR
11193 && ((code0 == PLUS_EXPR && is_positive > 0)
11194 || (code0 == MINUS_EXPR && is_positive < 0)))
11195 {
11196 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11197 fold_overflow_warning (("assuming signed overflow does "
11198 "not occur when assuming that "
11199 "(X + c) > X is always true"),
11200 WARN_STRICT_OVERFLOW_ALL);
11201 return constant_boolean_node (1, type);
11202 }
11203
11204 if (code == LT_EXPR
11205 && ((code0 == MINUS_EXPR && is_positive > 0)
11206 || (code0 == PLUS_EXPR && is_positive < 0)))
11207 {
11208 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11209 fold_overflow_warning (("assuming signed overflow does "
11210 "not occur when assuming that "
11211 "(X - c) < X is always true"),
11212 WARN_STRICT_OVERFLOW_ALL);
11213 return constant_boolean_node (1, type);
11214 }
11215
11216 /* Convert X + c <= X and X - c >= X to false for integers. */
11217 if (code == LE_EXPR
11218 && ((code0 == PLUS_EXPR && is_positive > 0)
11219 || (code0 == MINUS_EXPR && is_positive < 0)))
11220 {
11221 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11222 fold_overflow_warning (("assuming signed overflow does "
11223 "not occur when assuming that "
11224 "(X + c) <= X is always false"),
11225 WARN_STRICT_OVERFLOW_ALL);
11226 return constant_boolean_node (0, type);
11227 }
11228
11229 if (code == GE_EXPR
11230 && ((code0 == MINUS_EXPR && is_positive > 0)
11231 || (code0 == PLUS_EXPR && is_positive < 0)))
11232 {
11233 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11234 fold_overflow_warning (("assuming signed overflow does "
11235 "not occur when assuming that "
11236 "(X - c) >= X is always false"),
11237 WARN_STRICT_OVERFLOW_ALL);
11238 return constant_boolean_node (0, type);
11239 }
11240 }
11241 }
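/* Concretely, for signed int x with undefined overflow, x + 1 > x
   folds to true and x - 1 >= x to false; fold_overflow_warning
   announces each fold because it silently assumes x + 1 or x - 1
   cannot wrap.  */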
11242
11243 /* If we are comparing an ABS_EXPR with a constant, we can
11244 convert all the cases into explicit comparisons, but they may
11245 well not be faster than doing the ABS and one comparison.
11246 But ABS (X) <= C is a range comparison, which becomes a subtraction
11247 and a comparison, and is probably faster. */
11248 if (code == LE_EXPR
11249 && TREE_CODE (arg1) == INTEGER_CST
11250 && TREE_CODE (arg0) == ABS_EXPR
11251 && ! TREE_SIDE_EFFECTS (arg0)
11252 && (0 != (tem = negate_expr (arg1)))
11253 && TREE_CODE (tem) == INTEGER_CST
11254 && !TREE_OVERFLOW (tem))
11255 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11256 build2 (GE_EXPR, type,
11257 TREE_OPERAND (arg0, 0), tem),
11258 build2 (LE_EXPR, type,
11259 TREE_OPERAND (arg0, 0), arg1));
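/* E.g. abs (x) <= 5 folds to x >= -5 && x <= 5, a range check that
   never materializes the absolute value; the TREE_OVERFLOW test
   skips bounds whose negation would overflow, such as INT_MIN.  */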
11260
11261 /* Convert ABS_EXPR<x> >= 0 to true. */
11262 strict_overflow_p = false;
11263 if (code == GE_EXPR
11264 && (integer_zerop (arg1)
11265 || (! HONOR_NANS (arg0)
11266 && real_zerop (arg1)))
11267 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11268 {
11269 if (strict_overflow_p)
11270 fold_overflow_warning (("assuming signed overflow does not occur "
11271 "when simplifying comparison of "
11272 "absolute value and zero"),
11273 WARN_STRICT_OVERFLOW_CONDITIONAL);
11274 return omit_one_operand_loc (loc, type,
11275 constant_boolean_node (true, type),
11276 arg0);
11277 }
11278
11279 /* Convert ABS_EXPR<x> < 0 to false. */
11280 strict_overflow_p = false;
11281 if (code == LT_EXPR
11282 && (integer_zerop (arg1) || real_zerop (arg1))
11283 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11284 {
11285 if (strict_overflow_p)
11286 fold_overflow_warning (("assuming signed overflow does not occur "
11287 "when simplifying comparison of "
11288 "absolute value and zero"),
11289 WARN_STRICT_OVERFLOW_CONDITIONAL);
11290 return omit_one_operand_loc (loc, type,
11291 constant_boolean_node (false, type),
11292 arg0);
11293 }
11294
11295 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11296 and similarly for >= into !=. */
11297 if ((code == LT_EXPR || code == GE_EXPR)
11298 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11299 && TREE_CODE (arg1) == LSHIFT_EXPR
11300 && integer_onep (TREE_OPERAND (arg1, 0)))
11301 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11302 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11303 TREE_OPERAND (arg1, 1)),
11304 build_zero_cst (TREE_TYPE (arg0)));
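/* E.g. for unsigned x, x < (1 << y) folds to (x >> y) == 0: x is
   below the single set bit exactly when no bit at position y or
   higher is set; x >= (1 << y) likewise becomes (x >> y) != 0.  */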
11305
11306 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11307 otherwise Y might be >= # of bits in X's type and thus e.g.
11308 (unsigned char) (1 << Y) for Y == 15 might be 0.
11309 If the cast is widening, then 1 << Y should have unsigned type,
11310 otherwise if Y is the number of bits in the signed shift type minus 1,
11311 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11312 Y == 31 might be 0xffffffff80000000. */
11313 if ((code == LT_EXPR || code == GE_EXPR)
11314 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11315 && CONVERT_EXPR_P (arg1)
11316 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11317 && (element_precision (TREE_TYPE (arg1))
11318 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11319 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11320 || (element_precision (TREE_TYPE (arg1))
11321 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11322 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11323 {
11324 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11325 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11326 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11327 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11328 build_zero_cst (TREE_TYPE (arg0)));
11329 }
11330
11331 return NULL_TREE;
11332
11333 case UNORDERED_EXPR:
11334 case ORDERED_EXPR:
11335 case UNLT_EXPR:
11336 case UNLE_EXPR:
11337 case UNGT_EXPR:
11338 case UNGE_EXPR:
11339 case UNEQ_EXPR:
11340 case LTGT_EXPR:
11341 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11342 {
11343 tree targ0 = strip_float_extensions (arg0);
11344 tree targ1 = strip_float_extensions (arg1);
11345 tree newtype = TREE_TYPE (targ0);
11346
11347 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11348 newtype = TREE_TYPE (targ1);
11349
11350 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11351 return fold_build2_loc (loc, code, type,
11352 fold_convert_loc (loc, newtype, targ0),
11353 fold_convert_loc (loc, newtype, targ1));
11354 }
11355
11356 return NULL_TREE;
11357
11358 case COMPOUND_EXPR:
11359 /* When pedantic, a compound expression can be neither an lvalue
11360 nor an integer constant expression. */
11361 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11362 return NULL_TREE;
11363 /* Don't let (0, 0) be a null pointer constant. */
11364 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11365 : fold_convert_loc (loc, type, arg1);
11366 return pedantic_non_lvalue_loc (loc, tem);
11367
11368 case ASSERT_EXPR:
11369 /* An ASSERT_EXPR should never be passed to fold_binary. */
11370 gcc_unreachable ();
11371
11372 default:
11373 return NULL_TREE;
11374 } /* switch (code) */
11375 }
11376
11377 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11378 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11379 of GOTO_EXPR. */
11380
11381 static tree
11382 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11383 {
11384 switch (TREE_CODE (*tp))
11385 {
11386 case LABEL_EXPR:
11387 return *tp;
11388
11389 case GOTO_EXPR:
11390 *walk_subtrees = 0;
11391
11392 /* ... fall through ... */
11393
11394 default:
11395 return NULL_TREE;
11396 }
11397 }
11398
11399 /* Return whether the sub-tree ST contains a label which is accessible from
11400 outside the sub-tree. */
11401
11402 static bool
11403 contains_label_p (tree st)
11404 {
11405 return
11406 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11407 }
11408
11409 /* Fold a ternary expression of code CODE and type TYPE with operands
11410 OP0, OP1, and OP2. Return the folded expression if folding is
11411 successful. Otherwise, return NULL_TREE. */
11412
11413 tree
11414 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11415 tree op0, tree op1, tree op2)
11416 {
11417 tree tem;
11418 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11419 enum tree_code_class kind = TREE_CODE_CLASS (code);
11420
11421 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11422 && TREE_CODE_LENGTH (code) == 3);
11423
11424 /* If this is a commutative operation, and OP0 is a constant, move it
11425 to OP1 to reduce the number of tests below. */
11426 if (commutative_ternary_tree_code (code)
11427 && tree_swap_operands_p (op0, op1, true))
11428 return fold_build3_loc (loc, code, type, op1, op0, op2);
11429
11430 tem = generic_simplify (loc, code, type, op0, op1, op2);
11431 if (tem)
11432 return tem;
11433
11434 /* Strip any conversions that don't change the mode. This is safe
11435 for every expression, except for a comparison expression because
11436 its signedness is derived from its operands. So, in the latter
11437 case, only strip conversions that don't change the signedness.
11438
11439 Note that this is done as an internal manipulation within the
11440 constant folder, in order to find the simplest representation of
11441 the arguments so that their form can be studied. In any cases,
11442 the appropriate type conversions should be put back in the tree
11443 that will get out of the constant folder. */
11444 if (op0)
11445 {
11446 arg0 = op0;
11447 STRIP_NOPS (arg0);
11448 }
11449
11450 if (op1)
11451 {
11452 arg1 = op1;
11453 STRIP_NOPS (arg1);
11454 }
11455
11456 if (op2)
11457 {
11458 arg2 = op2;
11459 STRIP_NOPS (arg2);
11460 }
11461
11462 switch (code)
11463 {
11464 case COMPONENT_REF:
11465 if (TREE_CODE (arg0) == CONSTRUCTOR
11466 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11467 {
11468 unsigned HOST_WIDE_INT idx;
11469 tree field, value;
11470 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11471 if (field == arg1)
11472 return value;
11473 }
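/* Illustrative sketch: reading field b from a constant aggregate
   {.a = 1, .b = 2} walks the (field, value) pairs above and returns
   the value 2 once the FIELD_DECL for b is matched.  */
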
11474 return NULL_TREE;
11475
11476 case COND_EXPR:
11477 case VEC_COND_EXPR:
11478 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11479 so all simple results must be passed through pedantic_non_lvalue. */
11480 if (TREE_CODE (arg0) == INTEGER_CST)
11481 {
11482 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11483 tem = integer_zerop (arg0) ? op2 : op1;
11484 /* Only optimize constant conditions when the selected branch
11485 has the same type as the COND_EXPR. This avoids optimizing
11486 away "c ? x : throw", where the throw has a void type.
11487 Avoid throwing away an operand that contains a label. */
11488 if ((!TREE_SIDE_EFFECTS (unused_op)
11489 || !contains_label_p (unused_op))
11490 && (! VOID_TYPE_P (TREE_TYPE (tem))
11491 || VOID_TYPE_P (type)))
11492 return pedantic_non_lvalue_loc (loc, tem);
11493 return NULL_TREE;
11494 }
11495 else if (TREE_CODE (arg0) == VECTOR_CST)
11496 {
11497 if ((TREE_CODE (arg1) == VECTOR_CST
11498 || TREE_CODE (arg1) == CONSTRUCTOR)
11499 && (TREE_CODE (arg2) == VECTOR_CST
11500 || TREE_CODE (arg2) == CONSTRUCTOR))
11501 {
11502 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11503 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11504 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11505 for (i = 0; i < nelts; i++)
11506 {
11507 tree val = VECTOR_CST_ELT (arg0, i);
11508 if (integer_all_onesp (val))
11509 sel[i] = i;
11510 else if (integer_zerop (val))
11511 sel[i] = nelts + i;
11512 else /* Currently unreachable. */
11513 return NULL_TREE;
11514 }
11515 tree t = fold_vec_perm (type, arg1, arg2, sel);
11516 if (t != NULL_TREE)
11517 return t;
11518 }
11519 }
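/* E.g. a constant v4si condition of {-1, 0, -1, 0} blends
   element-wise, taking elements 0 and 2 from arg1 and 1 and 3 from
   arg2; that is the permutation {0, 5, 2, 7}, which fold_vec_perm
   can evaluate when both operands are constant.  */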
11520
11521 /* If we have A op B ? A : C, we may be able to convert this to a
11522 simpler expression, depending on the operation and the values
11523 of B and C. Signed zeros prevent all of these transformations,
11524 for reasons given above each one.
11525
11526 Also try swapping the arguments and inverting the conditional. */
11527 if (COMPARISON_CLASS_P (arg0)
11528 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11529 arg1, TREE_OPERAND (arg0, 1))
11530 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11531 {
11532 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11533 if (tem)
11534 return tem;
11535 }
11536
11537 if (COMPARISON_CLASS_P (arg0)
11538 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11539 op2,
11540 TREE_OPERAND (arg0, 1))
11541 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11542 {
11543 location_t loc0 = expr_location_or (arg0, loc);
11544 tem = fold_invert_truthvalue (loc0, arg0);
11545 if (tem && COMPARISON_CLASS_P (tem))
11546 {
11547 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11548 if (tem)
11549 return tem;
11550 }
11551 }
11552
11553 /* If the second operand is simpler than the third, swap them
11554 since that produces better jump optimization results. */
11555 if (truth_value_p (TREE_CODE (arg0))
11556 && tree_swap_operands_p (op1, op2, false))
11557 {
11558 location_t loc0 = expr_location_or (arg0, loc);
11559 /* See if this can be inverted. If it can't, possibly because
11560 it was a floating-point inequality comparison, don't do
11561 anything. */
11562 tem = fold_invert_truthvalue (loc0, arg0);
11563 if (tem)
11564 return fold_build3_loc (loc, code, type, tem, op2, op1);
11565 }
11566
11567 /* Convert A ? 1 : 0 to simply A. */
11568 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11569 : (integer_onep (op1)
11570 && !VECTOR_TYPE_P (type)))
11571 && integer_zerop (op2)
11572 /* If we try to convert OP0 to our type, the
11573 call to fold will try to move the conversion inside
11574 a COND, which will recurse. In that case, the COND_EXPR
11575 is probably the best choice, so leave it alone. */
11576 && type == TREE_TYPE (arg0))
11577 return pedantic_non_lvalue_loc (loc, arg0);
11578
11579 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11580 over COND_EXPR in cases such as floating point comparisons. */
11581 if (integer_zerop (op1)
11582 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11583 : (integer_onep (op2)
11584 && !VECTOR_TYPE_P (type)))
11585 && truth_value_p (TREE_CODE (arg0)))
11586 return pedantic_non_lvalue_loc (loc,
11587 fold_convert_loc (loc, type,
11588 invert_truthvalue_loc (loc,
11589 arg0)));
11590
11591 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11592 if (TREE_CODE (arg0) == LT_EXPR
11593 && integer_zerop (TREE_OPERAND (arg0, 1))
11594 && integer_zerop (op2)
11595 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11596 {
11597 /* sign_bit_p looks through both zero and sign extensions,
11598 but for this optimization only sign extensions are
11599 usable. */
11600 tree tem2 = TREE_OPERAND (arg0, 0);
11601 while (tem != tem2)
11602 {
11603 if (TREE_CODE (tem2) != NOP_EXPR
11604 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11605 {
11606 tem = NULL_TREE;
11607 break;
11608 }
11609 tem2 = TREE_OPERAND (tem2, 0);
11610 }
11611 /* sign_bit_p only checks ARG1 bits within A's precision.
11612 If <sign bit of A> has wider type than A, bits outside
11613 of A's precision in <sign bit of A> need to be checked.
11614 If they are all 0, this optimization needs to be done
11615 in unsigned A's type; if they are all 1, in signed A's type;
11616 otherwise this can't be done. */
11617 if (tem
11618 && TYPE_PRECISION (TREE_TYPE (tem))
11619 < TYPE_PRECISION (TREE_TYPE (arg1))
11620 && TYPE_PRECISION (TREE_TYPE (tem))
11621 < TYPE_PRECISION (type))
11622 {
11623 int inner_width, outer_width;
11624 tree tem_type;
11625
11626 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11627 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11628 if (outer_width > TYPE_PRECISION (type))
11629 outer_width = TYPE_PRECISION (type);
11630
11631 wide_int mask = wi::shifted_mask
11632 (inner_width, outer_width - inner_width, false,
11633 TYPE_PRECISION (TREE_TYPE (arg1)));
11634
11635 wide_int common = mask & arg1;
11636 if (common == mask)
11637 {
11638 tem_type = signed_type_for (TREE_TYPE (tem));
11639 tem = fold_convert_loc (loc, tem_type, tem);
11640 }
11641 else if (common == 0)
11642 {
11643 tem_type = unsigned_type_for (TREE_TYPE (tem));
11644 tem = fold_convert_loc (loc, tem_type, tem);
11645 }
11646 else
11647 tem = NULL;
11648 }
11649
11650 if (tem)
11651 return
11652 fold_convert_loc (loc, type,
11653 fold_build2_loc (loc, BIT_AND_EXPR,
11654 TREE_TYPE (tem), tem,
11655 fold_convert_loc (loc,
11656 TREE_TYPE (tem),
11657 arg1)));
11658 }
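/* E.g. for 32-bit int a, a < 0 ? INT_MIN : 0 produces exactly the
   sign bit of a, so it folds to a & INT_MIN, with the conversions
   above choosing a signedness in which the wider mask bits are
   consistent.  */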
11659
11660 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11661 already handled above. */
11662 if (TREE_CODE (arg0) == BIT_AND_EXPR
11663 && integer_onep (TREE_OPERAND (arg0, 1))
11664 && integer_zerop (op2)
11665 && integer_pow2p (arg1))
11666 {
11667 tree tem = TREE_OPERAND (arg0, 0);
11668 STRIP_NOPS (tem);
11669 if (TREE_CODE (tem) == RSHIFT_EXPR
11670 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11671 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11672 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11673 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11674 TREE_OPERAND (tem, 0), arg1);
11675 }
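/* E.g. ((a >> 3) & 1) ? 8 : 0 folds to a & 8: testing bit 3 and then
   materializing 1 << 3 is the same as masking bit 3 directly.  */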
11676
11677 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11678 is probably obsolete because the first operand should be a
11679 truth value (that's why we have the two cases above), but let's
11680 leave it in until we can confirm this for all front-ends. */
11681 if (integer_zerop (op2)
11682 && TREE_CODE (arg0) == NE_EXPR
11683 && integer_zerop (TREE_OPERAND (arg0, 1))
11684 && integer_pow2p (arg1)
11685 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11686 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11687 arg1, OEP_ONLY_CONST))
11688 return pedantic_non_lvalue_loc (loc,
11689 fold_convert_loc (loc, type,
11690 TREE_OPERAND (arg0, 0)));
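/* E.g. (x & 8) != 0 ? 8 : 0 folds to x & 8, the conditional merely
   reproducing the masked value.  */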
11691
11692 /* Disable the transformations below for vectors, since
11693 fold_binary_op_with_conditional_arg may undo them immediately,
11694 yielding an infinite loop. */
11695 if (code == VEC_COND_EXPR)
11696 return NULL_TREE;
11697
11698 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11699 if (integer_zerop (op2)
11700 && truth_value_p (TREE_CODE (arg0))
11701 && truth_value_p (TREE_CODE (arg1))
11702 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11703 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11704 : TRUTH_ANDIF_EXPR,
11705 type, fold_convert_loc (loc, type, arg0), arg1);
11706
11707 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11708 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11709 && truth_value_p (TREE_CODE (arg0))
11710 && truth_value_p (TREE_CODE (arg1))
11711 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11712 {
11713 location_t loc0 = expr_location_or (arg0, loc);
11714 /* Only perform transformation if ARG0 is easily inverted. */
11715 tem = fold_invert_truthvalue (loc0, arg0);
11716 if (tem)
11717 return fold_build2_loc (loc, code == VEC_COND_EXPR
11718 ? BIT_IOR_EXPR
11719 : TRUTH_ORIF_EXPR,
11720 type, fold_convert_loc (loc, type, tem),
11721 arg1);
11722 }
11723
11724 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11725 if (integer_zerop (arg1)
11726 && truth_value_p (TREE_CODE (arg0))
11727 && truth_value_p (TREE_CODE (op2))
11728 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11729 {
11730 location_t loc0 = expr_location_or (arg0, loc);
11731 /* Only perform transformation if ARG0 is easily inverted. */
11732 tem = fold_invert_truthvalue (loc0, arg0);
11733 if (tem)
11734 return fold_build2_loc (loc, code == VEC_COND_EXPR
11735 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11736 type, fold_convert_loc (loc, type, tem),
11737 op2);
11738 }
11739
11740 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11741 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11742 && truth_value_p (TREE_CODE (arg0))
11743 && truth_value_p (TREE_CODE (op2))
11744 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11745 return fold_build2_loc (loc, code == VEC_COND_EXPR
11746 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11747 type, fold_convert_loc (loc, type, arg0), op2);
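/* Summarizing the four truth-value rewrites above, for boolean a, b:
   a ? b : 0 => a && b     a ? b : 1 => !a || b
   a ? 0 : b => !a && b    a ? 1 : b => a || b
   with the bitwise forms used for VEC_COND_EXPR.  */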
11748
11749 return NULL_TREE;
11750
11751 case CALL_EXPR:
11752 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11753 of fold_ternary on them. */
11754 gcc_unreachable ();
11755
11756 case BIT_FIELD_REF:
11757 if ((TREE_CODE (arg0) == VECTOR_CST
11758 || (TREE_CODE (arg0) == CONSTRUCTOR
11759 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11760 && (type == TREE_TYPE (TREE_TYPE (arg0))
11761 || (TREE_CODE (type) == VECTOR_TYPE
11762 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11763 {
11764 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11765 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11766 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11767 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11768
11769 if (n != 0
11770 && (idx % width) == 0
11771 && (n % width) == 0
11772 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11773 {
11774 idx = idx / width;
11775 n = n / width;
11776
11777 if (TREE_CODE (arg0) == VECTOR_CST)
11778 {
11779 if (n == 1)
11780 return VECTOR_CST_ELT (arg0, idx);
11781
11782 tree *vals = XALLOCAVEC (tree, n);
11783 for (unsigned i = 0; i < n; ++i)
11784 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11785 return build_vector (type, vals);
11786 }
11787
11788 /* Constructor elements can be subvectors. */
11789 unsigned HOST_WIDE_INT k = 1;
11790 if (CONSTRUCTOR_NELTS (arg0) != 0)
11791 {
11792 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11793 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11794 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11795 }
11796
11797 /* We keep an exact subset of the constructor elements. */
11798 if ((idx % k) == 0 && (n % k) == 0)
11799 {
11800 if (CONSTRUCTOR_NELTS (arg0) == 0)
11801 return build_constructor (type, NULL);
11802 idx /= k;
11803 n /= k;
11804 if (n == 1)
11805 {
11806 if (idx < CONSTRUCTOR_NELTS (arg0))
11807 return CONSTRUCTOR_ELT (arg0, idx)->value;
11808 return build_zero_cst (type);
11809 }
11810
11811 vec<constructor_elt, va_gc> *vals;
11812 vec_alloc (vals, n);
11813 for (unsigned i = 0;
11814 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11815 ++i)
11816 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11817 CONSTRUCTOR_ELT
11818 (arg0, idx + i)->value);
11819 return build_constructor (type, vals);
11820 }
11821 /* The bitfield references a single constructor element. */
11822 else if (idx + n <= (idx / k + 1) * k)
11823 {
11824 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11825 return build_zero_cst (type);
11826 else if (n == k)
11827 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11828 else
11829 return fold_build3_loc (loc, code, type,
11830 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11831 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11832 }
11833 }
11834 }
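/* E.g. extracting bits [32, 64) from a v4si constant selects whole
   element 1, so the reference folds to VECTOR_CST_ELT (arg0, 1);
   wider element-aligned extracts build a smaller vector or
   constructor from the selected slice.  */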
11835
11836 /* A bit-field-ref that referenced the full argument can be stripped. */
11837 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11838 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11839 && integer_zerop (op2))
11840 return fold_convert_loc (loc, type, arg0);
11841
11842 /* On constants we can use native encode/interpret to constant
11843 fold (nearly) all BIT_FIELD_REFs. */
11844 if (CONSTANT_CLASS_P (arg0)
11845 && can_native_interpret_type_p (type)
11846 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11847 /* This limitation should not be necessary, we just need to
11848 round this up to mode size. */
11849 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11850 /* Need bit-shifting of the buffer to relax the following. */
11851 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11852 {
11853 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11854 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11855 unsigned HOST_WIDE_INT clen;
11856 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11857 /* ??? We cannot tell native_encode_expr to start at
11858 some random byte only. So limit us to a reasonable amount
11859 of work. */
11860 if (clen <= 4096)
11861 {
11862 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11863 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11864 if (len > 0
11865 && len * BITS_PER_UNIT >= bitpos + bitsize)
11866 {
11867 tree v = native_interpret_expr (type,
11868 b + bitpos / BITS_PER_UNIT,
11869 bitsize / BITS_PER_UNIT);
11870 if (v)
11871 return v;
11872 }
11873 }
11874 }
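/* Sketch of the mechanism: a byte-aligned BIT_FIELD_REF of, say,
   4 bytes at byte offset 8 in a 16-byte constant is folded by
   serializing the constant with native_encode_expr and re-reading
   the addressed bytes with native_interpret_expr in target byte
   order.  */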
11875
11876 return NULL_TREE;
11877
11878 case FMA_EXPR:
11879 /* For integers we can decompose the FMA if possible. */
11880 if (TREE_CODE (arg0) == INTEGER_CST
11881 && TREE_CODE (arg1) == INTEGER_CST)
11882 return fold_build2_loc (loc, PLUS_EXPR, type,
11883 const_binop (MULT_EXPR, arg0, arg1), arg2);
11884 if (integer_zerop (arg2))
11885 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11886
11887 return fold_fma (loc, type, arg0, arg1, arg2);
11888
11889 case VEC_PERM_EXPR:
11890 if (TREE_CODE (arg2) == VECTOR_CST)
11891 {
11892 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11893 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11894 unsigned char *sel2 = sel + nelts;
11895 bool need_mask_canon = false;
11896 bool need_mask_canon2 = false;
11897 bool all_in_vec0 = true;
11898 bool all_in_vec1 = true;
11899 bool maybe_identity = true;
11900 bool single_arg = (op0 == op1);
11901 bool changed = false;
11902
11903 mask2 = 2 * nelts - 1;
11904 mask = single_arg ? (nelts - 1) : mask2;
11905 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11906 for (i = 0; i < nelts; i++)
11907 {
11908 tree val = VECTOR_CST_ELT (arg2, i);
11909 if (TREE_CODE (val) != INTEGER_CST)
11910 return NULL_TREE;
11911
11912 /* Make sure that the perm value is in an acceptable
11913 range. */
11914 wide_int t = val;
11915 need_mask_canon |= wi::gtu_p (t, mask);
11916 need_mask_canon2 |= wi::gtu_p (t, mask2);
11917 sel[i] = t.to_uhwi () & mask;
11918 sel2[i] = t.to_uhwi () & mask2;
11919
11920 if (sel[i] < nelts)
11921 all_in_vec1 = false;
11922 else
11923 all_in_vec0 = false;
11924
11925 if ((sel[i] & (nelts-1)) != i)
11926 maybe_identity = false;
11927 }
11928
11929 if (maybe_identity)
11930 {
11931 if (all_in_vec0)
11932 return op0;
11933 if (all_in_vec1)
11934 return op1;
11935 }
11936
11937 if (all_in_vec0)
11938 op1 = op0;
11939 else if (all_in_vec1)
11940 {
11941 op0 = op1;
11942 for (i = 0; i < nelts; i++)
11943 sel[i] -= nelts;
11944 need_mask_canon = true;
11945 }
11946
11947 if ((TREE_CODE (op0) == VECTOR_CST
11948 || TREE_CODE (op0) == CONSTRUCTOR)
11949 && (TREE_CODE (op1) == VECTOR_CST
11950 || TREE_CODE (op1) == CONSTRUCTOR))
11951 {
11952 tree t = fold_vec_perm (type, op0, op1, sel);
11953 if (t != NULL_TREE)
11954 return t;
11955 }
11956
11957 if (op0 == op1 && !single_arg)
11958 changed = true;
11959
11960 /* Some targets are deficient and fail to expand a single-
11961 argument permutation while still allowing an equivalent
11962 2-argument version. */
11963 if (need_mask_canon && arg2 == op2
11964 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11965 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11966 {
11967 need_mask_canon = need_mask_canon2;
11968 sel = sel2;
11969 }
11970
11971 if (need_mask_canon && arg2 == op2)
11972 {
11973 tree *tsel = XALLOCAVEC (tree, nelts);
11974 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11975 for (i = 0; i < nelts; i++)
11976 tsel[i] = build_int_cst (eltype, sel[i]);
11977 op2 = build_vector (TREE_TYPE (arg2), tsel);
11978 changed = true;
11979 }
11980
11981 if (changed)
11982 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11983 }
11984 return NULL_TREE;
11985
11986 default:
11987 return NULL_TREE;
11988 } /* switch (code) */
11989 }
11990
11991 /* Get the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11992 of an array (or vector). */
11993
11994 tree
11995 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11996 {
11997 tree index_type = NULL_TREE;
11998 offset_int low_bound = 0;
11999
12000 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12001 {
12002 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12003 if (domain_type && TYPE_MIN_VALUE (domain_type))
12004 {
12005 /* Static constructors for variably sized objects make no sense. */
12006 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12007 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12008 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12009 }
12010 }
12011
12012 if (index_type)
12013 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12014 TYPE_SIGN (index_type));
12015
12016 offset_int index = low_bound - 1;
12017 if (index_type)
12018 index = wi::ext (index, TYPE_PRECISION (index_type),
12019 TYPE_SIGN (index_type));
12020
12021 offset_int max_index;
12022 unsigned HOST_WIDE_INT cnt;
12023 tree cfield, cval;
12024
12025 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12026 {
12027 /* An array constructor may set the index explicitly, specify a range,
12028 or leave the index NULL, meaning that it is the next index after the
12029 previous one. */
12030 if (cfield)
12031 {
12032 if (TREE_CODE (cfield) == INTEGER_CST)
12033 max_index = index = wi::to_offset (cfield);
12034 else
12035 {
12036 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12037 index = wi::to_offset (TREE_OPERAND (cfield, 0));
12038 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
12039 }
12040 }
12041 else
12042 {
12043 index += 1;
12044 if (index_type)
12045 index = wi::ext (index, TYPE_PRECISION (index_type),
12046 TYPE_SIGN (index_type));
12047 max_index = index;
12048 }
12049
12050 /* Do we have a match? */
12051 if (wi::cmpu (access_index, index) >= 0
12052 && wi::cmpu (access_index, max_index) <= 0)
12053 return cval;
12054 }
12055 return NULL_TREE;
12056 }
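/* For example (a sketch using the GNU designated-initializer source form):

     int a[6] = { [1 ... 3] = 7 };

   produces a CONSTRUCTOR whose single element carries a RANGE_EXPR index
   covering 1..3, so get_array_ctor_element_at_index returns the
   INTEGER_CST 7 for ACCESS_INDEX 1, 2 or 3 and NULL_TREE otherwise.
   With no explicit indices, as in { 5, 6 }, each element implicitly
   follows the previous one, giving 5 at index 0 and 6 at index 1.  */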
12057
12058 /* Perform constant folding and related simplification of EXPR.
12059 The related simplifications include x*1 => x, x*0 => 0, etc.,
12060 and application of the associative law.
12061 NOP_EXPR conversions may be removed freely (as long as we
12062 are careful not to change the type of the overall expression).
12063 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12064 but we can constant-fold them if they have constant operands. */
12065
12066 #ifdef ENABLE_FOLD_CHECKING
12067 # define fold(x) fold_1 (x)
12068 static tree fold_1 (tree);
12069 static
12070 #endif
12071 tree
12072 fold (tree expr)
12073 {
12074 const tree t = expr;
12075 enum tree_code code = TREE_CODE (t);
12076 enum tree_code_class kind = TREE_CODE_CLASS (code);
12077 tree tem;
12078 location_t loc = EXPR_LOCATION (expr);
12079
12080 /* Return right away if a constant. */
12081 if (kind == tcc_constant)
12082 return t;
12083
12084 /* CALL_EXPR-like objects with variable numbers of operands are
12085 treated specially. */
12086 if (kind == tcc_vl_exp)
12087 {
12088 if (code == CALL_EXPR)
12089 {
12090 tem = fold_call_expr (loc, expr, false);
12091 return tem ? tem : expr;
12092 }
12093 return expr;
12094 }
12095
12096 if (IS_EXPR_CODE_CLASS (kind))
12097 {
12098 tree type = TREE_TYPE (t);
12099 tree op0, op1, op2;
12100
12101 switch (TREE_CODE_LENGTH (code))
12102 {
12103 case 1:
12104 op0 = TREE_OPERAND (t, 0);
12105 tem = fold_unary_loc (loc, code, type, op0);
12106 return tem ? tem : expr;
12107 case 2:
12108 op0 = TREE_OPERAND (t, 0);
12109 op1 = TREE_OPERAND (t, 1);
12110 tem = fold_binary_loc (loc, code, type, op0, op1);
12111 return tem ? tem : expr;
12112 case 3:
12113 op0 = TREE_OPERAND (t, 0);
12114 op1 = TREE_OPERAND (t, 1);
12115 op2 = TREE_OPERAND (t, 2);
12116 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12117 return tem ? tem : expr;
12118 default:
12119 break;
12120 }
12121 }
12122
12123 switch (code)
12124 {
12125 case ARRAY_REF:
12126 {
12127 tree op0 = TREE_OPERAND (t, 0);
12128 tree op1 = TREE_OPERAND (t, 1);
12129
12130 if (TREE_CODE (op1) == INTEGER_CST
12131 && TREE_CODE (op0) == CONSTRUCTOR
12132 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12133 {
12134 tree val = get_array_ctor_element_at_index (op0,
12135 wi::to_offset (op1));
12136 if (val)
12137 return val;
12138 }
12139
12140 return t;
12141 }
12142
12143 /* Return a VECTOR_CST if possible. */
12144 case CONSTRUCTOR:
12145 {
12146 tree type = TREE_TYPE (t);
12147 if (TREE_CODE (type) != VECTOR_TYPE)
12148 return t;
12149
12150 unsigned i;
12151 tree val;
12152 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12153 if (! CONSTANT_CLASS_P (val))
12154 return t;
12155
12156 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12157 }
12158
12159 case CONST_DECL:
12160 return fold (DECL_INITIAL (t));
12161
12162 default:
12163 return t;
12164 } /* switch (code) */
12165 }
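/* A minimal usage sketch (hypothetical caller code, not part of this file):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold (build2 (PLUS_EXPR, integer_type_node, two, three));

   The arity dispatch above routes this through fold_binary_loc, and SUM
   ends up an INTEGER_CST with value 5.  Trees that cannot be simplified
   are returned unchanged, so callers need not check for NULL.  */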
12166
12167 #ifdef ENABLE_FOLD_CHECKING
12168 #undef fold
12169
12170 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12171 hash_table<nofree_ptr_hash<const tree_node> > *);
12172 static void fold_check_failed (const_tree, const_tree);
12173 void print_fold_checksum (const_tree);
12174
12175 /* When --enable-checking=fold is in effect, compute a digest of expr
12176 before and after the actual fold call to verify that fold did not
12177 accidentally change the original expr. */
12178
12179 tree
12180 fold (tree expr)
12181 {
12182 tree ret;
12183 struct md5_ctx ctx;
12184 unsigned char checksum_before[16], checksum_after[16];
12185 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12186
12187 md5_init_ctx (&ctx);
12188 fold_checksum_tree (expr, &ctx, &ht);
12189 md5_finish_ctx (&ctx, checksum_before);
12190 ht.empty ();
12191
12192 ret = fold_1 (expr);
12193
12194 md5_init_ctx (&ctx);
12195 fold_checksum_tree (expr, &ctx, &ht);
12196 md5_finish_ctx (&ctx, checksum_after);
12197
12198 if (memcmp (checksum_before, checksum_after, 16))
12199 fold_check_failed (expr, ret);
12200
12201 return ret;
12202 }
12203
12204 void
12205 print_fold_checksum (const_tree expr)
12206 {
12207 struct md5_ctx ctx;
12208 unsigned char checksum[16], cnt;
12209 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12210
12211 md5_init_ctx (&ctx);
12212 fold_checksum_tree (expr, &ctx, &ht);
12213 md5_finish_ctx (&ctx, checksum);
12214 for (cnt = 0; cnt < 16; ++cnt)
12215 fprintf (stderr, "%02x", checksum[cnt]);
12216 putc ('\n', stderr);
12217 }
12218
12219 static void
12220 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12221 {
12222 internal_error ("fold check: original tree changed by fold");
12223 }
12224
12225 static void
12226 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12227 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12228 {
12229 const tree_node **slot;
12230 enum tree_code code;
12231 union tree_node buf;
12232 int i, len;
12233
12234 recursive_label:
12235 if (expr == NULL)
12236 return;
12237 slot = ht->find_slot (expr, INSERT);
12238 if (*slot != NULL)
12239 return;
12240 *slot = expr;
12241 code = TREE_CODE (expr);
12242 if (TREE_CODE_CLASS (code) == tcc_declaration
12243 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12244 {
12245 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12246 memcpy ((char *) &buf, expr, tree_size (expr));
12247 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12248 buf.decl_with_vis.symtab_node = NULL;
12249 expr = (tree) &buf;
12250 }
12251 else if (TREE_CODE_CLASS (code) == tcc_type
12252 && (TYPE_POINTER_TO (expr)
12253 || TYPE_REFERENCE_TO (expr)
12254 || TYPE_CACHED_VALUES_P (expr)
12255 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12256 || TYPE_NEXT_VARIANT (expr)))
12257 {
12258 /* Allow these fields to be modified. */
12259 tree tmp;
12260 memcpy ((char *) &buf, expr, tree_size (expr));
12261 expr = tmp = (tree) &buf;
12262 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12263 TYPE_POINTER_TO (tmp) = NULL;
12264 TYPE_REFERENCE_TO (tmp) = NULL;
12265 TYPE_NEXT_VARIANT (tmp) = NULL;
12266 if (TYPE_CACHED_VALUES_P (tmp))
12267 {
12268 TYPE_CACHED_VALUES_P (tmp) = 0;
12269 TYPE_CACHED_VALUES (tmp) = NULL;
12270 }
12271 }
12272 md5_process_bytes (expr, tree_size (expr), ctx);
12273 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12274 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12275 if (TREE_CODE_CLASS (code) != tcc_type
12276 && TREE_CODE_CLASS (code) != tcc_declaration
12277 && code != TREE_LIST
12278 && code != SSA_NAME
12279 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12280 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12281 switch (TREE_CODE_CLASS (code))
12282 {
12283 case tcc_constant:
12284 switch (code)
12285 {
12286 case STRING_CST:
12287 md5_process_bytes (TREE_STRING_POINTER (expr),
12288 TREE_STRING_LENGTH (expr), ctx);
12289 break;
12290 case COMPLEX_CST:
12291 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12292 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12293 break;
12294 case VECTOR_CST:
12295 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12296 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12297 break;
12298 default:
12299 break;
12300 }
12301 break;
12302 case tcc_exceptional:
12303 switch (code)
12304 {
12305 case TREE_LIST:
12306 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12307 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12308 expr = TREE_CHAIN (expr);
12309 goto recursive_label;
12310 break;
12311 case TREE_VEC:
12312 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12313 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12314 break;
12315 default:
12316 break;
12317 }
12318 break;
12319 case tcc_expression:
12320 case tcc_reference:
12321 case tcc_comparison:
12322 case tcc_unary:
12323 case tcc_binary:
12324 case tcc_statement:
12325 case tcc_vl_exp:
12326 len = TREE_OPERAND_LENGTH (expr);
12327 for (i = 0; i < len; ++i)
12328 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12329 break;
12330 case tcc_declaration:
12331 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12332 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12333 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12334 {
12335 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12336 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12337 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12338 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12339 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12340 }
12341
12342 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12343 {
12344 if (TREE_CODE (expr) == FUNCTION_DECL)
12345 {
12346 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12347 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12348 }
12349 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12350 }
12351 break;
12352 case tcc_type:
12353 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12354 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12355 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12356 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12357 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12358 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12359 if (INTEGRAL_TYPE_P (expr)
12360 || SCALAR_FLOAT_TYPE_P (expr))
12361 {
12362 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12363 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12364 }
12365 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12366 if (TREE_CODE (expr) == RECORD_TYPE
12367 || TREE_CODE (expr) == UNION_TYPE
12368 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12369 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12370 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12371 break;
12372 default:
12373 break;
12374 }
12375 }
12376
12377 /* Helper function for outputting the checksum of a tree T. When
12378 debugging with gdb, you can "define mynext" to be "next" followed
12379 by "call debug_fold_checksum (op0)", then just trace down till the
12380 outputs differ. */
12381
12382 DEBUG_FUNCTION void
12383 debug_fold_checksum (const_tree t)
12384 {
12385 int i;
12386 unsigned char checksum[16];
12387 struct md5_ctx ctx;
12388 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12389
12390 md5_init_ctx (&ctx);
12391 fold_checksum_tree (t, &ctx, &ht);
12392 md5_finish_ctx (&ctx, checksum);
12393 ht.empty ();
12394
12395 for (i = 0; i < 16; i++)
12396 fprintf (stderr, "%d ", checksum[i]);
12397
12398 fprintf (stderr, "\n");
12399 }
12400
12401 #endif
12402
12403 /* Fold a unary tree expression with code CODE of type TYPE with an
12404 operand OP0. LOC is the location of the resulting expression.
12405 Return a folded expression if successful. Otherwise, return a tree
12406 expression with code CODE of type TYPE with an operand OP0. */
12407
12408 tree
12409 fold_build1_stat_loc (location_t loc,
12410 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12411 {
12412 tree tem;
12413 #ifdef ENABLE_FOLD_CHECKING
12414 unsigned char checksum_before[16], checksum_after[16];
12415 struct md5_ctx ctx;
12416 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12417
12418 md5_init_ctx (&ctx);
12419 fold_checksum_tree (op0, &ctx, &ht);
12420 md5_finish_ctx (&ctx, checksum_before);
12421 ht.empty ();
12422 #endif
12423
12424 tem = fold_unary_loc (loc, code, type, op0);
12425 if (!tem)
12426 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12427
12428 #ifdef ENABLE_FOLD_CHECKING
12429 md5_init_ctx (&ctx);
12430 fold_checksum_tree (op0, &ctx, &ht);
12431 md5_finish_ctx (&ctx, checksum_after);
12432
12433 if (memcmp (checksum_before, checksum_after, 16))
12434 fold_check_failed (op0, tem);
12435 #endif
12436 return tem;
12437 }
12438
12439 /* Fold a binary tree expression with code CODE of type TYPE with
12440 operands OP0 and OP1. LOC is the location of the resulting
12441 expression. Return a folded expression if successful. Otherwise,
12442 return a tree expression with code CODE of type TYPE with operands
12443 OP0 and OP1. */
12444
12445 tree
12446 fold_build2_stat_loc (location_t loc,
12447 enum tree_code code, tree type, tree op0, tree op1
12448 MEM_STAT_DECL)
12449 {
12450 tree tem;
12451 #ifdef ENABLE_FOLD_CHECKING
12452 unsigned char checksum_before_op0[16],
12453 checksum_before_op1[16],
12454 checksum_after_op0[16],
12455 checksum_after_op1[16];
12456 struct md5_ctx ctx;
12457 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12458
12459 md5_init_ctx (&ctx);
12460 fold_checksum_tree (op0, &ctx, &ht);
12461 md5_finish_ctx (&ctx, checksum_before_op0);
12462 ht.empty ();
12463
12464 md5_init_ctx (&ctx);
12465 fold_checksum_tree (op1, &ctx, &ht);
12466 md5_finish_ctx (&ctx, checksum_before_op1);
12467 ht.empty ();
12468 #endif
12469
12470 tem = fold_binary_loc (loc, code, type, op0, op1);
12471 if (!tem)
12472 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12473
12474 #ifdef ENABLE_FOLD_CHECKING
12475 md5_init_ctx (&ctx);
12476 fold_checksum_tree (op0, &ctx, &ht);
12477 md5_finish_ctx (&ctx, checksum_after_op0);
12478 ht.empty ();
12479
12480 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12481 fold_check_failed (op0, tem);
12482
12483 md5_init_ctx (&ctx);
12484 fold_checksum_tree (op1, &ctx, &ht);
12485 md5_finish_ctx (&ctx, checksum_after_op1);
12486
12487 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12488 fold_check_failed (op1, tem);
12489 #endif
12490 return tem;
12491 }
12492
12493 /* Fold a ternary tree expression with code CODE of type TYPE with
12494 operands OP0, OP1, and OP2. Return a folded expression if
12495 successful. Otherwise, return a tree expression with code CODE of
12496 type TYPE with operands OP0, OP1, and OP2. */
12497
12498 tree
12499 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12500 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12501 {
12502 tree tem;
12503 #ifdef ENABLE_FOLD_CHECKING
12504 unsigned char checksum_before_op0[16],
12505 checksum_before_op1[16],
12506 checksum_before_op2[16],
12507 checksum_after_op0[16],
12508 checksum_after_op1[16],
12509 checksum_after_op2[16];
12510 struct md5_ctx ctx;
12511 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12512
12513 md5_init_ctx (&ctx);
12514 fold_checksum_tree (op0, &ctx, &ht);
12515 md5_finish_ctx (&ctx, checksum_before_op0);
12516 ht.empty ();
12517
12518 md5_init_ctx (&ctx);
12519 fold_checksum_tree (op1, &ctx, &ht);
12520 md5_finish_ctx (&ctx, checksum_before_op1);
12521 ht.empty ();
12522
12523 md5_init_ctx (&ctx);
12524 fold_checksum_tree (op2, &ctx, &ht);
12525 md5_finish_ctx (&ctx, checksum_before_op2);
12526 ht.empty ();
12527 #endif
12528
12529 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12530 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12531 if (!tem)
12532 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12533
12534 #ifdef ENABLE_FOLD_CHECKING
12535 md5_init_ctx (&ctx);
12536 fold_checksum_tree (op0, &ctx, &ht);
12537 md5_finish_ctx (&ctx, checksum_after_op0);
12538 ht.empty ();
12539
12540 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12541 fold_check_failed (op0, tem);
12542
12543 md5_init_ctx (&ctx);
12544 fold_checksum_tree (op1, &ctx, &ht);
12545 md5_finish_ctx (&ctx, checksum_after_op1);
12546 ht.empty ();
12547
12548 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12549 fold_check_failed (op1, tem);
12550
12551 md5_init_ctx (&ctx);
12552 fold_checksum_tree (op2, &ctx, &ht);
12553 md5_finish_ctx (&ctx, checksum_after_op2);
12554
12555 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12556 fold_check_failed (op2, tem);
12557 #endif
12558 return tem;
12559 }
12560
12561 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12562 arguments in ARGARRAY, and a null static chain.
12563 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12564 of type TYPE from the given operands as constructed by build_call_array. */
12565
12566 tree
12567 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12568 int nargs, tree *argarray)
12569 {
12570 tree tem;
12571 #ifdef ENABLE_FOLD_CHECKING
12572 unsigned char checksum_before_fn[16],
12573 checksum_before_arglist[16],
12574 checksum_after_fn[16],
12575 checksum_after_arglist[16];
12576 struct md5_ctx ctx;
12577 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12578 int i;
12579
12580 md5_init_ctx (&ctx);
12581 fold_checksum_tree (fn, &ctx, &ht);
12582 md5_finish_ctx (&ctx, checksum_before_fn);
12583 ht.empty ();
12584
12585 md5_init_ctx (&ctx);
12586 for (i = 0; i < nargs; i++)
12587 fold_checksum_tree (argarray[i], &ctx, &ht);
12588 md5_finish_ctx (&ctx, checksum_before_arglist);
12589 ht.empty ();
12590 #endif
12591
12592 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12593 if (!tem)
12594 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12595
12596 #ifdef ENABLE_FOLD_CHECKING
12597 md5_init_ctx (&ctx);
12598 fold_checksum_tree (fn, &ctx, &ht);
12599 md5_finish_ctx (&ctx, checksum_after_fn);
12600 ht.empty ();
12601
12602 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12603 fold_check_failed (fn, tem);
12604
12605 md5_init_ctx (&ctx);
12606 for (i = 0; i < nargs; i++)
12607 fold_checksum_tree (argarray[i], &ctx, &ht);
12608 md5_finish_ctx (&ctx, checksum_after_arglist);
12609
12610 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12611 fold_check_failed (NULL_TREE, tem);
12612 #endif
12613 return tem;
12614 }
12615
12616 /* Perform constant folding and related simplification of initializer
12617 expressions. These behave identically to "fold_buildN" but ignore
12618 potential run-time traps and exceptions that fold must preserve. */
12619
12620 #define START_FOLD_INIT \
12621 int saved_signaling_nans = flag_signaling_nans;\
12622 int saved_trapping_math = flag_trapping_math;\
12623 int saved_rounding_math = flag_rounding_math;\
12624 int saved_trapv = flag_trapv;\
12625 int saved_folding_initializer = folding_initializer;\
12626 flag_signaling_nans = 0;\
12627 flag_trapping_math = 0;\
12628 flag_rounding_math = 0;\
12629 flag_trapv = 0;\
12630 folding_initializer = 1;
12631
12632 #define END_FOLD_INIT \
12633 flag_signaling_nans = saved_signaling_nans;\
12634 flag_trapping_math = saved_trapping_math;\
12635 flag_rounding_math = saved_rounding_math;\
12636 flag_trapv = saved_trapv;\
12637 folding_initializer = saved_folding_initializer;
12638
12639 tree
12640 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12641 tree type, tree op)
12642 {
12643 tree result;
12644 START_FOLD_INIT;
12645
12646 result = fold_build1_loc (loc, code, type, op);
12647
12648 END_FOLD_INIT;
12649 return result;
12650 }
12651
12652 tree
12653 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12654 tree type, tree op0, tree op1)
12655 {
12656 tree result;
12657 START_FOLD_INIT;
12658
12659 result = fold_build2_loc (loc, code, type, op0, op1);
12660
12661 END_FOLD_INIT;
12662 return result;
12663 }
12664
12665 tree
12666 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12667 int nargs, tree *argarray)
12668 {
12669 tree result;
12670 START_FOLD_INIT;
12671
12672 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12673
12674 END_FOLD_INIT;
12675 return result;
12676 }
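/* For instance, when folding a static initializer such as

     static const double d = 1.0 / 0.0;

   the *_initializer_loc wrappers clear flag_trapping_math (among other
   flags) around the fold, so the division can be folded to +Inf, whereas
   in executable code the same expression would be left unfolded to
   preserve the potential FP trap.  (A sketch of the intent; the exact
   foldings performed still depend on const_binop and friends.)  */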
12677
12678 #undef START_FOLD_INIT
12679 #undef END_FOLD_INIT
12680
12681 /* Determine whether the first argument is a multiple of the second argument.
12682 Return 0 if it is not, or if we cannot easily determine it to be.
12683
12684 An example of the sort of thing we care about (at this point; this routine
12685 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12686 fold cases do now) is discovering that
12687
12688 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12689
12690 is a multiple of
12691
12692 SAVE_EXPR (J * 8)
12693
12694 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12695
12696 This code also handles discovering that
12697
12698 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12699
12700 is a multiple of 8 so we don't have to worry about dealing with a
12701 possible remainder.
12702
12703 Note that we *look* inside a SAVE_EXPR only to determine how it was
12704 calculated; it is not safe for fold to do much of anything else with the
12705 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12706 at run time. For example, the latter example above *cannot* be implemented
12707 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12708 evaluation time of the original SAVE_EXPR is not necessarily the same at
12709 the time the new expression is evaluated. The only optimization of this
12710 sort that would be valid is changing
12711
12712 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12713
12714 divided by 8 to
12715
12716 SAVE_EXPR (I) * SAVE_EXPR (J)
12717
12718 (where the same SAVE_EXPR (J) is used in the original and the
12719 transformed version). */
12720
12721 int
12722 multiple_of_p (tree type, const_tree top, const_tree bottom)
12723 {
12724 if (operand_equal_p (top, bottom, 0))
12725 return 1;
12726
12727 if (TREE_CODE (type) != INTEGER_TYPE)
12728 return 0;
12729
12730 switch (TREE_CODE (top))
12731 {
12732 case BIT_AND_EXPR:
12733 /* Bitwise and provides a power of two multiple. If the mask is
12734 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12735 if (!integer_pow2p (bottom))
12736 return 0;
12737 /* FALLTHRU */
12738
12739 case MULT_EXPR:
12740 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12741 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12742
12743 case PLUS_EXPR:
12744 case MINUS_EXPR:
12745 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12746 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12747
12748 case LSHIFT_EXPR:
12749 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12750 {
12751 tree op1, t1;
12752
12753 op1 = TREE_OPERAND (top, 1);
12754 /* const_binop may not detect overflow correctly,
12755 so check for it explicitly here. */
12756 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12757 && 0 != (t1 = fold_convert (type,
12758 const_binop (LSHIFT_EXPR,
12759 size_one_node,
12760 op1)))
12761 && !TREE_OVERFLOW (t1))
12762 return multiple_of_p (type, t1, bottom);
12763 }
12764 return 0;
12765
12766 case NOP_EXPR:
12767 /* Can't handle conversions from a non-integral or wider integral type. */
12768 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12769 || (TYPE_PRECISION (type)
12770 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12771 return 0;
12772
12773 /* ... fall through ... */
12774
12775 case SAVE_EXPR:
12776 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12777
12778 case COND_EXPR:
12779 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12780 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12781
12782 case INTEGER_CST:
12783 if (TREE_CODE (bottom) != INTEGER_CST
12784 || integer_zerop (bottom)
12785 || (TYPE_UNSIGNED (type)
12786 && (tree_int_cst_sgn (top) < 0
12787 || tree_int_cst_sgn (bottom) < 0)))
12788 return 0;
12789 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12790 SIGNED);
12791
12792 default:
12793 return 0;
12794 }
12795 }
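/* Two illustrative calls (a sketch):

     multiple_of_p (type, X << 4, 8)   returns 1: the LSHIFT_EXPR case
       folds 1 << 4 to 16, and 16 is a multiple of 8;

     multiple_of_p (type, A + B, 4)    returns 1 only if both A and B can
       individually be shown to be multiples of 4, since PLUS_EXPR
       requires the property of both operands.  */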
12796
12797 #define tree_expr_nonnegative_warnv_p(X, Y) \
12798 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12799
12800 #define RECURSE(X) \
12801 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
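/* The #define above poisons direct calls: any plain use of
   tree_expr_nonnegative_warnv_p in the remainder of this section expands
   to a _Pragma that produces a compile-time error.  RECURSE instead
   calls the function through its parenthesized name, which suppresses
   function-like macro expansion, and threads the incremented depth:

     RECURSE (op0)
       ==> (tree_expr_nonnegative_warnv_p) (op0, strict_overflow_p,
					    depth + 1)  */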
12802
12803 /* Return true if CODE or TYPE is known to be non-negative. */
12804
12805 static bool
12806 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12807 {
12808 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12809 && truth_value_p (code))
12810 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12811 have a signed:1 type (where the values are -1 and 0). */
12812 return true;
12813 return false;
12814 }
12815
12816 /* Return true if (CODE OP0) is known to be non-negative. If the return
12817 value is based on the assumption that signed overflow is undefined,
12818 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12819 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12820
12821 bool
12822 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12823 bool *strict_overflow_p, int depth)
12824 {
12825 if (TYPE_UNSIGNED (type))
12826 return true;
12827
12828 switch (code)
12829 {
12830 case ABS_EXPR:
12831 /* We can't return 1 if flag_wrapv is set because
12832 ABS_EXPR<INT_MIN> = INT_MIN. */
12833 if (!ANY_INTEGRAL_TYPE_P (type))
12834 return true;
12835 if (TYPE_OVERFLOW_UNDEFINED (type))
12836 {
12837 *strict_overflow_p = true;
12838 return true;
12839 }
12840 break;
12841
12842 case NON_LVALUE_EXPR:
12843 case FLOAT_EXPR:
12844 case FIX_TRUNC_EXPR:
12845 return RECURSE (op0);
12846
12847 CASE_CONVERT:
12848 {
12849 tree inner_type = TREE_TYPE (op0);
12850 tree outer_type = type;
12851
12852 if (TREE_CODE (outer_type) == REAL_TYPE)
12853 {
12854 if (TREE_CODE (inner_type) == REAL_TYPE)
12855 return RECURSE (op0);
12856 if (INTEGRAL_TYPE_P (inner_type))
12857 {
12858 if (TYPE_UNSIGNED (inner_type))
12859 return true;
12860 return RECURSE (op0);
12861 }
12862 }
12863 else if (INTEGRAL_TYPE_P (outer_type))
12864 {
12865 if (TREE_CODE (inner_type) == REAL_TYPE)
12866 return RECURSE (op0);
12867 if (INTEGRAL_TYPE_P (inner_type))
12868 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12869 && TYPE_UNSIGNED (inner_type);
12870 }
12871 }
12872 break;
12873
12874 default:
12875 return tree_simple_nonnegative_warnv_p (code, type);
12876 }
12877
12878 /* We don't know the sign of `t', so be conservative and return false. */
12879 return false;
12880 }
12881
12882 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12883 value is based on the assumption that signed overflow is undefined,
12884 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12885 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12886
12887 bool
12888 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12889 tree op1, bool *strict_overflow_p,
12890 int depth)
12891 {
12892 if (TYPE_UNSIGNED (type))
12893 return true;
12894
12895 switch (code)
12896 {
12897 case POINTER_PLUS_EXPR:
12898 case PLUS_EXPR:
12899 if (FLOAT_TYPE_P (type))
12900 return RECURSE (op0) && RECURSE (op1);
12901
12902 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12903 both unsigned and at least 2 bits shorter than the result. */
12904 if (TREE_CODE (type) == INTEGER_TYPE
12905 && TREE_CODE (op0) == NOP_EXPR
12906 && TREE_CODE (op1) == NOP_EXPR)
12907 {
12908 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12909 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12910 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12911 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12912 {
12913 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12914 TYPE_PRECISION (inner2)) + 1;
12915 return prec < TYPE_PRECISION (type);
12916 }
12917 }
12918 break;
12919
12920 case MULT_EXPR:
12921 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12922 {
12923 /* x * x is always non-negative for floating point x
12924 or without overflow. */
12925 if (operand_equal_p (op0, op1, 0)
12926 || (RECURSE (op0) && RECURSE (op1)))
12927 {
12928 if (ANY_INTEGRAL_TYPE_P (type)
12929 && TYPE_OVERFLOW_UNDEFINED (type))
12930 *strict_overflow_p = true;
12931 return true;
12932 }
12933 }
12934
12935 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12936 both unsigned and their total bits is shorter than the result. */
12937 if (TREE_CODE (type) == INTEGER_TYPE
12938 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12939 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12940 {
12941 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12942 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12943 : TREE_TYPE (op0);
12944 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12945 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12946 : TREE_TYPE (op1);
12947
12948 bool unsigned0 = TYPE_UNSIGNED (inner0);
12949 bool unsigned1 = TYPE_UNSIGNED (inner1);
12950
12951 if (TREE_CODE (op0) == INTEGER_CST)
12952 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12953
12954 if (TREE_CODE (op1) == INTEGER_CST)
12955 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12956
12957 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12958 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12959 {
12960 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12961 ? tree_int_cst_min_precision (op0, UNSIGNED)
12962 : TYPE_PRECISION (inner0);
12963
12964 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12965 ? tree_int_cst_min_precision (op1, UNSIGNED)
12966 : TYPE_PRECISION (inner1);
12967
12968 return precision0 + precision1 < TYPE_PRECISION (type);
12969 }
12970 }
12971 return false;
12972
12973 case BIT_AND_EXPR:
12974 case MAX_EXPR:
12975 return RECURSE (op0) || RECURSE (op1);
12976
12977 case BIT_IOR_EXPR:
12978 case BIT_XOR_EXPR:
12979 case MIN_EXPR:
12980 case RDIV_EXPR:
12981 case TRUNC_DIV_EXPR:
12982 case CEIL_DIV_EXPR:
12983 case FLOOR_DIV_EXPR:
12984 case ROUND_DIV_EXPR:
12985 return RECURSE (op0) && RECURSE (op1);
12986
12987 case TRUNC_MOD_EXPR:
12988 return RECURSE (op0);
12989
12990 case FLOOR_MOD_EXPR:
12991 return RECURSE (op1);
12992
12993 case CEIL_MOD_EXPR:
12994 case ROUND_MOD_EXPR:
12995 default:
12996 return tree_simple_nonnegative_warnv_p (code, type);
12997 }
12998
12999 /* We don't know the sign of `t', so be conservative and return false. */
13000 return false;
13001 }
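/* A worked instance of the precision reasoning above, assuming 32-bit
   int and 16-bit unsigned short (a sketch):

     (int) us0 + (int) us1
       prec = MAX (16, 16) + 1 = 17 < 32, and indeed the sum is at most
       2 * (2^16 - 1) < 2^31, hence non-negative;

     (int) uc0 * (int) uc1		(unsigned char operands)
       precision0 + precision1 = 8 + 8 = 16 < 32, and the product is at
       most (2^8 - 1)^2 < 2^31, hence non-negative.  */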
13002
13003 /* Return true if T is known to be non-negative. If the return
13004 value is based on the assumption that signed overflow is undefined,
13005 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13006 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13007
13008 bool
13009 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13010 {
13011 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13012 return true;
13013
13014 switch (TREE_CODE (t))
13015 {
13016 case INTEGER_CST:
13017 return tree_int_cst_sgn (t) >= 0;
13018
13019 case REAL_CST:
13020 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13021
13022 case FIXED_CST:
13023 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13024
13025 case COND_EXPR:
13026 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13027
13028 case SSA_NAME:
13029 /* Limit the depth of recursion to avoid quadratic behavior.
13030 This is expected to catch almost all occurrences in practice.
13031 If this code misses important cases that unbounded recursion
13032 would not, passes that need this information could be revised
13033 to provide it through dataflow propagation. */
13034 return (!name_registered_for_update_p (t)
13035 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13036 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13037 strict_overflow_p, depth));
13038
13039 default:
13040 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13041 }
13042 }
13043
13044 /* Return true if T is known to be non-negative. If the return
13045 value is based on the assumption that signed overflow is undefined,
13046 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13047 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13048
13049 bool
13050 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13051 bool *strict_overflow_p, int depth)
13052 {
13053 switch (fn)
13054 {
13055 CASE_CFN_ACOS:
13056 CASE_CFN_ACOSH:
13057 CASE_CFN_CABS:
13058 CASE_CFN_COSH:
13059 CASE_CFN_ERFC:
13060 CASE_CFN_EXP:
13061 CASE_CFN_EXP10:
13062 CASE_CFN_EXP2:
13063 CASE_CFN_FABS:
13064 CASE_CFN_FDIM:
13065 CASE_CFN_HYPOT:
13066 CASE_CFN_POW10:
13067 CASE_CFN_FFS:
13068 CASE_CFN_PARITY:
13069 CASE_CFN_POPCOUNT:
13070 CASE_CFN_CLZ:
13071 CASE_CFN_CLRSB:
13072 case CFN_BUILT_IN_BSWAP32:
13073 case CFN_BUILT_IN_BSWAP64:
13074 /* Always true. */
13075 return true;
13076
13077 CASE_CFN_SQRT:
13078 /* sqrt(-0.0) is -0.0. */
13079 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13080 return true;
13081 return RECURSE (arg0);
13082
13083 CASE_CFN_ASINH:
13084 CASE_CFN_ATAN:
13085 CASE_CFN_ATANH:
13086 CASE_CFN_CBRT:
13087 CASE_CFN_CEIL:
13088 CASE_CFN_ERF:
13089 CASE_CFN_EXPM1:
13090 CASE_CFN_FLOOR:
13091 CASE_CFN_FMOD:
13092 CASE_CFN_FREXP:
13093 CASE_CFN_ICEIL:
13094 CASE_CFN_IFLOOR:
13095 CASE_CFN_IRINT:
13096 CASE_CFN_IROUND:
13097 CASE_CFN_LCEIL:
13098 CASE_CFN_LDEXP:
13099 CASE_CFN_LFLOOR:
13100 CASE_CFN_LLCEIL:
13101 CASE_CFN_LLFLOOR:
13102 CASE_CFN_LLRINT:
13103 CASE_CFN_LLROUND:
13104 CASE_CFN_LRINT:
13105 CASE_CFN_LROUND:
13106 CASE_CFN_MODF:
13107 CASE_CFN_NEARBYINT:
13108 CASE_CFN_RINT:
13109 CASE_CFN_ROUND:
13110 CASE_CFN_SCALB:
13111 CASE_CFN_SCALBLN:
13112 CASE_CFN_SCALBN:
13113 CASE_CFN_SIGNBIT:
13114 CASE_CFN_SIGNIFICAND:
13115 CASE_CFN_SINH:
13116 CASE_CFN_TANH:
13117 CASE_CFN_TRUNC:
13118 /* True if the 1st argument is nonnegative. */
13119 return RECURSE (arg0);
13120
13121 CASE_CFN_FMAX:
13122 /* True if the 1st OR 2nd arguments are nonnegative. */
13123 return RECURSE (arg0) || RECURSE (arg1);
13124
13125 CASE_CFN_FMIN:
13126 /* True if the 1st AND 2nd arguments are nonnegative. */
13127 return RECURSE (arg0) && RECURSE (arg1);
13128
13129 CASE_CFN_COPYSIGN:
13130 /* True if the 2nd argument is nonnegative. */
13131 return RECURSE (arg1);
13132
13133 CASE_CFN_POWI:
13134 /* True if the 1st argument is nonnegative or the second
13135 argument is an even integer. */
13136 if (TREE_CODE (arg1) == INTEGER_CST
13137 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13138 return true;
13139 return RECURSE (arg0);
13140
13141 CASE_CFN_POW:
13142 /* True if the 1st argument is nonnegative or the second
13143 argument is an even integer-valued real. */
13144 if (TREE_CODE (arg1) == REAL_CST)
13145 {
13146 REAL_VALUE_TYPE c;
13147 HOST_WIDE_INT n;
13148
13149 c = TREE_REAL_CST (arg1);
13150 n = real_to_integer (&c);
13151 if ((n & 1) == 0)
13152 {
13153 REAL_VALUE_TYPE cint;
13154 real_from_integer (&cint, VOIDmode, n, SIGNED);
13155 if (real_identical (&c, &cint))
13156 return true;
13157 }
13158 }
13159 return RECURSE (arg0);
13160
13161 default:
13162 break;
13163 }
13164 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13165 }
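/* Illustrative instances of the call rules above:

     fabs (x)		always non-negative;
     pow (x, 2.0)	non-negative regardless of X, because 2.0 is an
			even integer-valued REAL_CST;
     copysign (x, y)	non-negative exactly when Y is;
     fmin (a, b)	non-negative only when both A and B are.  */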
13166
13167 /* Return true if T is known to be non-negative. If the return
13168 value is based on the assumption that signed overflow is undefined,
13169 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13170 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13171
13172 static bool
13173 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13174 {
13175 enum tree_code code = TREE_CODE (t);
13176 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13177 return true;
13178
13179 switch (code)
13180 {
13181 case TARGET_EXPR:
13182 {
13183 tree temp = TARGET_EXPR_SLOT (t);
13184 t = TARGET_EXPR_INITIAL (t);
13185
13186 /* If the initializer is non-void, then it's a normal expression
13187 that will be assigned to the slot. */
13188 if (!VOID_TYPE_P (t))
13189 return RECURSE (t);
13190
13191 /* Otherwise, the initializer sets the slot in some way. One common
13192 way is an assignment statement at the end of the initializer. */
13193 while (1)
13194 {
13195 if (TREE_CODE (t) == BIND_EXPR)
13196 t = expr_last (BIND_EXPR_BODY (t));
13197 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13198 || TREE_CODE (t) == TRY_CATCH_EXPR)
13199 t = expr_last (TREE_OPERAND (t, 0));
13200 else if (TREE_CODE (t) == STATEMENT_LIST)
13201 t = expr_last (t);
13202 else
13203 break;
13204 }
13205 if (TREE_CODE (t) == MODIFY_EXPR
13206 && TREE_OPERAND (t, 0) == temp)
13207 return RECURSE (TREE_OPERAND (t, 1));
13208
13209 return false;
13210 }
13211
13212 case CALL_EXPR:
13213 {
13214 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13215 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13216
13217 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13218 get_call_combined_fn (t),
13219 arg0,
13220 arg1,
13221 strict_overflow_p, depth);
13222 }
13223 case COMPOUND_EXPR:
13224 case MODIFY_EXPR:
13225 return RECURSE (TREE_OPERAND (t, 1));
13226
13227 case BIND_EXPR:
13228 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13229
13230 case SAVE_EXPR:
13231 return RECURSE (TREE_OPERAND (t, 0));
13232
13233 default:
13234 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13235 }
13236 }
13237
13238 #undef RECURSE
13239 #undef tree_expr_nonnegative_warnv_p
13240
13241 /* Return true if T is known to be non-negative. If the return
13242 value is based on the assumption that signed overflow is undefined,
13243 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13244 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13245
13246 bool
13247 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13248 {
13249 enum tree_code code;
13250 if (t == error_mark_node)
13251 return false;
13252
13253 code = TREE_CODE (t);
13254 switch (TREE_CODE_CLASS (code))
13255 {
13256 case tcc_binary:
13257 case tcc_comparison:
13258 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13259 TREE_TYPE (t),
13260 TREE_OPERAND (t, 0),
13261 TREE_OPERAND (t, 1),
13262 strict_overflow_p, depth);
13263
13264 case tcc_unary:
13265 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13266 TREE_TYPE (t),
13267 TREE_OPERAND (t, 0),
13268 strict_overflow_p, depth);
13269
13270 case tcc_constant:
13271 case tcc_declaration:
13272 case tcc_reference:
13273 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13274
13275 default:
13276 break;
13277 }
13278
13279 switch (code)
13280 {
13281 case TRUTH_AND_EXPR:
13282 case TRUTH_OR_EXPR:
13283 case TRUTH_XOR_EXPR:
13284 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13285 TREE_TYPE (t),
13286 TREE_OPERAND (t, 0),
13287 TREE_OPERAND (t, 1),
13288 strict_overflow_p, depth);
13289 case TRUTH_NOT_EXPR:
13290 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13291 TREE_TYPE (t),
13292 TREE_OPERAND (t, 0),
13293 strict_overflow_p, depth);
13294
13295 case COND_EXPR:
13296 case CONSTRUCTOR:
13297 case OBJ_TYPE_REF:
13298 case ASSERT_EXPR:
13299 case ADDR_EXPR:
13300 case WITH_SIZE_EXPR:
13301 case SSA_NAME:
13302 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13303
13304 default:
13305 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13306 }
13307 }
13308
13309 /* Return true if `t' is known to be non-negative. Handle warnings
13310 about undefined signed overflow. */
13311
13312 bool
13313 tree_expr_nonnegative_p (tree t)
13314 {
13315 bool ret, strict_overflow_p;
13316
13317 strict_overflow_p = false;
13318 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13319 if (strict_overflow_p)
13320 fold_overflow_warning (("assuming signed overflow does not occur when "
13321 "determining that expression is always "
13322 "non-negative"),
13323 WARN_STRICT_OVERFLOW_MISC);
13324 return ret;
13325 }
13326
13327
13328 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13329 For floating point we further ensure that T is not denormal.
13330 Similar logic is present in nonzero_address in rtlanal.c.
13331
13332 If the return value is based on the assumption that signed overflow
13333 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13334 change *STRICT_OVERFLOW_P. */
13335
13336 bool
13337 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13338 bool *strict_overflow_p)
13339 {
13340 switch (code)
13341 {
13342 case ABS_EXPR:
13343 return tree_expr_nonzero_warnv_p (op0,
13344 strict_overflow_p);
13345
13346 case NOP_EXPR:
13347 {
13348 tree inner_type = TREE_TYPE (op0);
13349 tree outer_type = type;
13350
13351 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13352 && tree_expr_nonzero_warnv_p (op0,
13353 strict_overflow_p));
13354 }
13355 break;
13356
13357 case NON_LVALUE_EXPR:
13358 return tree_expr_nonzero_warnv_p (op0,
13359 strict_overflow_p);
13360
13361 default:
13362 break;
13363 }
13364
13365 return false;
13366 }
13367
13368 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13369 For floating point we further ensure that T is not denormal.
13370 Similar logic is present in nonzero_address in rtlanal.c.
13371
13372 If the return value is based on the assumption that signed overflow
13373 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13374 change *STRICT_OVERFLOW_P. */
13375
13376 bool
13377 tree_binary_nonzero_warnv_p (enum tree_code code,
13378 tree type,
13379 tree op0,
13380 tree op1, bool *strict_overflow_p)
13381 {
13382 bool sub_strict_overflow_p;
13383 switch (code)
13384 {
13385 case POINTER_PLUS_EXPR:
13386 case PLUS_EXPR:
13387 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13388 {
13389 /* In the presence of negative values it is hard
13390 to say anything. */
13391 sub_strict_overflow_p = false;
13392 if (!tree_expr_nonnegative_warnv_p (op0,
13393 &sub_strict_overflow_p)
13394 || !tree_expr_nonnegative_warnv_p (op1,
13395 &sub_strict_overflow_p))
13396 return false;
13397 /* One of the operands must be positive and the other non-negative. */
13398 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13399 overflows, on a two's-complement machine the sum of two
13400 nonnegative numbers can never be zero. */
13401 return (tree_expr_nonzero_warnv_p (op0,
13402 strict_overflow_p)
13403 || tree_expr_nonzero_warnv_p (op1,
13404 strict_overflow_p));
13405 }
13406 break;
13407
13408 case MULT_EXPR:
13409 if (TYPE_OVERFLOW_UNDEFINED (type))
13410 {
13411 if (tree_expr_nonzero_warnv_p (op0,
13412 strict_overflow_p)
13413 && tree_expr_nonzero_warnv_p (op1,
13414 strict_overflow_p))
13415 {
13416 *strict_overflow_p = true;
13417 return true;
13418 }
13419 }
13420 break;
13421
13422 case MIN_EXPR:
13423 sub_strict_overflow_p = false;
13424 if (tree_expr_nonzero_warnv_p (op0,
13425 &sub_strict_overflow_p)
13426 && tree_expr_nonzero_warnv_p (op1,
13427 &sub_strict_overflow_p))
13428 {
13429 if (sub_strict_overflow_p)
13430 *strict_overflow_p = true;
13431 }
13432 break;
13433
13434 case MAX_EXPR:
13435 sub_strict_overflow_p = false;
13436 if (tree_expr_nonzero_warnv_p (op0,
13437 &sub_strict_overflow_p))
13438 {
13439 if (sub_strict_overflow_p)
13440 *strict_overflow_p = true;
13441
13442 /* When both operands are nonzero, then MAX must be too. */
13443 if (tree_expr_nonzero_warnv_p (op1,
13444 strict_overflow_p))
13445 return true;
13446
13447 /* MAX where operand 0 is positive is positive. */
13448 return tree_expr_nonnegative_warnv_p (op0,
13449 strict_overflow_p);
13450 }
13451 /* MAX where operand 1 is positive is positive. */
13452 else if (tree_expr_nonzero_warnv_p (op1,
13453 &sub_strict_overflow_p)
13454 && tree_expr_nonnegative_warnv_p (op1,
13455 &sub_strict_overflow_p))
13456 {
13457 if (sub_strict_overflow_p)
13458 *strict_overflow_p = true;
13459 return true;
13460 }
13461 break;
13462
13463 case BIT_IOR_EXPR:
13464 return (tree_expr_nonzero_warnv_p (op1,
13465 strict_overflow_p)
13466 || tree_expr_nonzero_warnv_p (op0,
13467 strict_overflow_p));
13468
13469 default:
13470 break;
13471 }
13472
13473 return false;
13474 }
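/* For example, in the PLUS_EXPR case above: if X and Y are both known
   non-negative and at least one of them is known nonzero, then X + Y is
   nonzero, since the sum of two non-negative values, one positive,
   cannot wrap to zero on a two's-complement machine; hence
   *STRICT_OVERFLOW_P is deliberately left untouched there.  Contrast
   MULT_EXPR, where concluding X * Y != 0 from X != 0 and Y != 0 relies
   on overflow being undefined and therefore sets *STRICT_OVERFLOW_P.  */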
13475
13476 /* Return true when T is an address and is known to be nonzero.
13477 For floating point we further ensure that T is not denormal.
13478 Similar logic is present in nonzero_address in rtlanal.c.
13479
13480 If the return value is based on the assumption that signed overflow
13481 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13482 change *STRICT_OVERFLOW_P. */
13483
13484 bool
13485 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13486 {
13487 bool sub_strict_overflow_p;
13488 switch (TREE_CODE (t))
13489 {
13490 case INTEGER_CST:
13491 return !integer_zerop (t);
13492
13493 case ADDR_EXPR:
13494 {
13495 tree base = TREE_OPERAND (t, 0);
13496
13497 if (!DECL_P (base))
13498 base = get_base_address (base);
13499
13500 if (!base)
13501 return false;
13502
13503 /* For objects in the symbol table, check whether we know they are non-zero.
13504 Don't do anything for variables and functions before the symtab is built;
13505 it is quite possible that they will be declared weak later. */
13506 if (DECL_P (base) && decl_in_symtab_p (base))
13507 {
13508 struct symtab_node *symbol;
13509
13510 symbol = symtab_node::get_create (base);
13511 if (symbol)
13512 return symbol->nonzero_address ();
13513 else
13514 return false;
13515 }
13516
13517 /* Function local objects are never NULL. */
13518 if (DECL_P (base)
13519 && (DECL_CONTEXT (base)
13520 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13521 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13522 return true;
13523
13524 /* Constants are never weak. */
13525 if (CONSTANT_CLASS_P (base))
13526 return true;
13527
13528 return false;
13529 }
13530
13531 case COND_EXPR:
13532 sub_strict_overflow_p = false;
13533 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13534 &sub_strict_overflow_p)
13535 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13536 &sub_strict_overflow_p))
13537 {
13538 if (sub_strict_overflow_p)
13539 *strict_overflow_p = true;
13540 return true;
13541 }
13542 break;
13543
13544 default:
13545 break;
13546 }
13547 return false;
13548 }
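/* Address examples for the ADDR_EXPR case above (a sketch):

     &local_var		nonzero: function-local objects are never NULL;
     &"str"[0]		nonzero: constants are never weak;
     &global_var	deferred to symtab_node::nonzero_address, since
			the symbol might later be declared weak and
			resolve to address zero.  */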
13549
13550 #define integer_valued_real_p(X) \
13551 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13552
13553 #define RECURSE(X) \
13554 ((integer_valued_real_p) (X, depth + 1))
13555
13556 /* Return true if the floating point result of (CODE OP0) has an
13557 integer value. We also allow +Inf, -Inf and NaN to be considered
13558 integer values. Return false for signaling NaN.
13559
13560 DEPTH is the current nesting depth of the query. */
13561
13562 bool
13563 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13564 {
13565 switch (code)
13566 {
13567 case FLOAT_EXPR:
13568 return true;
13569
13570 case ABS_EXPR:
13571 return RECURSE (op0);
13572
13573 CASE_CONVERT:
13574 {
13575 tree type = TREE_TYPE (op0);
13576 if (TREE_CODE (type) == INTEGER_TYPE)
13577 return true;
13578 if (TREE_CODE (type) == REAL_TYPE)
13579 return RECURSE (op0);
13580 break;
13581 }
13582
13583 default:
13584 break;
13585 }
13586 return false;
13587 }
13588
13589 /* Return true if the floating point result of (CODE OP0 OP1) has an
13590 integer value. We also allow +Inf, -Inf and NaN to be considered
13591 integer values. Return false for signaling NaN.
13592
13593 DEPTH is the current nesting depth of the query. */
13594
13595 bool
13596 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13597 {
13598 switch (code)
13599 {
13600 case PLUS_EXPR:
13601 case MINUS_EXPR:
13602 case MULT_EXPR:
13603 case MIN_EXPR:
13604 case MAX_EXPR:
13605 return RECURSE (op0) && RECURSE (op1);
13606
13607 default:
13608 break;
13609 }
13610 return false;
13611 }
13612
13613 /* Return true if the floating point result of calling FN with arguments
13614 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13615 considered integer values. Return false for signaling NaN. If FN
13616 takes fewer than 2 arguments, the remaining ARGn are null.
13617
13618 DEPTH is the current nesting depth of the query. */
13619
13620 bool
13621 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13622 {
13623 switch (fn)
13624 {
13625 CASE_CFN_CEIL:
13626 CASE_CFN_FLOOR:
13627 CASE_CFN_NEARBYINT:
13628 CASE_CFN_RINT:
13629 CASE_CFN_ROUND:
13630 CASE_CFN_TRUNC:
13631 return true;
13632
13633 CASE_CFN_FMIN:
13634 CASE_CFN_FMAX:
13635 return RECURSE (arg0) && RECURSE (arg1);
13636
13637 default:
13638 break;
13639 }
13640 return false;
13641 }
13642
13643 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13644 has an integer value. We also allow +Inf, -Inf and NaN to be
13645 considered integer values. Return false for signaling NaN.
13646
13647 DEPTH is the current nesting depth of the query. */
13648
13649 bool
13650 integer_valued_real_single_p (tree t, int depth)
13651 {
13652 switch (TREE_CODE (t))
13653 {
13654 case REAL_CST:
13655 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13656
13657 case COND_EXPR:
13658 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13659
13660 case SSA_NAME:
13661 /* Limit the depth of recursion to avoid quadratic behavior.
13662 This is expected to catch almost all occurrences in practice.
13663 If this code misses important cases that unbounded recursion
13664 would not, passes that need this information could be revised
13665 to provide it through dataflow propagation. */
13666 return (!name_registered_for_update_p (t)
13667 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13668 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13669 depth));
13670
13671 default:
13672 break;
13673 }
13674 return false;
13675 }
13676
13677 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13678 has an integer value. We also allow +Inf, -Inf and NaN to be
13679 considered integer values. Return false for signaling NaN.
13680
13681 DEPTH is the current nesting depth of the query. */
13682
13683 static bool
13684 integer_valued_real_invalid_p (tree t, int depth)
13685 {
13686 switch (TREE_CODE (t))
13687 {
13688 case COMPOUND_EXPR:
13689 case MODIFY_EXPR:
13690 case BIND_EXPR:
13691 return RECURSE (TREE_OPERAND (t, 1));
13692
13693 case SAVE_EXPR:
13694 return RECURSE (TREE_OPERAND (t, 0));
13695
13696 default:
13697 break;
13698 }
13699 return false;
13700 }
13701
13702 #undef RECURSE
13703 #undef integer_valued_real_p
13704
13705 /* Return true if the floating point expression T has an integer value.
13706 We also allow +Inf, -Inf and NaN to be considered integer values.
13707 Return false for signaling NaN.
13708
13709 DEPTH is the current nesting depth of the query. */
13710
13711 bool
13712 integer_valued_real_p (tree t, int depth)
13713 {
13714 if (t == error_mark_node)
13715 return false;
13716
13717 tree_code code = TREE_CODE (t);
13718 switch (TREE_CODE_CLASS (code))
13719 {
13720 case tcc_binary:
13721 case tcc_comparison:
13722 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13723 TREE_OPERAND (t, 1), depth);
13724
13725 case tcc_unary:
13726 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13727
13728 case tcc_constant:
13729 case tcc_declaration:
13730 case tcc_reference:
13731 return integer_valued_real_single_p (t, depth);
13732
13733 default:
13734 break;
13735 }
13736
13737 switch (code)
13738 {
13739 case COND_EXPR:
13740 case SSA_NAME:
13741 return integer_valued_real_single_p (t, depth);
13742
13743 case CALL_EXPR:
13744 {
13745 tree arg0 = (call_expr_nargs (t) > 0
13746 ? CALL_EXPR_ARG (t, 0)
13747 : NULL_TREE);
13748 tree arg1 = (call_expr_nargs (t) > 1
13749 ? CALL_EXPR_ARG (t, 1)
13750 : NULL_TREE);
13751 return integer_valued_real_call_p (get_call_combined_fn (t),
13752 arg0, arg1, depth);
13753 }
13754
13755 default:
13756 return integer_valued_real_invalid_p (t, depth);
13757 }
13758 }
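/* Illustrative trees accepted above:

     (double) i			FLOAT_EXPR from an integer, always
				integer valued;
     trunc (x) + floor (y)	PLUS_EXPR of two integer-valued calls;
     b ? 1.0 : 2.0		COND_EXPR of two integral REAL_CSTs.

   Per the comments above, +Inf, -Inf and NaN are deliberately counted
   as integer values; only signaling NaNs are rejected.  */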
13759
13760 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13761 attempt to fold the expression to a constant without modifying TYPE,
13762 OP0 or OP1.
13763
13764 If the expression can be simplified to a constant, then return
13765 the constant. If the expression cannot be simplified to a
13766 constant, then return NULL_TREE. */
13767
13768 tree
13769 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13770 {
13771 tree tem = fold_binary (code, type, op0, op1);
13772 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13773 }
13774
13775 /* Given the components of a unary expression CODE, TYPE and OP0,
13776 attempt to fold the expression to a constant without modifying
13777 TYPE or OP0.
13778
13779 If the expression can be simplified to a constant, then return
13780 the constant. If the expression cannot be simplified to a
13781 constant, then return NULL_TREE. */
13782
13783 tree
13784 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13785 {
13786 tree tem = fold_unary (code, type, op0);
13787 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13788 }
13789
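/* A minimal usage sketch for the two helpers above, assuming the usual
   GCC tree API (build_int_cst, integer_type_node).  Constant operands
   fold to a constant node; anything non-constant yields NULL_TREE.  */

static tree
fold_to_constant_example (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);

  /* Returns the INTEGER_CST 5.  With a non-constant operand the
     result would be NULL_TREE rather than a PLUS_EXPR tree.  */
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				  two, three);
}
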
13790 /* If EXP represents referencing an element in a constant string
13791 (either via pointer arithmetic or array indexing), return the
13792 tree representing the value accessed, otherwise return NULL. */
13793
13794 tree
13795 fold_read_from_constant_string (tree exp)
13796 {
13797 if ((TREE_CODE (exp) == INDIRECT_REF
13798 || TREE_CODE (exp) == ARRAY_REF)
13799 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13800 {
13801 tree exp1 = TREE_OPERAND (exp, 0);
13802 tree index;
13803 tree string;
13804 location_t loc = EXPR_LOCATION (exp);
13805
13806 if (TREE_CODE (exp) == INDIRECT_REF)
13807 string = string_constant (exp1, &index);
13808 else
13809 {
13810 tree low_bound = array_ref_low_bound (exp);
13811 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13812
13813 /* Optimize the special case of a zero lower bound.
13814
13815 We convert the low_bound to sizetype to avoid some problems
13816 with constant folding. (E.g. suppose the lower bound is 1,
13817 and its mode is QI. Without the conversion, (ARRAY
13818 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13819 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13820 if (! integer_zerop (low_bound))
13821 index = size_diffop_loc (loc, index,
13822 fold_convert_loc (loc, sizetype, low_bound));
13823
13824 string = exp1;
13825 }
13826
13827 if (string
13828 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13829 && TREE_CODE (string) == STRING_CST
13830 && TREE_CODE (index) == INTEGER_CST
13831 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13832 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13833 == MODE_INT)
13834 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13835 return build_int_cst_type (TREE_TYPE (exp),
13836 (TREE_STRING_POINTER (string)
13837 [TREE_INT_CST_LOW (index)]));
13838 }
13839 return NULL;
13840 }
13841
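/* In source terms, the folder above turns a constant-indexed read from
   a string literal into a character constant.  A stand-alone C
   illustration of the identity being exploited:  */

#include <assert.h>

static void
constant_string_read_example (void)
{
  /* The tree for "hello"[1] is replaced by the INTEGER_CST for 'e';
     no load from the string's storage remains.  */
  assert ("hello"[1] == 'e');
}
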
13842 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13843 an integer constant, real, or fixed-point constant.
13844
13845 TYPE is the type of the result. */
13846
13847 static tree
13848 fold_negate_const (tree arg0, tree type)
13849 {
13850 tree t = NULL_TREE;
13851
13852 switch (TREE_CODE (arg0))
13853 {
13854 case INTEGER_CST:
13855 {
13856 bool overflow;
13857 wide_int val = wi::neg (arg0, &overflow);
13858 t = force_fit_type (type, val, 1,
13859 (overflow | TREE_OVERFLOW (arg0))
13860 && !TYPE_UNSIGNED (type));
13861 break;
13862 }
13863
13864 case REAL_CST:
13865 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13866 break;
13867
13868 case FIXED_CST:
13869 {
13870 FIXED_VALUE_TYPE f;
13871 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13872 &(TREE_FIXED_CST (arg0)), NULL,
13873 TYPE_SATURATING (type));
13874 t = build_fixed (type, f);
13875 /* Propagate overflow flags. */
13876 if (overflow_p | TREE_OVERFLOW (arg0))
13877 TREE_OVERFLOW (t) = 1;
13878 break;
13879 }
13880
13881 default:
13882 gcc_unreachable ();
13883 }
13884
13885 return t;
13886 }
13887
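/* The INTEGER_CST case above tracks overflow because negating the most
   negative value of a signed type is not representable; wi::neg
   reports this and force_fit_type records it on the result.  A
   stand-alone illustration of the hazard (the same point is why
   fold_abs_const below also re-checks overflow when negating):  */

#include <limits.h>

static int
negate_overflow_example (void)
{
  long long v = INT_MIN;   /* -2147483648 on a 32-bit int target */
  long long neg = -v;      /*  2147483648: does not fit in int   */
  return neg <= INT_MAX;   /* 0: the negation overflowed int     */
}
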
13888 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13889 an integer constant or real constant.
13890
13891 TYPE is the type of the result. */
13892
13893 tree
13894 fold_abs_const (tree arg0, tree type)
13895 {
13896 tree t = NULL_TREE;
13897
13898 switch (TREE_CODE (arg0))
13899 {
13900 case INTEGER_CST:
13901 {
13902 /* If the value is unsigned or non-negative, then the absolute value
13903 is the same as the ordinary value. */
13904 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13905 t = arg0;
13906
13907 /* If the value is negative, then the absolute value is
13908 its negation. */
13909 else
13910 {
13911 bool overflow;
13912 wide_int val = wi::neg (arg0, &overflow);
13913 t = force_fit_type (type, val, -1,
13914 overflow | TREE_OVERFLOW (arg0));
13915 }
13916 }
13917 break;
13918
13919 case REAL_CST:
13920 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13921 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13922 else
13923 t = arg0;
13924 break;
13925
13926 default:
13927 gcc_unreachable ();
13928 }
13929
13930 return t;
13931 }
13932
13933 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13934 constant. TYPE is the type of the result. */
13935
13936 static tree
13937 fold_not_const (const_tree arg0, tree type)
13938 {
13939 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13940
13941 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13942 }
13943
13944 /* Given CODE, a relational operator, the target type, TYPE and two
13945 constant operands OP0 and OP1, return the result of the
13946 relational operation. If the result is not a compile time
13947 constant, then return NULL_TREE. */
13948
13949 static tree
13950 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13951 {
13952 int result, invert;
13953
13954 /* From here on, the only cases we handle are when the result is
13955 known to be a constant. */
13956
13957 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13958 {
13959 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13960 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13961
13962 /* Handle the cases where either operand is a NaN. */
13963 if (real_isnan (c0) || real_isnan (c1))
13964 {
13965 switch (code)
13966 {
13967 case EQ_EXPR:
13968 case ORDERED_EXPR:
13969 result = 0;
13970 break;
13971
13972 case NE_EXPR:
13973 case UNORDERED_EXPR:
13974 case UNLT_EXPR:
13975 case UNLE_EXPR:
13976 case UNGT_EXPR:
13977 case UNGE_EXPR:
13978 case UNEQ_EXPR:
13979 result = 1;
13980 break;
13981
13982 case LT_EXPR:
13983 case LE_EXPR:
13984 case GT_EXPR:
13985 case GE_EXPR:
13986 case LTGT_EXPR:
13987 if (flag_trapping_math)
13988 return NULL_TREE;
13989 result = 0;
13990 break;
13991
13992 default:
13993 gcc_unreachable ();
13994 }
13995
13996 return constant_boolean_node (result, type);
13997 }
13998
13999 return constant_boolean_node (real_compare (code, c0, c1), type);
14000 }
14001
14002 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14003 {
14004 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14005 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14006 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14007 }
14008
14009 /* Handle equality/inequality of complex constants. */
14010 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14011 {
14012 tree rcond = fold_relational_const (code, type,
14013 TREE_REALPART (op0),
14014 TREE_REALPART (op1));
14015 tree icond = fold_relational_const (code, type,
14016 TREE_IMAGPART (op0),
14017 TREE_IMAGPART (op1));
14018 if (code == EQ_EXPR)
14019 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14020 else if (code == NE_EXPR)
14021 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14022 else
14023 return NULL_TREE;
14024 }
14025
14026 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14027 {
14028 if (!VECTOR_TYPE_P (type))
14029 {
14030 /* Have vector comparison with scalar boolean result. */
14031 bool result = true;
14032 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14033 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
14034 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
14035 {
14036 tree elem0 = VECTOR_CST_ELT (op0, i);
14037 tree elem1 = VECTOR_CST_ELT (op1, i);
14038 tree tmp = fold_relational_const (code, type, elem0, elem1);
14039 result &= integer_onep (tmp);
14040 }
14041 if (code == NE_EXPR)
14042 result = !result;
14043 return constant_boolean_node (result, type);
14044 }
14045 unsigned count = VECTOR_CST_NELTS (op0);
14046 tree *elts = XALLOCAVEC (tree, count);
14047 gcc_assert (VECTOR_CST_NELTS (op1) == count
14048 && TYPE_VECTOR_SUBPARTS (type) == count);
14049
14050 for (unsigned i = 0; i < count; i++)
14051 {
14052 tree elem_type = TREE_TYPE (type);
14053 tree elem0 = VECTOR_CST_ELT (op0, i);
14054 tree elem1 = VECTOR_CST_ELT (op1, i);
14055
14056 tree tem = fold_relational_const (code, elem_type,
14057 elem0, elem1);
14058
14059 if (tem == NULL_TREE)
14060 return NULL_TREE;
14061
14062 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
14063 }
14064
14065 return build_vector (type, elts);
14066 }
14067
14068 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14069
14070 To compute GT, swap the arguments and do LT.
14071 To compute GE, do LT and invert the result.
14072 To compute LE, swap the arguments, do LT and invert the result.
14073 To compute NE, do EQ and invert the result.
14074
14075 Therefore, the code below must handle only EQ and LT. */
14076
14077 if (code == LE_EXPR || code == GT_EXPR)
14078 {
14079 std::swap (op0, op1);
14080 code = swap_tree_comparison (code);
14081 }
14082
14083 /* Note that it is safe to invert for real values here because we
14084 have already handled the one case where it matters. */
14085
14086 invert = 0;
14087 if (code == NE_EXPR || code == GE_EXPR)
14088 {
14089 invert = 1;
14090 code = invert_tree_comparison (code, false);
14091 }
14092
14093 /* Compute a result for LT or EQ if args permit;
14094 otherwise return NULL_TREE. */
14095 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14096 {
14097 if (code == EQ_EXPR)
14098 result = tree_int_cst_equal (op0, op1);
14099 else
14100 result = tree_int_cst_lt (op0, op1);
14101 }
14102 else
14103 return NULL_TREE;
14104
14105 if (invert)
14106 result ^= 1;
14107 return constant_boolean_node (result, type);
14108 }
14109
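/* The NaN table above follows IEEE semantics, which plain C exhibits
   directly: every ordered comparison with a NaN operand is false,
   while NE and the unordered predicates are true.  A stand-alone
   check:  */

#include <assert.h>
#include <math.h>

static void
nan_compare_example (void)
{
  double n = NAN;

  assert (!(n == 1.0));          /* EQ_EXPR folds to 0.  */
  assert (!(n < 1.0));           /* LT_EXPR folds to 0, but only
				    when !flag_trapping_math.  */
  assert (n != 1.0);             /* NE_EXPR folds to 1.  */
  assert (isunordered (n, 1.0)); /* UNORDERED_EXPR folds to 1.  */
}
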
14110 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14111 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14112 itself. */
14113
14114 tree
14115 fold_build_cleanup_point_expr (tree type, tree expr)
14116 {
14117 /* If the expression does not have side effects then we don't have to wrap
14118 it with a cleanup point expression. */
14119 if (!TREE_SIDE_EFFECTS (expr))
14120 return expr;
14121
14122 /* If the expression is a RETURN_EXPR, check whether the expression inside
14123 the return, or the right-hand side of the MODIFY_EXPR inside the return,
14124 has side effects. If either of them does not, we don't need to wrap the
14125 expression in a cleanup point expression. Note we don't check the
14126 left-hand side of the MODIFY_EXPR because it should always be the return decl. */
14127 if (TREE_CODE (expr) == RETURN_EXPR)
14128 {
14129 tree op = TREE_OPERAND (expr, 0);
14130 if (!op || !TREE_SIDE_EFFECTS (op))
14131 return expr;
14132 op = TREE_OPERAND (op, 1);
14133 if (!TREE_SIDE_EFFECTS (op))
14134 return expr;
14135 }
14136
14137 return build1 (CLEANUP_POINT_EXPR, type, expr);
14138 }
14139
14140 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14141 of an indirection through OP0, or NULL_TREE if no simplification is
14142 possible. */
14143
14144 tree
14145 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14146 {
14147 tree sub = op0;
14148 tree subtype;
14149
14150 STRIP_NOPS (sub);
14151 subtype = TREE_TYPE (sub);
14152 if (!POINTER_TYPE_P (subtype))
14153 return NULL_TREE;
14154
14155 if (TREE_CODE (sub) == ADDR_EXPR)
14156 {
14157 tree op = TREE_OPERAND (sub, 0);
14158 tree optype = TREE_TYPE (op);
14159 /* *&CONST_DECL -> to the value of the const decl. */
14160 if (TREE_CODE (op) == CONST_DECL)
14161 return DECL_INITIAL (op);
14162 /* *&p => p; make sure to handle *&"str"[cst] here. */
14163 if (type == optype)
14164 {
14165 tree fop = fold_read_from_constant_string (op);
14166 if (fop)
14167 return fop;
14168 else
14169 return op;
14170 }
14171 /* *(foo *)&fooarray => fooarray[0] */
14172 else if (TREE_CODE (optype) == ARRAY_TYPE
14173 && type == TREE_TYPE (optype)
14174 && (!in_gimple_form
14175 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14176 {
14177 tree type_domain = TYPE_DOMAIN (optype);
14178 tree min_val = size_zero_node;
14179 if (type_domain && TYPE_MIN_VALUE (type_domain))
14180 min_val = TYPE_MIN_VALUE (type_domain);
14181 if (in_gimple_form
14182 && TREE_CODE (min_val) != INTEGER_CST)
14183 return NULL_TREE;
14184 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14185 NULL_TREE, NULL_TREE);
14186 }
14187 /* *(foo *)&complexfoo => __real__ complexfoo */
14188 else if (TREE_CODE (optype) == COMPLEX_TYPE
14189 && type == TREE_TYPE (optype))
14190 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14191 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14192 else if (TREE_CODE (optype) == VECTOR_TYPE
14193 && type == TREE_TYPE (optype))
14194 {
14195 tree part_width = TYPE_SIZE (type);
14196 tree index = bitsize_int (0);
14197 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14198 }
14199 }
14200
14201 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14202 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14203 {
14204 tree op00 = TREE_OPERAND (sub, 0);
14205 tree op01 = TREE_OPERAND (sub, 1);
14206
14207 STRIP_NOPS (op00);
14208 if (TREE_CODE (op00) == ADDR_EXPR)
14209 {
14210 tree op00type;
14211 op00 = TREE_OPERAND (op00, 0);
14212 op00type = TREE_TYPE (op00);
14213
14214 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14215 if (TREE_CODE (op00type) == VECTOR_TYPE
14216 && type == TREE_TYPE (op00type))
14217 {
14218 HOST_WIDE_INT offset = tree_to_shwi (op01);
14219 tree part_width = TYPE_SIZE (type);
14220 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
14221 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14222 tree index = bitsize_int (indexi);
14223
14224 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14225 return fold_build3_loc (loc,
14226 BIT_FIELD_REF, type, op00,
14227 part_width, index);
14228
14229 }
14230 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14231 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14232 && type == TREE_TYPE (op00type))
14233 {
14234 tree size = TYPE_SIZE_UNIT (type);
14235 if (tree_int_cst_equal (size, op01))
14236 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14237 }
14238 /* ((foo *)&fooarray)[1] => fooarray[1] */
14239 else if (TREE_CODE (op00type) == ARRAY_TYPE
14240 && type == TREE_TYPE (op00type))
14241 {
14242 tree type_domain = TYPE_DOMAIN (op00type);
14243 tree min_val = size_zero_node;
14244 if (type_domain && TYPE_MIN_VALUE (type_domain))
14245 min_val = TYPE_MIN_VALUE (type_domain);
14246 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14247 TYPE_SIZE_UNIT (type));
14248 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14249 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14250 NULL_TREE, NULL_TREE);
14251 }
14252 }
14253 }
14254
14255 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14256 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14257 && type == TREE_TYPE (TREE_TYPE (subtype))
14258 && (!in_gimple_form
14259 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14260 {
14261 tree type_domain;
14262 tree min_val = size_zero_node;
14263 sub = build_fold_indirect_ref_loc (loc, sub);
14264 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14265 if (type_domain && TYPE_MIN_VALUE (type_domain))
14266 min_val = TYPE_MIN_VALUE (type_domain);
14267 if (in_gimple_form
14268 && TREE_CODE (min_val) != INTEGER_CST)
14269 return NULL_TREE;
14270 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14271 NULL_TREE);
14272 }
14273
14274 return NULL_TREE;
14275 }
14276
14277 /* Builds an expression for an indirection through T, simplifying some
14278 cases. */
14279
14280 tree
14281 build_fold_indirect_ref_loc (location_t loc, tree t)
14282 {
14283 tree type = TREE_TYPE (TREE_TYPE (t));
14284 tree sub = fold_indirect_ref_1 (loc, type, t);
14285
14286 if (sub)
14287 return sub;
14288
14289 return build1_loc (loc, INDIRECT_REF, type, t);
14290 }
14291
14292 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14293
14294 tree
14295 fold_indirect_ref_loc (location_t loc, tree t)
14296 {
14297 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14298
14299 if (sub)
14300 return sub;
14301 else
14302 return t;
14303 }
14304
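/* The rewrites in fold_indirect_ref_1 correspond to source-level
   identities such as the following (stand-alone C; the array cases
   from the comments above, with int standing in for foo):  */

#include <assert.h>

static void
indirect_ref_identities_example (void)
{
  int arr[4] = { 10, 20, 30, 40 };

  /* *(foo *)&fooarray => fooarray[0] */
  assert (*(int *) &arr == arr[0]);

  /* ((foo *)&fooarray)[1] => fooarray[1] */
  assert (((int *) &arr)[1] == arr[1]);
}
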
14305 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14306 whose result is ignored. The type of the returned tree need not be
14307 the same as the original expression. */
14308
14309 tree
14310 fold_ignored_result (tree t)
14311 {
14312 if (!TREE_SIDE_EFFECTS (t))
14313 return integer_zero_node;
14314
14315 for (;;)
14316 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14317 {
14318 case tcc_unary:
14319 t = TREE_OPERAND (t, 0);
14320 break;
14321
14322 case tcc_binary:
14323 case tcc_comparison:
14324 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14325 t = TREE_OPERAND (t, 0);
14326 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14327 t = TREE_OPERAND (t, 1);
14328 else
14329 return t;
14330 break;
14331
14332 case tcc_expression:
14333 switch (TREE_CODE (t))
14334 {
14335 case COMPOUND_EXPR:
14336 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14337 return t;
14338 t = TREE_OPERAND (t, 0);
14339 break;
14340
14341 case COND_EXPR:
14342 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14343 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14344 return t;
14345 t = TREE_OPERAND (t, 0);
14346 break;
14347
14348 default:
14349 return t;
14350 }
14351 break;
14352
14353 default:
14354 return t;
14355 }
14356 }
14357
14358 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14359
14360 tree
14361 round_up_loc (location_t loc, tree value, unsigned int divisor)
14362 {
14363 tree div = NULL_TREE;
14364
14365 if (divisor == 1)
14366 return value;
14367
14368 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14369 have to do anything. Only do this check when VALUE is not a
14370 constant, because for a constant the check is more expensive than
14371 simply doing the rounding. */
14372 if (TREE_CODE (value) != INTEGER_CST)
14373 {
14374 div = build_int_cst (TREE_TYPE (value), divisor);
14375
14376 if (multiple_of_p (TREE_TYPE (value), value, div))
14377 return value;
14378 }
14379
14380 /* If divisor is a power of two, simplify this to bit manipulation. */
14381 if (divisor == (divisor & -divisor))
14382 {
14383 if (TREE_CODE (value) == INTEGER_CST)
14384 {
14385 wide_int val = value;
14386 bool overflow_p;
14387
14388 if ((val & (divisor - 1)) == 0)
14389 return value;
14390
14391 overflow_p = TREE_OVERFLOW (value);
14392 val += divisor - 1;
14393 val &= - (int) divisor;
14394 if (val == 0)
14395 overflow_p = true;
14396
14397 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14398 }
14399 else
14400 {
14401 tree t;
14402
14403 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14404 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14405 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14406 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14407 }
14408 }
14409 else
14410 {
14411 if (!div)
14412 div = build_int_cst (TREE_TYPE (value), divisor);
14413 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14414 value = size_binop_loc (loc, MULT_EXPR, value, div);
14415 }
14416
14417 return value;
14418 }
14419
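/* The power-of-two branch above relies on the identity
   round_up (v, d) == (v + d - 1) & -d.  A stand-alone check
   (round_up_pow2 is a hypothetical helper, not a GCC function):  */

#include <assert.h>

static unsigned int
round_up_pow2 (unsigned int v, unsigned int d)
{
  /* Valid only when D is a power of two, as in the branch above.  */
  return (v + d - 1) & -d;
}

static void
round_up_example (void)
{
  assert (round_up_pow2 (13, 8) == 16);
  assert (round_up_pow2 (16, 8) == 16);  /* already a multiple */
}
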
14420 /* Likewise, but round down. */
14421
14422 tree
14423 round_down_loc (location_t loc, tree value, int divisor)
14424 {
14425 tree div = NULL_TREE;
14426
14427 gcc_assert (divisor > 0);
14428 if (divisor == 1)
14429 return value;
14430
14431 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14432 have to do anything. Only do this check when VALUE is not a
14433 constant, because for a constant the check is more expensive than
14434 simply doing the rounding. */
14435 if (TREE_CODE (value) != INTEGER_CST)
14436 {
14437 div = build_int_cst (TREE_TYPE (value), divisor);
14438
14439 if (multiple_of_p (TREE_TYPE (value), value, div))
14440 return value;
14441 }
14442
14443 /* If divisor is a power of two, simplify this to bit manipulation. */
14444 if (divisor == (divisor & -divisor))
14445 {
14446 tree t;
14447
14448 t = build_int_cst (TREE_TYPE (value), -divisor);
14449 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14450 }
14451 else
14452 {
14453 if (!div)
14454 div = build_int_cst (TREE_TYPE (value), divisor);
14455 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14456 value = size_binop_loc (loc, MULT_EXPR, value, div);
14457 }
14458
14459 return value;
14460 }
14461
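/* The round-down counterpart simply masks off the low bits:
   round_down (v, d) == v & -d for a power-of-two D (again a
   hypothetical stand-alone helper, not a GCC function):  */

static unsigned int
round_down_pow2 (unsigned int v, unsigned int d)
{
  return v & -d;   /* e.g. round_down_pow2 (13, 8) == 8 */
}
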
14462 /* Returns the pointer to the base of the object addressed by EXP and
14463 extracts the information about the offset of the access, storing it
14464 in *PBITPOS and *POFFSET. */
14465
14466 static tree
14467 split_address_to_core_and_offset (tree exp,
14468 HOST_WIDE_INT *pbitpos, tree *poffset)
14469 {
14470 tree core;
14471 machine_mode mode;
14472 int unsignedp, reversep, volatilep;
14473 HOST_WIDE_INT bitsize;
14474 location_t loc = EXPR_LOCATION (exp);
14475
14476 if (TREE_CODE (exp) == ADDR_EXPR)
14477 {
14478 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14479 poffset, &mode, &unsignedp, &reversep,
14480 &volatilep, false);
14481 core = build_fold_addr_expr_loc (loc, core);
14482 }
14483 else
14484 {
14485 core = exp;
14486 *pbitpos = 0;
14487 *poffset = NULL_TREE;
14488 }
14489
14490 return core;
14491 }
14492
14493 /* Returns true if addresses of E1 and E2 differ by a constant, false
14494 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14495
14496 bool
14497 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14498 {
14499 tree core1, core2;
14500 HOST_WIDE_INT bitpos1, bitpos2;
14501 tree toffset1, toffset2, tdiff, type;
14502
14503 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14504 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14505
14506 if (bitpos1 % BITS_PER_UNIT != 0
14507 || bitpos2 % BITS_PER_UNIT != 0
14508 || !operand_equal_p (core1, core2, 0))
14509 return false;
14510
14511 if (toffset1 && toffset2)
14512 {
14513 type = TREE_TYPE (toffset1);
14514 if (type != TREE_TYPE (toffset2))
14515 toffset2 = fold_convert (type, toffset2);
14516
14517 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14518 if (!cst_and_fits_in_hwi (tdiff))
14519 return false;
14520
14521 *diff = int_cst_value (tdiff);
14522 }
14523 else if (toffset1 || toffset2)
14524 {
14525 /* If only one of the offsets is non-constant, the difference cannot
14526 be a constant. */
14527 return false;
14528 }
14529 else
14530 *diff = 0;
14531
14532 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14533 return true;
14534 }
14535
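/* What ptr_difference_const recognizes, in source terms: two addresses
   into the same object whose distance is known at compile time.  The
   byte count below is what would be stored in *DIFF.  A stand-alone
   sketch:  */

#include <assert.h>

static void
ptr_difference_example (void)
{
  int a[10];

  assert (&a[7] - &a[2] == 5);  /* five elements apart */
  assert ((char *) &a[7] - (char *) &a[2] == 5 * (long) sizeof (int));
}
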
14536 /* Return OFF converted to a pointer offset type suitable as offset for
14537 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14538 tree
14539 convert_to_ptrofftype_loc (location_t loc, tree off)
14540 {
14541 return fold_convert_loc (loc, sizetype, off);
14542 }
14543
14544 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14545 tree
14546 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14547 {
14548 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14549 ptr, convert_to_ptrofftype_loc (loc, off));
14550 }
14551
14552 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14553 tree
14554 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14555 {
14556 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14557 ptr, size_int (off));
14558 }
14559
14560 /* Return a char pointer for a C string if it is a string constant or
14561 the sum of a string constant and an integer constant, else NULL. */
14562
14563 const char *
14564 c_getstr (tree src)
14565 {
14566 tree offset_node;
14567
14568 src = string_constant (src, &offset_node);
14569 if (src == 0)
14570 return 0;
14571
14572 if (offset_node == 0)
14573 return TREE_STRING_POINTER (src);
14574 else if (!tree_fits_uhwi_p (offset_node)
14575 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14576 return 0;
14577
14578 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
14579 }