1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "tree-ssa-operands.h"
54 #include "optabs-query.h"
55 #include "cgraph.h"
56 #include "diagnostic-core.h"
57 #include "flags.h"
58 #include "alias.h"
59 #include "fold-const.h"
60 #include "fold-const-call.h"
61 #include "stor-layout.h"
62 #include "calls.h"
63 #include "tree-iterator.h"
64 #include "expr.h"
65 #include "intl.h"
66 #include "langhooks.h"
67 #include "tree-eh.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "builtins.h"
71 #include "generic-match.h"
72 #include "gimple-fold.h"
73 #include "params.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77
78 #ifndef LOAD_EXTEND_OP
79 #define LOAD_EXTEND_OP(M) UNKNOWN
80 #endif
81
82 /* Nonzero if we are folding constants inside an initializer; zero
83 otherwise. */
84 int folding_initializer = 0;
85
86 /* The following constants represent a bit based encoding of GCC's
87 comparison operators. This encoding simplifies transformations
88 on relational comparison operators, such as AND and OR. */
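/* Each of the four low bits stands for one mutually exclusive outcome of a
   comparison: bit 0 is "less than", bit 1 "equal", bit 2 "greater than" and
   bit 3 "unordered".  For two comparisons of the same operands, the bitwise
   AND and IOR of their codes therefore encode their logical AND and OR,
   e.g. COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE.  */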
89 enum comparison_code {
90 COMPCODE_FALSE = 0,
91 COMPCODE_LT = 1,
92 COMPCODE_EQ = 2,
93 COMPCODE_LE = 3,
94 COMPCODE_GT = 4,
95 COMPCODE_LTGT = 5,
96 COMPCODE_GE = 6,
97 COMPCODE_ORD = 7,
98 COMPCODE_UNORD = 8,
99 COMPCODE_UNLT = 9,
100 COMPCODE_UNEQ = 10,
101 COMPCODE_UNLE = 11,
102 COMPCODE_UNGT = 12,
103 COMPCODE_NE = 13,
104 COMPCODE_UNGE = 14,
105 COMPCODE_TRUE = 15
106 };
107
108 static bool negate_expr_p (tree);
109 static tree negate_expr (tree);
110 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
111 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
112 static enum comparison_code comparison_to_compcode (enum tree_code);
113 static enum tree_code compcode_to_comparison (enum comparison_code);
114 static int operand_equal_for_comparison_p (tree, tree, tree);
115 static int twoval_comparison_p (tree, tree *, tree *, int *);
116 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
117 static tree make_bit_field_ref (location_t, tree, tree,
118 HOST_WIDE_INT, HOST_WIDE_INT, int, int);
119 static tree optimize_bit_field_compare (location_t, enum tree_code,
120 tree, tree, tree);
121 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
122 HOST_WIDE_INT *,
123 machine_mode *, int *, int *, int *,
124 tree *, tree *);
125 static int simple_operand_p (const_tree);
126 static bool simple_operand_p_2 (tree);
127 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
128 static tree range_predecessor (tree);
129 static tree range_successor (tree);
130 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
131 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
132 static tree unextend (tree, int, int, tree);
133 static tree optimize_minmax_comparison (location_t, enum tree_code,
134 tree, tree, tree);
135 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
136 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
137 static tree fold_binary_op_with_conditional_arg (location_t,
138 enum tree_code, tree,
139 tree, tree,
140 tree, tree, int);
141 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (const_tree, const_tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (const_tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static tree fold_convert_const (enum tree_code, tree, tree);
147 static tree fold_view_convert_expr (tree, tree);
148 static bool vec_cst_ctor_to_array (tree, tree *);
149
150
151 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
152 Otherwise, return LOC. */
153
154 static location_t
155 expr_location_or (tree t, location_t loc)
156 {
157 location_t tloc = EXPR_LOCATION (t);
158 return tloc == UNKNOWN_LOCATION ? loc : tloc;
159 }
160
161 /* Similar to protected_set_expr_location, but never modify X in place;
162 if the location can and needs to be set, unshare X first. */
163
164 static inline tree
165 protected_set_expr_location_unshare (tree x, location_t loc)
166 {
167 if (CAN_HAVE_LOCATION_P (x)
168 && EXPR_LOCATION (x) != loc
169 && !(TREE_CODE (x) == SAVE_EXPR
170 || TREE_CODE (x) == TARGET_EXPR
171 || TREE_CODE (x) == BIND_EXPR))
172 {
173 x = copy_node (x);
174 SET_EXPR_LOCATION (x, loc);
175 }
176 return x;
177 }
178 \f
179 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
180 division and returns the quotient. Otherwise returns
181 NULL_TREE. */
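/* For example, given the INTEGER_CSTs 12 and 4 this returns the constant 3;
   given 12 and 5 it returns NULL_TREE because the division is inexact.  */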
182
183 tree
184 div_if_zero_remainder (const_tree arg1, const_tree arg2)
185 {
186 widest_int quo;
187
188 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
189 SIGNED, &quo))
190 return wide_int_to_tree (TREE_TYPE (arg1), quo);
191
192 return NULL_TREE;
193 }
194 \f
195 /* This is nonzero if we should defer warnings about undefined
196 overflow. This facility exists because these warnings are a
197 special case. The code to estimate loop iterations does not want
198 to issue any warnings, since it works with expressions which do not
199 occur in user code. Various bits of cleanup code call fold(), but
200 only use the result if it has certain characteristics (e.g., is a
201 constant); that code only wants to issue a warning if the result is
202 used. */
203
204 static int fold_deferring_overflow_warnings;
205
206 /* If a warning about undefined overflow is deferred, this is the
207 warning. Note that this may cause us to turn two warnings into
208 one, but that is fine since it is sufficient to only give one
209 warning per expression. */
210
211 static const char* fold_deferred_overflow_warning;
212
213 /* If a warning about undefined overflow is deferred, this is the
214 level at which the warning should be emitted. */
215
216 static enum warn_strict_overflow_code fold_deferred_overflow_code;
217
218 /* Start deferring overflow warnings. We could use a stack here to
219 permit nested calls, but at present it is not necessary. */
220
221 void
222 fold_defer_overflow_warnings (void)
223 {
224 ++fold_deferring_overflow_warnings;
225 }
226
227 /* Stop deferring overflow warnings. If there is a pending warning,
228 and ISSUE is true, then issue the warning if appropriate. STMT is
229 the statement with which the warning should be associated (used for
230 location information); STMT may be NULL. CODE is the level of the
231 warning--a warn_strict_overflow_code value. This function will use
232 the smaller of CODE and the deferred code when deciding whether to
233 issue the warning. CODE may be zero to mean to always use the
234 deferred code. */
235
236 void
237 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
238 {
239 const char *warnmsg;
240 location_t locus;
241
242 gcc_assert (fold_deferring_overflow_warnings > 0);
243 --fold_deferring_overflow_warnings;
244 if (fold_deferring_overflow_warnings > 0)
245 {
246 if (fold_deferred_overflow_warning != NULL
247 && code != 0
248 && code < (int) fold_deferred_overflow_code)
249 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
250 return;
251 }
252
253 warnmsg = fold_deferred_overflow_warning;
254 fold_deferred_overflow_warning = NULL;
255
256 if (!issue || warnmsg == NULL)
257 return;
258
259 if (gimple_no_warning_p (stmt))
260 return;
261
262 /* Use the smallest code level when deciding to issue the
263 warning. */
264 if (code == 0 || code > (int) fold_deferred_overflow_code)
265 code = fold_deferred_overflow_code;
266
267 if (!issue_strict_overflow_warning (code))
268 return;
269
270 if (stmt == NULL)
271 locus = input_location;
272 else
273 locus = gimple_location (stmt);
274 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
275 }
276
277 /* Stop deferring overflow warnings, ignoring any deferred
278 warnings. */
279
280 void
281 fold_undefer_and_ignore_overflow_warnings (void)
282 {
283 fold_undefer_overflow_warnings (false, NULL, 0);
284 }
285
286 /* Whether we are deferring overflow warnings. */
287
288 bool
289 fold_deferring_overflow_warnings_p (void)
290 {
291 return fold_deferring_overflow_warnings > 0;
292 }
293
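/* A typical caller brackets a fold with these, e.g. (a sketch only, not a
   quote of any particular caller):

     fold_defer_overflow_warnings ();
     t = fold (expr);
     ...decide whether T is actually used...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   or calls fold_undefer_and_ignore_overflow_warnings () when the result
   is discarded.  */
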
294 /* This is called when we fold something based on the fact that signed
295 overflow is undefined. */
296
297 static void
298 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
299 {
300 if (fold_deferring_overflow_warnings > 0)
301 {
302 if (fold_deferred_overflow_warning == NULL
303 || wc < fold_deferred_overflow_code)
304 {
305 fold_deferred_overflow_warning = gmsgid;
306 fold_deferred_overflow_code = wc;
307 }
308 }
309 else if (issue_strict_overflow_warning (wc))
310 warning (OPT_Wstrict_overflow, gmsgid);
311 }
312 \f
313 /* Return true if the built-in mathematical function specified by CODE
314 is odd, i.e. -f(x) == f(-x). */
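/* For example, sin (-x) == -sin (x), so a negation can be moved into the
   argument.  rint and friends are odd only when the rounding mode is
   symmetric about zero, which directed rounding modes are not; hence the
   flag_rounding_math check below.  */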
315
316 bool
317 negate_mathfn_p (combined_fn fn)
318 {
319 switch (fn)
320 {
321 CASE_CFN_ASIN:
322 CASE_CFN_ASINH:
323 CASE_CFN_ATAN:
324 CASE_CFN_ATANH:
325 CASE_CFN_CASIN:
326 CASE_CFN_CASINH:
327 CASE_CFN_CATAN:
328 CASE_CFN_CATANH:
329 CASE_CFN_CBRT:
330 CASE_CFN_CPROJ:
331 CASE_CFN_CSIN:
332 CASE_CFN_CSINH:
333 CASE_CFN_CTAN:
334 CASE_CFN_CTANH:
335 CASE_CFN_ERF:
336 CASE_CFN_LLROUND:
337 CASE_CFN_LROUND:
338 CASE_CFN_ROUND:
339 CASE_CFN_SIN:
340 CASE_CFN_SINH:
341 CASE_CFN_TAN:
342 CASE_CFN_TANH:
343 CASE_CFN_TRUNC:
344 return true;
345
346 CASE_CFN_LLRINT:
347 CASE_CFN_LRINT:
348 CASE_CFN_NEARBYINT:
349 CASE_CFN_RINT:
350 return !flag_rounding_math;
351
352 default:
353 break;
354 }
355 return false;
356 }
357
358 /* Check whether we may negate an integer constant T without causing
359 overflow. */
360
361 bool
362 may_negate_without_overflow_p (const_tree t)
363 {
364 tree type;
365
366 gcc_assert (TREE_CODE (t) == INTEGER_CST);
367
368 type = TREE_TYPE (t);
369 if (TYPE_UNSIGNED (type))
370 return false;
371
372 return !wi::only_sign_bit_p (t);
373 }
374
375 /* Determine whether an expression T can be cheaply negated using
376 the function negate_expr without introducing undefined overflow. */
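/* For example, the INTEGER_CST 1 is always cheaply negatable, whereas
   INT_MIN is negatable only if its type wraps on overflow, since -INT_MIN
   is not representable.  */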
377
378 static bool
379 negate_expr_p (tree t)
380 {
381 tree type;
382
383 if (t == 0)
384 return false;
385
386 type = TREE_TYPE (t);
387
388 STRIP_SIGN_NOPS (t);
389 switch (TREE_CODE (t))
390 {
391 case INTEGER_CST:
392 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
393 return true;
394
395 /* Check that -CST will not overflow type. */
396 return may_negate_without_overflow_p (t);
397 case BIT_NOT_EXPR:
398 return (INTEGRAL_TYPE_P (type)
399 && TYPE_OVERFLOW_WRAPS (type));
400
401 case FIXED_CST:
402 return true;
403
404 case NEGATE_EXPR:
405 return !TYPE_OVERFLOW_SANITIZED (type);
406
407 case REAL_CST:
408 /* We want to canonicalize to positive real constants. Pretend
409 that only negative ones can be easily negated. */
410 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
411
412 case COMPLEX_CST:
413 return negate_expr_p (TREE_REALPART (t))
414 && negate_expr_p (TREE_IMAGPART (t));
415
416 case VECTOR_CST:
417 {
418 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
419 return true;
420
421 int count = TYPE_VECTOR_SUBPARTS (type), i;
422
423 for (i = 0; i < count; i++)
424 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
425 return false;
426
427 return true;
428 }
429
430 case COMPLEX_EXPR:
431 return negate_expr_p (TREE_OPERAND (t, 0))
432 && negate_expr_p (TREE_OPERAND (t, 1));
433
434 case CONJ_EXPR:
435 return negate_expr_p (TREE_OPERAND (t, 0));
436
437 case PLUS_EXPR:
438 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
439 || HONOR_SIGNED_ZEROS (element_mode (type))
440 || (INTEGRAL_TYPE_P (type)
441 && ! TYPE_OVERFLOW_WRAPS (type)))
442 return false;
443 /* -(A + B) -> (-B) - A. */
444 if (negate_expr_p (TREE_OPERAND (t, 1))
445 && reorder_operands_p (TREE_OPERAND (t, 0),
446 TREE_OPERAND (t, 1)))
447 return true;
448 /* -(A + B) -> (-A) - B. */
449 return negate_expr_p (TREE_OPERAND (t, 0));
450
451 case MINUS_EXPR:
452 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
453 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
454 && !HONOR_SIGNED_ZEROS (element_mode (type))
455 && (! INTEGRAL_TYPE_P (type)
456 || TYPE_OVERFLOW_WRAPS (type))
457 && reorder_operands_p (TREE_OPERAND (t, 0),
458 TREE_OPERAND (t, 1));
459
460 case MULT_EXPR:
461 if (TYPE_UNSIGNED (type))
462 break;
463 /* INT_MIN/n * n doesn't overflow, but negating one of its operands
464 does when n is a power of two, so only non-power-of-two constants are safe. */
465 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
466 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
467 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
468 && ! integer_pow2p (TREE_OPERAND (t, 0)))
469 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
470 && ! integer_pow2p (TREE_OPERAND (t, 1)))))
471 break;
472
473 /* Fall through. */
474
475 case RDIV_EXPR:
476 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
477 return negate_expr_p (TREE_OPERAND (t, 1))
478 || negate_expr_p (TREE_OPERAND (t, 0));
479 break;
480
481 case TRUNC_DIV_EXPR:
482 case ROUND_DIV_EXPR:
483 case EXACT_DIV_EXPR:
484 if (TYPE_UNSIGNED (type))
485 break;
486 if (negate_expr_p (TREE_OPERAND (t, 0)))
487 return true;
488 /* In general we can't negate B in A / B, because if A is INT_MIN and
489 B is 1, we may turn this into INT_MIN / -1 which is undefined
490 and actually traps on some architectures. */
491 if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
492 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
493 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
494 && ! integer_onep (TREE_OPERAND (t, 1))))
495 return negate_expr_p (TREE_OPERAND (t, 1));
496 break;
497
498 case NOP_EXPR:
499 /* Negate -((double)float) as (double)(-float). */
500 if (TREE_CODE (type) == REAL_TYPE)
501 {
502 tree tem = strip_float_extensions (t);
503 if (tem != t)
504 return negate_expr_p (tem);
505 }
506 break;
507
508 case CALL_EXPR:
509 /* Negate -f(x) as f(-x). */
510 if (negate_mathfn_p (get_call_combined_fn (t)))
511 return negate_expr_p (CALL_EXPR_ARG (t, 0));
512 break;
513
514 case RSHIFT_EXPR:
515 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
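/* For 32-bit int, (int) x >> 31 is 0 or -1, so its negation is 0 or 1,
   which is exactly the value of (unsigned) x >> 31.  */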
516 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
517 {
518 tree op1 = TREE_OPERAND (t, 1);
519 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
520 return true;
521 }
522 break;
523
524 default:
525 break;
526 }
527 return false;
528 }
529
530 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
531 simplification is possible.
532 If negate_expr_p would return true for T, NULL_TREE will never be
533 returned. */
534
535 static tree
536 fold_negate_expr (location_t loc, tree t)
537 {
538 tree type = TREE_TYPE (t);
539 tree tem;
540
541 switch (TREE_CODE (t))
542 {
543 /* Convert - (~A) to A + 1. */
544 case BIT_NOT_EXPR:
545 if (INTEGRAL_TYPE_P (type))
546 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
547 build_one_cst (type));
548 break;
549
550 case INTEGER_CST:
551 tem = fold_negate_const (t, type);
552 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
553 || (ANY_INTEGRAL_TYPE_P (type)
554 && !TYPE_OVERFLOW_TRAPS (type)
555 && TYPE_OVERFLOW_WRAPS (type))
556 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
557 return tem;
558 break;
559
560 case REAL_CST:
561 tem = fold_negate_const (t, type);
562 return tem;
563
564 case FIXED_CST:
565 tem = fold_negate_const (t, type);
566 return tem;
567
568 case COMPLEX_CST:
569 {
570 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
571 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
572 if (rpart && ipart)
573 return build_complex (type, rpart, ipart);
574 }
575 break;
576
577 case VECTOR_CST:
578 {
579 int count = TYPE_VECTOR_SUBPARTS (type), i;
580 tree *elts = XALLOCAVEC (tree, count);
581
582 for (i = 0; i < count; i++)
583 {
584 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
585 if (elts[i] == NULL_TREE)
586 return NULL_TREE;
587 }
588
589 return build_vector (type, elts);
590 }
591
592 case COMPLEX_EXPR:
593 if (negate_expr_p (t))
594 return fold_build2_loc (loc, COMPLEX_EXPR, type,
595 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
596 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
597 break;
598
599 case CONJ_EXPR:
600 if (negate_expr_p (t))
601 return fold_build1_loc (loc, CONJ_EXPR, type,
602 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
603 break;
604
605 case NEGATE_EXPR:
606 if (!TYPE_OVERFLOW_SANITIZED (type))
607 return TREE_OPERAND (t, 0);
608 break;
609
610 case PLUS_EXPR:
611 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
612 && !HONOR_SIGNED_ZEROS (element_mode (type)))
613 {
614 /* -(A + B) -> (-B) - A. */
615 if (negate_expr_p (TREE_OPERAND (t, 1))
616 && reorder_operands_p (TREE_OPERAND (t, 0),
617 TREE_OPERAND (t, 1)))
618 {
619 tem = negate_expr (TREE_OPERAND (t, 1));
620 return fold_build2_loc (loc, MINUS_EXPR, type,
621 tem, TREE_OPERAND (t, 0));
622 }
623
624 /* -(A + B) -> (-A) - B. */
625 if (negate_expr_p (TREE_OPERAND (t, 0)))
626 {
627 tem = negate_expr (TREE_OPERAND (t, 0));
628 return fold_build2_loc (loc, MINUS_EXPR, type,
629 tem, TREE_OPERAND (t, 1));
630 }
631 }
632 break;
633
634 case MINUS_EXPR:
635 /* - (A - B) -> B - A */
636 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
637 && !HONOR_SIGNED_ZEROS (element_mode (type))
638 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
639 return fold_build2_loc (loc, MINUS_EXPR, type,
640 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
641 break;
642
643 case MULT_EXPR:
644 if (TYPE_UNSIGNED (type))
645 break;
646
647 /* Fall through. */
648
649 case RDIV_EXPR:
650 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
651 {
652 tem = TREE_OPERAND (t, 1);
653 if (negate_expr_p (tem))
654 return fold_build2_loc (loc, TREE_CODE (t), type,
655 TREE_OPERAND (t, 0), negate_expr (tem));
656 tem = TREE_OPERAND (t, 0);
657 if (negate_expr_p (tem))
658 return fold_build2_loc (loc, TREE_CODE (t), type,
659 negate_expr (tem), TREE_OPERAND (t, 1));
660 }
661 break;
662
663 case TRUNC_DIV_EXPR:
664 case ROUND_DIV_EXPR:
665 case EXACT_DIV_EXPR:
666 if (TYPE_UNSIGNED (type))
667 break;
668 if (negate_expr_p (TREE_OPERAND (t, 0)))
669 return fold_build2_loc (loc, TREE_CODE (t), type,
670 negate_expr (TREE_OPERAND (t, 0)),
671 TREE_OPERAND (t, 1));
672 /* In general we can't negate B in A / B, because if A is INT_MIN and
673 B is 1, we may turn this into INT_MIN / -1 which is undefined
674 and actually traps on some architectures. */
675 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
676 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
677 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
678 && ! integer_onep (TREE_OPERAND (t, 1))))
679 && negate_expr_p (TREE_OPERAND (t, 1)))
680 return fold_build2_loc (loc, TREE_CODE (t), type,
681 TREE_OPERAND (t, 0),
682 negate_expr (TREE_OPERAND (t, 1)));
683 break;
684
685 case NOP_EXPR:
686 /* Convert -((double)float) into (double)(-float). */
687 if (TREE_CODE (type) == REAL_TYPE)
688 {
689 tem = strip_float_extensions (t);
690 if (tem != t && negate_expr_p (tem))
691 return fold_convert_loc (loc, type, negate_expr (tem));
692 }
693 break;
694
695 case CALL_EXPR:
696 /* Negate -f(x) as f(-x). */
697 if (negate_mathfn_p (get_call_combined_fn (t))
698 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
699 {
700 tree fndecl, arg;
701
702 fndecl = get_callee_fndecl (t);
703 arg = negate_expr (CALL_EXPR_ARG (t, 0));
704 return build_call_expr_loc (loc, fndecl, 1, arg);
705 }
706 break;
707
708 case RSHIFT_EXPR:
709 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
710 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
711 {
712 tree op1 = TREE_OPERAND (t, 1);
713 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
714 {
715 tree ntype = TYPE_UNSIGNED (type)
716 ? signed_type_for (type)
717 : unsigned_type_for (type);
718 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
719 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
720 return fold_convert_loc (loc, type, temp);
721 }
722 }
723 break;
724
725 default:
726 break;
727 }
728
729 return NULL_TREE;
730 }
731
732 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
733 negated in a simpler way. Also allow T to be NULL_TREE, in which case
734 return NULL_TREE. */
735
736 static tree
737 negate_expr (tree t)
738 {
739 tree type, tem;
740 location_t loc;
741
742 if (t == NULL_TREE)
743 return NULL_TREE;
744
745 loc = EXPR_LOCATION (t);
746 type = TREE_TYPE (t);
747 STRIP_SIGN_NOPS (t);
748
749 tem = fold_negate_expr (loc, t);
750 if (!tem)
751 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
752 return fold_convert_loc (loc, type, tem);
753 }
754 \f
755 /* Split a tree IN into constant, literal and variable parts that could be
756 combined with CODE to make IN. "constant" means an expression with
757 TREE_CONSTANT but that isn't an actual constant. CODE must be a
758 commutative arithmetic operation. Store the constant part into *CONP,
759 the literal in *LITP and return the variable part. If a part isn't
760 present, set it to null. If the tree does not decompose in this way,
761 return the entire tree as the variable part and the other parts as null.
762
763 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
764 case, we negate an operand that was subtracted. Except if it is a
765 literal for which we use *MINUS_LITP instead.
766
767 If NEGATE_P is true, we are negating all of IN, again except a literal
768 for which we use *MINUS_LITP instead.
769
770 If IN is itself a literal or constant, return it as appropriate.
771
772 Note that we do not guarantee that any of the three values will be the
773 same type as IN, but they will have the same signedness and mode. */
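/* For example, with CODE == PLUS_EXPR, splitting "a + 3" returns "a" and
   sets *LITP to 3, while splitting "x - 5" returns "x" and sets *MINUS_LITP
   to 5: the subtracted literal is recorded rather than negated.  */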
774
775 static tree
776 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
777 tree *minus_litp, int negate_p)
778 {
779 tree var = 0;
780
781 *conp = 0;
782 *litp = 0;
783 *minus_litp = 0;
784
785 /* Strip any conversions that don't change the machine mode or signedness. */
786 STRIP_SIGN_NOPS (in);
787
788 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
789 || TREE_CODE (in) == FIXED_CST)
790 *litp = in;
791 else if (TREE_CODE (in) == code
792 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
793 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
794 /* We can associate addition and subtraction together (even
795 though the C standard doesn't say so) for integers because
796 the value is not affected. For reals, the value might be
797 affected, so we can't. */
798 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
799 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
800 {
801 tree op0 = TREE_OPERAND (in, 0);
802 tree op1 = TREE_OPERAND (in, 1);
803 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
804 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
805
806 /* First see if either of the operands is a literal, then a constant. */
807 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
808 || TREE_CODE (op0) == FIXED_CST)
809 *litp = op0, op0 = 0;
810 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
811 || TREE_CODE (op1) == FIXED_CST)
812 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
813
814 if (op0 != 0 && TREE_CONSTANT (op0))
815 *conp = op0, op0 = 0;
816 else if (op1 != 0 && TREE_CONSTANT (op1))
817 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
818
819 /* If we haven't dealt with either operand, this is not a case we can
820 decompose. Otherwise, VAR is either of the ones remaining, if any. */
821 if (op0 != 0 && op1 != 0)
822 var = in;
823 else if (op0 != 0)
824 var = op0;
825 else
826 var = op1, neg_var_p = neg1_p;
827
828 /* Now do any needed negations. */
829 if (neg_litp_p)
830 *minus_litp = *litp, *litp = 0;
831 if (neg_conp_p)
832 *conp = negate_expr (*conp);
833 if (neg_var_p)
834 var = negate_expr (var);
835 }
836 else if (TREE_CODE (in) == BIT_NOT_EXPR
837 && code == PLUS_EXPR)
838 {
839 /* -X - 1 is folded to ~X, undo that here. */
840 *minus_litp = build_one_cst (TREE_TYPE (in));
841 var = negate_expr (TREE_OPERAND (in, 0));
842 }
843 else if (TREE_CONSTANT (in))
844 *conp = in;
845 else
846 var = in;
847
848 if (negate_p)
849 {
850 if (*litp)
851 *minus_litp = *litp, *litp = 0;
852 else if (*minus_litp)
853 *litp = *minus_litp, *minus_litp = 0;
854 *conp = negate_expr (*conp);
855 var = negate_expr (var);
856 }
857
858 return var;
859 }
860
861 /* Re-associate trees split by the above function. T1 and T2 are
862 either expressions to associate or null. Return the new
863 expression, if any. LOC is the location of the new expression. If
864 we build an operation, do it in TYPE and with CODE. */
865
866 static tree
867 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
868 {
869 if (t1 == 0)
870 return t2;
871 else if (t2 == 0)
872 return t1;
873
874 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
875 try to fold this since we will have infinite recursion. But do
876 deal with any NEGATE_EXPRs. */
877 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
878 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
879 {
880 if (code == PLUS_EXPR)
881 {
882 if (TREE_CODE (t1) == NEGATE_EXPR)
883 return build2_loc (loc, MINUS_EXPR, type,
884 fold_convert_loc (loc, type, t2),
885 fold_convert_loc (loc, type,
886 TREE_OPERAND (t1, 0)));
887 else if (TREE_CODE (t2) == NEGATE_EXPR)
888 return build2_loc (loc, MINUS_EXPR, type,
889 fold_convert_loc (loc, type, t1),
890 fold_convert_loc (loc, type,
891 TREE_OPERAND (t2, 0)));
892 else if (integer_zerop (t2))
893 return fold_convert_loc (loc, type, t1);
894 }
895 else if (code == MINUS_EXPR)
896 {
897 if (integer_zerop (t2))
898 return fold_convert_loc (loc, type, t1);
899 }
900
901 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
902 fold_convert_loc (loc, type, t2));
903 }
904
905 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
906 fold_convert_loc (loc, type, t2));
907 }
908 \f
909 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
910 for use in int_const_binop, size_binop and size_diffop. */
911
912 static bool
913 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
914 {
915 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
916 return false;
917 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
918 return false;
919
920 switch (code)
921 {
922 case LSHIFT_EXPR:
923 case RSHIFT_EXPR:
924 case LROTATE_EXPR:
925 case RROTATE_EXPR:
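/* The count operand of a shift or rotate need not have the same type
   as the value being shifted, so no further matching is required.  */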
926 return true;
927
928 default:
929 break;
930 }
931
932 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
933 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
934 && TYPE_MODE (type1) == TYPE_MODE (type2);
935 }
936
937
938 /* Combine two integer constants ARG1 and ARG2 under operation CODE
939 to produce a new constant. Return NULL_TREE if we don't know how
940 to evaluate CODE at compile-time. */
941
942 static tree
943 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
944 int overflowable)
945 {
946 wide_int res;
947 tree t;
948 tree type = TREE_TYPE (arg1);
949 signop sign = TYPE_SIGN (type);
950 bool overflow = false;
951
952 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
953 TYPE_SIGN (TREE_TYPE (parg2)));
954
955 switch (code)
956 {
957 case BIT_IOR_EXPR:
958 res = wi::bit_or (arg1, arg2);
959 break;
960
961 case BIT_XOR_EXPR:
962 res = wi::bit_xor (arg1, arg2);
963 break;
964
965 case BIT_AND_EXPR:
966 res = wi::bit_and (arg1, arg2);
967 break;
968
969 case RSHIFT_EXPR:
970 case LSHIFT_EXPR:
971 if (wi::neg_p (arg2))
972 {
973 arg2 = -arg2;
974 if (code == RSHIFT_EXPR)
975 code = LSHIFT_EXPR;
976 else
977 code = RSHIFT_EXPR;
978 }
979
980 if (code == RSHIFT_EXPR)
981 /* It's unclear from the C standard whether shifts can overflow.
982 The following code ignores overflow; perhaps a C standard
983 interpretation ruling is needed. */
984 res = wi::rshift (arg1, arg2, sign);
985 else
986 res = wi::lshift (arg1, arg2);
987 break;
988
989 case RROTATE_EXPR:
990 case LROTATE_EXPR:
991 if (wi::neg_p (arg2))
992 {
993 arg2 = -arg2;
994 if (code == RROTATE_EXPR)
995 code = LROTATE_EXPR;
996 else
997 code = RROTATE_EXPR;
998 }
999
1000 if (code == RROTATE_EXPR)
1001 res = wi::rrotate (arg1, arg2);
1002 else
1003 res = wi::lrotate (arg1, arg2);
1004 break;
1005
1006 case PLUS_EXPR:
1007 res = wi::add (arg1, arg2, sign, &overflow);
1008 break;
1009
1010 case MINUS_EXPR:
1011 res = wi::sub (arg1, arg2, sign, &overflow);
1012 break;
1013
1014 case MULT_EXPR:
1015 res = wi::mul (arg1, arg2, sign, &overflow);
1016 break;
1017
1018 case MULT_HIGHPART_EXPR:
1019 res = wi::mul_high (arg1, arg2, sign);
1020 break;
1021
1022 case TRUNC_DIV_EXPR:
1023 case EXACT_DIV_EXPR:
1024 if (arg2 == 0)
1025 return NULL_TREE;
1026 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1027 break;
1028
1029 case FLOOR_DIV_EXPR:
1030 if (arg2 == 0)
1031 return NULL_TREE;
1032 res = wi::div_floor (arg1, arg2, sign, &overflow);
1033 break;
1034
1035 case CEIL_DIV_EXPR:
1036 if (arg2 == 0)
1037 return NULL_TREE;
1038 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1039 break;
1040
1041 case ROUND_DIV_EXPR:
1042 if (arg2 == 0)
1043 return NULL_TREE;
1044 res = wi::div_round (arg1, arg2, sign, &overflow);
1045 break;
1046
1047 case TRUNC_MOD_EXPR:
1048 if (arg2 == 0)
1049 return NULL_TREE;
1050 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1051 break;
1052
1053 case FLOOR_MOD_EXPR:
1054 if (arg2 == 0)
1055 return NULL_TREE;
1056 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1057 break;
1058
1059 case CEIL_MOD_EXPR:
1060 if (arg2 == 0)
1061 return NULL_TREE;
1062 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1063 break;
1064
1065 case ROUND_MOD_EXPR:
1066 if (arg2 == 0)
1067 return NULL_TREE;
1068 res = wi::mod_round (arg1, arg2, sign, &overflow);
1069 break;
1070
1071 case MIN_EXPR:
1072 res = wi::min (arg1, arg2, sign);
1073 break;
1074
1075 case MAX_EXPR:
1076 res = wi::max (arg1, arg2, sign);
1077 break;
1078
1079 default:
1080 return NULL_TREE;
1081 }
1082
1083 t = force_fit_type (type, res, overflowable,
1084 (((sign == SIGNED || overflowable == -1)
1085 && overflow)
1086 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1087
1088 return t;
1089 }
1090
1091 tree
1092 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1093 {
1094 return int_const_binop_1 (code, arg1, arg2, 1);
1095 }
1096
1097 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1098 constant. We assume ARG1 and ARG2 have the same data type, or at least
1099 are the same kind of constant and the same machine mode. Return zero if
1100 combining the constants is not allowed in the current operating mode. */
1101
1102 static tree
1103 const_binop (enum tree_code code, tree arg1, tree arg2)
1104 {
1105 /* Sanity check for the recursive cases. */
1106 if (!arg1 || !arg2)
1107 return NULL_TREE;
1108
1109 STRIP_NOPS (arg1);
1110 STRIP_NOPS (arg2);
1111
1112 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1113 {
1114 if (code == POINTER_PLUS_EXPR)
1115 return int_const_binop (PLUS_EXPR,
1116 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1117
1118 return int_const_binop (code, arg1, arg2);
1119 }
1120
1121 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1122 {
1123 machine_mode mode;
1124 REAL_VALUE_TYPE d1;
1125 REAL_VALUE_TYPE d2;
1126 REAL_VALUE_TYPE value;
1127 REAL_VALUE_TYPE result;
1128 bool inexact;
1129 tree t, type;
1130
1131 /* The following codes are handled by real_arithmetic. */
1132 switch (code)
1133 {
1134 case PLUS_EXPR:
1135 case MINUS_EXPR:
1136 case MULT_EXPR:
1137 case RDIV_EXPR:
1138 case MIN_EXPR:
1139 case MAX_EXPR:
1140 break;
1141
1142 default:
1143 return NULL_TREE;
1144 }
1145
1146 d1 = TREE_REAL_CST (arg1);
1147 d2 = TREE_REAL_CST (arg2);
1148
1149 type = TREE_TYPE (arg1);
1150 mode = TYPE_MODE (type);
1151
1152 /* Don't perform the operation if we honor signaling NaNs and
1153 either operand is a NaN. */
1154 if (HONOR_SNANS (mode)
1155 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1156 return NULL_TREE;
1157
1158 /* Don't perform the operation if it would raise a division
1159 by zero exception. */
1160 if (code == RDIV_EXPR
1161 && real_equal (&d2, &dconst0)
1162 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1163 return NULL_TREE;
1164
1165 /* If either operand is a NaN, just return it. Otherwise, set up
1166 for floating-point trap; we return an overflow. */
1167 if (REAL_VALUE_ISNAN (d1))
1168 return arg1;
1169 else if (REAL_VALUE_ISNAN (d2))
1170 return arg2;
1171
1172 inexact = real_arithmetic (&value, code, &d1, &d2);
1173 real_convert (&result, mode, &value);
1174
1175 /* Don't constant fold this floating point operation if
1176 the result has overflowed and flag_trapping_math. */
1177 if (flag_trapping_math
1178 && MODE_HAS_INFINITIES (mode)
1179 && REAL_VALUE_ISINF (result)
1180 && !REAL_VALUE_ISINF (d1)
1181 && !REAL_VALUE_ISINF (d2))
1182 return NULL_TREE;
1183
1184 /* Don't constant fold this floating point operation if the
1185 result may depend upon the run-time rounding mode and
1186 flag_rounding_math is set, or if GCC's software emulation
1187 is unable to accurately represent the result. */
1188 if ((flag_rounding_math
1189 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1190 && (inexact || !real_identical (&result, &value)))
1191 return NULL_TREE;
1192
1193 t = build_real (type, result);
1194
1195 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1196 return t;
1197 }
1198
1199 if (TREE_CODE (arg1) == FIXED_CST)
1200 {
1201 FIXED_VALUE_TYPE f1;
1202 FIXED_VALUE_TYPE f2;
1203 FIXED_VALUE_TYPE result;
1204 tree t, type;
1205 int sat_p;
1206 bool overflow_p;
1207
1208 /* The following codes are handled by fixed_arithmetic. */
1209 switch (code)
1210 {
1211 case PLUS_EXPR:
1212 case MINUS_EXPR:
1213 case MULT_EXPR:
1214 case TRUNC_DIV_EXPR:
1215 if (TREE_CODE (arg2) != FIXED_CST)
1216 return NULL_TREE;
1217 f2 = TREE_FIXED_CST (arg2);
1218 break;
1219
1220 case LSHIFT_EXPR:
1221 case RSHIFT_EXPR:
1222 {
1223 if (TREE_CODE (arg2) != INTEGER_CST)
1224 return NULL_TREE;
1225 wide_int w2 = arg2;
1226 f2.data.high = w2.elt (1);
1227 f2.data.low = w2.elt (0);
1228 f2.mode = SImode;
1229 }
1230 break;
1231
1232 default:
1233 return NULL_TREE;
1234 }
1235
1236 f1 = TREE_FIXED_CST (arg1);
1237 type = TREE_TYPE (arg1);
1238 sat_p = TYPE_SATURATING (type);
1239 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1240 t = build_fixed (type, result);
1241 /* Propagate overflow flags. */
1242 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1243 TREE_OVERFLOW (t) = 1;
1244 return t;
1245 }
1246
1247 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1248 {
1249 tree type = TREE_TYPE (arg1);
1250 tree r1 = TREE_REALPART (arg1);
1251 tree i1 = TREE_IMAGPART (arg1);
1252 tree r2 = TREE_REALPART (arg2);
1253 tree i2 = TREE_IMAGPART (arg2);
1254 tree real, imag;
1255
1256 switch (code)
1257 {
1258 case PLUS_EXPR:
1259 case MINUS_EXPR:
1260 real = const_binop (code, r1, r2);
1261 imag = const_binop (code, i1, i2);
1262 break;
1263
1264 case MULT_EXPR:
1265 if (COMPLEX_FLOAT_TYPE_P (type))
1266 return do_mpc_arg2 (arg1, arg2, type,
1267 /* do_nonfinite= */ folding_initializer,
1268 mpc_mul);
1269
1270 real = const_binop (MINUS_EXPR,
1271 const_binop (MULT_EXPR, r1, r2),
1272 const_binop (MULT_EXPR, i1, i2));
1273 imag = const_binop (PLUS_EXPR,
1274 const_binop (MULT_EXPR, r1, i2),
1275 const_binop (MULT_EXPR, i1, r2));
1276 break;
1277
1278 case RDIV_EXPR:
1279 if (COMPLEX_FLOAT_TYPE_P (type))
1280 return do_mpc_arg2 (arg1, arg2, type,
1281 /* do_nonfinite= */ folding_initializer,
1282 mpc_div);
1283 /* Fallthru ... */
1284 case TRUNC_DIV_EXPR:
1285 case CEIL_DIV_EXPR:
1286 case FLOOR_DIV_EXPR:
1287 case ROUND_DIV_EXPR:
1288 if (flag_complex_method == 0)
1289 {
1290 /* Keep this algorithm in sync with
1291 tree-complex.c:expand_complex_div_straight().
1292
1293 Expand complex division to scalars, straightforward algorithm.
1294 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1295 t = br*br + bi*bi
1296 */
1297 tree magsquared
1298 = const_binop (PLUS_EXPR,
1299 const_binop (MULT_EXPR, r2, r2),
1300 const_binop (MULT_EXPR, i2, i2));
1301 tree t1
1302 = const_binop (PLUS_EXPR,
1303 const_binop (MULT_EXPR, r1, r2),
1304 const_binop (MULT_EXPR, i1, i2));
1305 tree t2
1306 = const_binop (MINUS_EXPR,
1307 const_binop (MULT_EXPR, i1, r2),
1308 const_binop (MULT_EXPR, r1, i2));
1309
1310 real = const_binop (code, t1, magsquared);
1311 imag = const_binop (code, t2, magsquared);
1312 }
1313 else
1314 {
1315 /* Keep this algorithm in sync with
1316 tree-complex.c:expand_complex_div_wide().
1317
1318 Expand complex division to scalars, modified algorithm to minimize
1319 overflow with wide input ranges. */
1320 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1321 fold_abs_const (r2, TREE_TYPE (type)),
1322 fold_abs_const (i2, TREE_TYPE (type)));
1323
1324 if (integer_nonzerop (compare))
1325 {
1326 /* In the TRUE branch, we compute
1327 ratio = br/bi;
1328 div = (br * ratio) + bi;
1329 tr = (ar * ratio) + ai;
1330 ti = (ai * ratio) - ar;
1331 tr = tr / div;
1332 ti = ti / div; */
1333 tree ratio = const_binop (code, r2, i2);
1334 tree div = const_binop (PLUS_EXPR, i2,
1335 const_binop (MULT_EXPR, r2, ratio));
1336 real = const_binop (MULT_EXPR, r1, ratio);
1337 real = const_binop (PLUS_EXPR, real, i1);
1338 real = const_binop (code, real, div);
1339
1340 imag = const_binop (MULT_EXPR, i1, ratio);
1341 imag = const_binop (MINUS_EXPR, imag, r1);
1342 imag = const_binop (code, imag, div);
1343 }
1344 else
1345 {
1346 /* In the FALSE branch, we compute
1347 ratio = bi/br;
1348 div = (bi * ratio) + br;
1349 tr = (ai * ratio) + ar;
1350 ti = ai - (ar * ratio);
1351 tr = tr / div;
1352 ti = ti / div; */
1353 tree ratio = const_binop (code, i2, r2);
1354 tree div = const_binop (PLUS_EXPR, r2,
1355 const_binop (MULT_EXPR, i2, ratio));
1356
1357 real = const_binop (MULT_EXPR, i1, ratio);
1358 real = const_binop (PLUS_EXPR, real, r1);
1359 real = const_binop (code, real, div);
1360
1361 imag = const_binop (MULT_EXPR, r1, ratio);
1362 imag = const_binop (MINUS_EXPR, i1, imag);
1363 imag = const_binop (code, imag, div);
1364 }
1365 }
1366 break;
1367
1368 default:
1369 return NULL_TREE;
1370 }
1371
1372 if (real && imag)
1373 return build_complex (type, real, imag);
1374 }
1375
1376 if (TREE_CODE (arg1) == VECTOR_CST
1377 && TREE_CODE (arg2) == VECTOR_CST)
1378 {
1379 tree type = TREE_TYPE (arg1);
1380 int count = TYPE_VECTOR_SUBPARTS (type), i;
1381 tree *elts = XALLOCAVEC (tree, count);
1382
1383 for (i = 0; i < count; i++)
1384 {
1385 tree elem1 = VECTOR_CST_ELT (arg1, i);
1386 tree elem2 = VECTOR_CST_ELT (arg2, i);
1387
1388 elts[i] = const_binop (code, elem1, elem2);
1389
1390 /* It is possible that const_binop cannot handle the given
1391 code and returns NULL_TREE. */
1392 if (elts[i] == NULL_TREE)
1393 return NULL_TREE;
1394 }
1395
1396 return build_vector (type, elts);
1397 }
1398
1399 /* Shifts allow a scalar offset for a vector. */
1400 if (TREE_CODE (arg1) == VECTOR_CST
1401 && TREE_CODE (arg2) == INTEGER_CST)
1402 {
1403 tree type = TREE_TYPE (arg1);
1404 int count = TYPE_VECTOR_SUBPARTS (type), i;
1405 tree *elts = XALLOCAVEC (tree, count);
1406
1407 for (i = 0; i < count; i++)
1408 {
1409 tree elem1 = VECTOR_CST_ELT (arg1, i);
1410
1411 elts[i] = const_binop (code, elem1, arg2);
1412
1413 /* It is possible that const_binop cannot handle the given
1414 code and returns NULL_TREE. */
1415 if (elts[i] == NULL_TREE)
1416 return NULL_TREE;
1417 }
1418
1419 return build_vector (type, elts);
1420 }
1421 return NULL_TREE;
1422 }
1423
1424 /* Overload that adds a TYPE parameter to be able to dispatch
1425 to fold_relational_const. */
1426
1427 tree
1428 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1429 {
1430 if (TREE_CODE_CLASS (code) == tcc_comparison)
1431 return fold_relational_const (code, type, arg1, arg2);
1432
1433 /* ??? Until we make the const_binop worker take the type of the
1434 result as an argument, put those cases that need it here. */
1435 switch (code)
1436 {
1437 case COMPLEX_EXPR:
1438 if ((TREE_CODE (arg1) == REAL_CST
1439 && TREE_CODE (arg2) == REAL_CST)
1440 || (TREE_CODE (arg1) == INTEGER_CST
1441 && TREE_CODE (arg2) == INTEGER_CST))
1442 return build_complex (type, arg1, arg2);
1443 return NULL_TREE;
1444
1445 case VEC_PACK_TRUNC_EXPR:
1446 case VEC_PACK_FIX_TRUNC_EXPR:
1447 {
1448 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1449 tree *elts;
1450
1451 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1452 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1453 if (TREE_CODE (arg1) != VECTOR_CST
1454 || TREE_CODE (arg2) != VECTOR_CST)
1455 return NULL_TREE;
1456
1457 elts = XALLOCAVEC (tree, nelts);
1458 if (!vec_cst_ctor_to_array (arg1, elts)
1459 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1460 return NULL_TREE;
1461
1462 for (i = 0; i < nelts; i++)
1463 {
1464 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1465 ? NOP_EXPR : FIX_TRUNC_EXPR,
1466 TREE_TYPE (type), elts[i]);
1467 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1468 return NULL_TREE;
1469 }
1470
1471 return build_vector (type, elts);
1472 }
1473
1474 case VEC_WIDEN_MULT_LO_EXPR:
1475 case VEC_WIDEN_MULT_HI_EXPR:
1476 case VEC_WIDEN_MULT_EVEN_EXPR:
1477 case VEC_WIDEN_MULT_ODD_EXPR:
1478 {
1479 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1480 unsigned int out, ofs, scale;
1481 tree *elts;
1482
1483 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1484 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1485 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1486 return NULL_TREE;
1487
1488 elts = XALLOCAVEC (tree, nelts * 4);
1489 if (!vec_cst_ctor_to_array (arg1, elts)
1490 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1491 return NULL_TREE;
1492
1493 if (code == VEC_WIDEN_MULT_LO_EXPR)
1494 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1495 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1496 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1497 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1498 scale = 1, ofs = 0;
1499 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1500 scale = 1, ofs = 1;
1501
1502 for (out = 0; out < nelts; out++)
1503 {
1504 unsigned int in1 = (out << scale) + ofs;
1505 unsigned int in2 = in1 + nelts * 2;
1506 tree t1, t2;
1507
1508 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1509 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1510
1511 if (t1 == NULL_TREE || t2 == NULL_TREE)
1512 return NULL_TREE;
1513 elts[out] = const_binop (MULT_EXPR, t1, t2);
1514 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1515 return NULL_TREE;
1516 }
1517
1518 return build_vector (type, elts);
1519 }
1520
1521 default:;
1522 }
1523
1524 if (TREE_CODE_CLASS (code) != tcc_binary)
1525 return NULL_TREE;
1526
1527 /* Make sure type and arg0 have the same saturating flag. */
1528 gcc_checking_assert (TYPE_SATURATING (type)
1529 == TYPE_SATURATING (TREE_TYPE (arg1)));
1530
1531 return const_binop (code, arg1, arg2);
1532 }
1533
1534 /* Compute CODE ARG0 with resulting type TYPE, with ARG0 being constant.
1535 Return zero if computing the constant is not possible. */
1536
1537 tree
1538 const_unop (enum tree_code code, tree type, tree arg0)
1539 {
1540 switch (code)
1541 {
1542 CASE_CONVERT:
1543 case FLOAT_EXPR:
1544 case FIX_TRUNC_EXPR:
1545 case FIXED_CONVERT_EXPR:
1546 return fold_convert_const (code, type, arg0);
1547
1548 case ADDR_SPACE_CONVERT_EXPR:
1549 /* If the source address is 0, and the source address space
1550 cannot have a valid object at 0, fold to dest type null. */
1551 if (integer_zerop (arg0)
1552 && !(targetm.addr_space.zero_address_valid
1553 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1554 return fold_convert_const (code, type, arg0);
1555 break;
1556
1557 case VIEW_CONVERT_EXPR:
1558 return fold_view_convert_expr (type, arg0);
1559
1560 case NEGATE_EXPR:
1561 {
1562 /* Can't call fold_negate_const directly here as that doesn't
1563 handle all cases and we might not be able to negate some
1564 constants. */
1565 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1566 if (tem && CONSTANT_CLASS_P (tem))
1567 return tem;
1568 break;
1569 }
1570
1571 case ABS_EXPR:
1572 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1573 return fold_abs_const (arg0, type);
1574 break;
1575
1576 case CONJ_EXPR:
1577 if (TREE_CODE (arg0) == COMPLEX_CST)
1578 {
1579 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1580 TREE_TYPE (type));
1581 return build_complex (type, TREE_REALPART (arg0), ipart);
1582 }
1583 break;
1584
1585 case BIT_NOT_EXPR:
1586 if (TREE_CODE (arg0) == INTEGER_CST)
1587 return fold_not_const (arg0, type);
1588 /* Perform BIT_NOT_EXPR on each element individually. */
1589 else if (TREE_CODE (arg0) == VECTOR_CST)
1590 {
1591 tree *elements;
1592 tree elem;
1593 unsigned count = VECTOR_CST_NELTS (arg0), i;
1594
1595 elements = XALLOCAVEC (tree, count);
1596 for (i = 0; i < count; i++)
1597 {
1598 elem = VECTOR_CST_ELT (arg0, i);
1599 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1600 if (elem == NULL_TREE)
1601 break;
1602 elements[i] = elem;
1603 }
1604 if (i == count)
1605 return build_vector (type, elements);
1606 }
1607 break;
1608
1609 case TRUTH_NOT_EXPR:
1610 if (TREE_CODE (arg0) == INTEGER_CST)
1611 return constant_boolean_node (integer_zerop (arg0), type);
1612 break;
1613
1614 case REALPART_EXPR:
1615 if (TREE_CODE (arg0) == COMPLEX_CST)
1616 return fold_convert (type, TREE_REALPART (arg0));
1617 break;
1618
1619 case IMAGPART_EXPR:
1620 if (TREE_CODE (arg0) == COMPLEX_CST)
1621 return fold_convert (type, TREE_IMAGPART (arg0));
1622 break;
1623
1624 case VEC_UNPACK_LO_EXPR:
1625 case VEC_UNPACK_HI_EXPR:
1626 case VEC_UNPACK_FLOAT_LO_EXPR:
1627 case VEC_UNPACK_FLOAT_HI_EXPR:
1628 {
1629 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1630 tree *elts;
1631 enum tree_code subcode;
1632
1633 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1634 if (TREE_CODE (arg0) != VECTOR_CST)
1635 return NULL_TREE;
1636
1637 elts = XALLOCAVEC (tree, nelts * 2);
1638 if (!vec_cst_ctor_to_array (arg0, elts))
1639 return NULL_TREE;
1640
1641 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1642 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1643 elts += nelts;
1644
1645 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1646 subcode = NOP_EXPR;
1647 else
1648 subcode = FLOAT_EXPR;
1649
1650 for (i = 0; i < nelts; i++)
1651 {
1652 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1653 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1654 return NULL_TREE;
1655 }
1656
1657 return build_vector (type, elts);
1658 }
1659
1660 case REDUC_MIN_EXPR:
1661 case REDUC_MAX_EXPR:
1662 case REDUC_PLUS_EXPR:
1663 {
1664 unsigned int nelts, i;
1665 tree *elts;
1666 enum tree_code subcode;
1667
1668 if (TREE_CODE (arg0) != VECTOR_CST)
1669 return NULL_TREE;
1670 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1671
1672 elts = XALLOCAVEC (tree, nelts);
1673 if (!vec_cst_ctor_to_array (arg0, elts))
1674 return NULL_TREE;
1675
1676 switch (code)
1677 {
1678 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1679 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1680 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1681 default: gcc_unreachable ();
1682 }
1683
1684 for (i = 1; i < nelts; i++)
1685 {
1686 elts[0] = const_binop (subcode, elts[0], elts[i]);
1687 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1688 return NULL_TREE;
1689 }
1690
1691 return elts[0];
1692 }
1693
1694 default:
1695 break;
1696 }
1697
1698 return NULL_TREE;
1699 }
1700
1701 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1702 indicates which particular sizetype to create. */
1703
1704 tree
1705 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1706 {
1707 return build_int_cst (sizetype_tab[(int) kind], number);
1708 }
1709 \f
1710 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1711 is a tree code. The type of the result is taken from the operands.
1712 Both must be equivalent integer types, a la int_binop_types_match_p.
1713 If the operands are constant, so is the result. */
1714
1715 tree
1716 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1717 {
1718 tree type = TREE_TYPE (arg0);
1719
1720 if (arg0 == error_mark_node || arg1 == error_mark_node)
1721 return error_mark_node;
1722
1723 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1724 TREE_TYPE (arg1)));
1725
1726 /* Handle the special case of two integer constants faster. */
1727 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1728 {
1729 /* And some specific cases even faster than that. */
1730 if (code == PLUS_EXPR)
1731 {
1732 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1733 return arg1;
1734 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1735 return arg0;
1736 }
1737 else if (code == MINUS_EXPR)
1738 {
1739 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1740 return arg0;
1741 }
1742 else if (code == MULT_EXPR)
1743 {
1744 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1745 return arg1;
1746 }
1747
1748 /* Handle general case of two integer constants. For sizetype
1749 constant calculations we always want to know about overflow,
1750 even in the unsigned case. */
1751 return int_const_binop_1 (code, arg0, arg1, -1);
1752 }
1753
1754 return fold_build2_loc (loc, code, type, arg0, arg1);
1755 }
1756
1757 /* Given two values, either both of sizetype or both of bitsizetype,
1758 compute the difference between the two values. Return the value
1759 in the signed type corresponding to the type of the operands. */
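/* For example, applied to the sizetype constants 2 and 5 this computes
   5 - 2 in sizetype and returns (ssizetype) -3, avoiding the unsigned
   wraparound that folding 2 - 5 directly would produce.  */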
1760
1761 tree
1762 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1763 {
1764 tree type = TREE_TYPE (arg0);
1765 tree ctype;
1766
1767 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1768 TREE_TYPE (arg1)));
1769
1770 /* If the type is already signed, just do the simple thing. */
1771 if (!TYPE_UNSIGNED (type))
1772 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1773
1774 if (type == sizetype)
1775 ctype = ssizetype;
1776 else if (type == bitsizetype)
1777 ctype = sbitsizetype;
1778 else
1779 ctype = signed_type_for (type);
1780
1781 /* If either operand is not a constant, do the conversions to the signed
1782 type and subtract. The hardware will do the right thing with any
1783 overflow in the subtraction. */
1784 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1785 return size_binop_loc (loc, MINUS_EXPR,
1786 fold_convert_loc (loc, ctype, arg0),
1787 fold_convert_loc (loc, ctype, arg1));
1788
1789 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1790 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1791 overflow) and negate (which can't either). Special-case a result
1792 of zero while we're here. */
1793 if (tree_int_cst_equal (arg0, arg1))
1794 return build_int_cst (ctype, 0);
1795 else if (tree_int_cst_lt (arg1, arg0))
1796 return fold_convert_loc (loc, ctype,
1797 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1798 else
1799 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1800 fold_convert_loc (loc, ctype,
1801 size_binop_loc (loc,
1802 MINUS_EXPR,
1803 arg1, arg0)));
1804 }
1805 \f
1806 /* A subroutine of fold_convert_const handling conversions of an
1807 INTEGER_CST to another integer type. */
1808
1809 static tree
1810 fold_convert_const_int_from_int (tree type, const_tree arg1)
1811 {
1812 /* Given an integer constant, make a new constant with the new type,
1813 appropriately sign-extended or truncated. Use widest_int
1814 so that any extension is done according to ARG1's type. */
1815 return force_fit_type (type, wi::to_widest (arg1),
1816 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1817 TREE_OVERFLOW (arg1));
1818 }
1819
1820 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1821 to an integer type. */
1822
1823 static tree
1824 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1825 {
1826 bool overflow = false;
1827 tree t;
1828
1829 /* The following code implements the floating point to integer
1830 conversion rules required by the Java Language Specification,
1831 that IEEE NaNs are mapped to zero and values that overflow
1832 the target precision saturate, i.e. values greater than
1833 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1834 are mapped to INT_MIN. These semantics are allowed by the
1835 C and C++ standards that simply state that the behavior of
1836 FP-to-integer conversion is unspecified upon overflow. */
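/* For instance, on a target with 32-bit int, (int) 1e30 folds to INT_MAX,
   (int) -1e30 folds to INT_MIN, and a NaN folds to 0, each with
   TREE_OVERFLOW set on the resulting constant.  */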
1837
1838 wide_int val;
1839 REAL_VALUE_TYPE r;
1840 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1841
1842 switch (code)
1843 {
1844 case FIX_TRUNC_EXPR:
1845 real_trunc (&r, VOIDmode, &x);
1846 break;
1847
1848 default:
1849 gcc_unreachable ();
1850 }
1851
1852 /* If R is NaN, return zero and show we have an overflow. */
1853 if (REAL_VALUE_ISNAN (r))
1854 {
1855 overflow = true;
1856 val = wi::zero (TYPE_PRECISION (type));
1857 }
1858
1859 /* See if R is less than the lower bound or greater than the
1860 upper bound. */
1861
1862 if (! overflow)
1863 {
1864 tree lt = TYPE_MIN_VALUE (type);
1865 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1866 if (real_less (&r, &l))
1867 {
1868 overflow = true;
1869 val = lt;
1870 }
1871 }
1872
1873 if (! overflow)
1874 {
1875 tree ut = TYPE_MAX_VALUE (type);
1876 if (ut)
1877 {
1878 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1879 if (real_less (&u, &r))
1880 {
1881 overflow = true;
1882 val = ut;
1883 }
1884 }
1885 }
1886
1887 if (! overflow)
1888 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1889
1890 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1891 return t;
1892 }
1893
1894 /* A subroutine of fold_convert_const handling conversions of a
1895 FIXED_CST to an integer type. */
1896
1897 static tree
1898 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1899 {
1900 tree t;
1901 double_int temp, temp_trunc;
1902 unsigned int mode;
1903
1904 /* Right shift FIXED_CST to temp by fbit. */
1905 temp = TREE_FIXED_CST (arg1).data;
1906 mode = TREE_FIXED_CST (arg1).mode;
1907 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1908 {
1909 temp = temp.rshift (GET_MODE_FBIT (mode),
1910 HOST_BITS_PER_DOUBLE_INT,
1911 SIGNED_FIXED_POINT_MODE_P (mode));
1912
1913 /* Left shift temp to temp_trunc by fbit. */
1914 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1915 HOST_BITS_PER_DOUBLE_INT,
1916 SIGNED_FIXED_POINT_MODE_P (mode));
1917 }
1918 else
1919 {
1920 temp = double_int_zero;
1921 temp_trunc = double_int_zero;
1922 }
1923
1924 /* If FIXED_CST is negative, we need to round the value toward 0.
1925 We do this by adding 1 to temp when the fractional bits are nonzero. */
1926 if (SIGNED_FIXED_POINT_MODE_P (mode)
1927 && temp_trunc.is_negative ()
1928 && TREE_FIXED_CST (arg1).data != temp_trunc)
1929 temp += double_int_one;
1930
1931 /* Given a fixed-point constant, make new constant with new type,
1932 appropriately sign-extended or truncated. */
1933 t = force_fit_type (type, temp, -1,
1934 (temp.is_negative ()
1935 && (TYPE_UNSIGNED (type)
1936 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1937 | TREE_OVERFLOW (arg1));
1938
1939 return t;
1940 }
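
/* Worked example (ours): in a signed fixed-point mode with fbit == 8,
   the value -2.5 is stored as -640 (-2.5 * 256).  The arithmetic right
   shift by 8 gives temp == -3 (the shift rounds toward -Inf), and
   shifting back left gives temp_trunc == -768 != -640, so fractional
   bits were lost; adding 1 corrects temp to -2, i.e. the conversion
   truncates toward zero as intended. */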
1941
1942 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1943 to another floating point type. */
1944
1945 static tree
1946 fold_convert_const_real_from_real (tree type, const_tree arg1)
1947 {
1948 REAL_VALUE_TYPE value;
1949 tree t;
1950
1951 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1952 t = build_real (type, value);
1953
1954 /* If converting an infinity or NAN to a representation that doesn't
1955 have one, set the overflow bit so that we can produce some kind of
1956 error message at the appropriate point if necessary. It's not the
1957 most user-friendly message, but it's better than nothing. */
1958 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1959 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1960 TREE_OVERFLOW (t) = 1;
1961 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1962 && !MODE_HAS_NANS (TYPE_MODE (type)))
1963 TREE_OVERFLOW (t) = 1;
1964 /* Regular overflow, conversion produced an infinity in a mode that
1965 can't represent infinities. */
1966 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1967 && REAL_VALUE_ISINF (value)
1968 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1969 TREE_OVERFLOW (t) = 1;
1970 else
1971 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1972 return t;
1973 }
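
/* Example (ours): on an IEEE target, (float) DBL_MAX folds to +Inf
   without setting TREE_OVERFLOW, because SFmode has infinities.  The
   flags above only fire for formats lacking infinities or NaNs (e.g.
   the VAX floating-point formats), where an infinite or NaN operand,
   or a conversion that would produce an infinity, has no
   representation. */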
1974
1975 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1976 to a floating point type. */
1977
1978 static tree
1979 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1980 {
1981 REAL_VALUE_TYPE value;
1982 tree t;
1983
1984 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1985 t = build_real (type, value);
1986
1987 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1988 return t;
1989 }
1990
1991 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1992 to another fixed-point type. */
1993
1994 static tree
1995 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1996 {
1997 FIXED_VALUE_TYPE value;
1998 tree t;
1999 bool overflow_p;
2000
2001 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2002 TYPE_SATURATING (type));
2003 t = build_fixed (type, value);
2004
2005 /* Propagate overflow flags. */
2006 if (overflow_p | TREE_OVERFLOW (arg1))
2007 TREE_OVERFLOW (t) = 1;
2008 return t;
2009 }
2010
2011 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2012 to a fixed-point type. */
2013
2014 static tree
2015 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2016 {
2017 FIXED_VALUE_TYPE value;
2018 tree t;
2019 bool overflow_p;
2020 double_int di;
2021
2022 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2023
2024 di.low = TREE_INT_CST_ELT (arg1, 0);
2025 if (TREE_INT_CST_NUNITS (arg1) == 1)
2026 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2027 else
2028 di.high = TREE_INT_CST_ELT (arg1, 1);
2029
2030 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2031 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2032 TYPE_SATURATING (type));
2033 t = build_fixed (type, value);
2034
2035 /* Propagate overflow flags. */
2036 if (overflow_p | TREE_OVERFLOW (arg1))
2037 TREE_OVERFLOW (t) = 1;
2038 return t;
2039 }
2040
2041 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2042 to a fixed-point type. */
2043
2044 static tree
2045 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2046 {
2047 FIXED_VALUE_TYPE value;
2048 tree t;
2049 bool overflow_p;
2050
2051 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2052 &TREE_REAL_CST (arg1),
2053 TYPE_SATURATING (type));
2054 t = build_fixed (type, value);
2055
2056 /* Propagate overflow flags. */
2057 if (overflow_p | TREE_OVERFLOW (arg1))
2058 TREE_OVERFLOW (t) = 1;
2059 return t;
2060 }
2061
2062 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2063 type TYPE. If no simplification can be done return NULL_TREE. */
2064
2065 static tree
2066 fold_convert_const (enum tree_code code, tree type, tree arg1)
2067 {
2068 if (TREE_TYPE (arg1) == type)
2069 return arg1;
2070
2071 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2072 || TREE_CODE (type) == OFFSET_TYPE)
2073 {
2074 if (TREE_CODE (arg1) == INTEGER_CST)
2075 return fold_convert_const_int_from_int (type, arg1);
2076 else if (TREE_CODE (arg1) == REAL_CST)
2077 return fold_convert_const_int_from_real (code, type, arg1);
2078 else if (TREE_CODE (arg1) == FIXED_CST)
2079 return fold_convert_const_int_from_fixed (type, arg1);
2080 }
2081 else if (TREE_CODE (type) == REAL_TYPE)
2082 {
2083 if (TREE_CODE (arg1) == INTEGER_CST)
2084 return build_real_from_int_cst (type, arg1);
2085 else if (TREE_CODE (arg1) == REAL_CST)
2086 return fold_convert_const_real_from_real (type, arg1);
2087 else if (TREE_CODE (arg1) == FIXED_CST)
2088 return fold_convert_const_real_from_fixed (type, arg1);
2089 }
2090 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2091 {
2092 if (TREE_CODE (arg1) == FIXED_CST)
2093 return fold_convert_const_fixed_from_fixed (type, arg1);
2094 else if (TREE_CODE (arg1) == INTEGER_CST)
2095 return fold_convert_const_fixed_from_int (type, arg1);
2096 else if (TREE_CODE (arg1) == REAL_CST)
2097 return fold_convert_const_fixed_from_real (type, arg1);
2098 }
2099 else if (TREE_CODE (type) == VECTOR_TYPE)
2100 {
2101 if (TREE_CODE (arg1) == VECTOR_CST
2102 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2103 {
2104 int len = TYPE_VECTOR_SUBPARTS (type);
2105 tree elttype = TREE_TYPE (type);
2106 tree *v = XALLOCAVEC (tree, len);
2107 for (int i = 0; i < len; ++i)
2108 {
2109 tree elt = VECTOR_CST_ELT (arg1, i);
2110 tree cvt = fold_convert_const (code, elttype, elt);
2111 if (cvt == NULL_TREE)
2112 return NULL_TREE;
2113 v[i] = cvt;
2114 }
2115 return build_vector (type, v);
2116 }
2117 }
2118 return NULL_TREE;
2119 }
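
/* Hypothetical usage sketch (ours): folding the C expression
   (unsigned char) 260 amounts to

     tree t = fold_convert_const (NOP_EXPR, unsigned_char_type_node,
                                  build_int_cst (integer_type_node, 260));

   which dispatches to fold_convert_const_int_from_int and yields an
   INTEGER_CST of value 4 (260 truncated to 8 bits); no overflow flag
   is set, since the target type is unsigned and wrap-around is well
   defined there. */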
2120
2121 /* Construct a vector of zero elements of vector type TYPE. */
2122
2123 static tree
2124 build_zero_vector (tree type)
2125 {
2126 tree t;
2127
2128 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2129 return build_vector_from_val (type, t);
2130 }
2131
2132 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2133
2134 bool
2135 fold_convertible_p (const_tree type, const_tree arg)
2136 {
2137 tree orig = TREE_TYPE (arg);
2138
2139 if (type == orig)
2140 return true;
2141
2142 if (TREE_CODE (arg) == ERROR_MARK
2143 || TREE_CODE (type) == ERROR_MARK
2144 || TREE_CODE (orig) == ERROR_MARK)
2145 return false;
2146
2147 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2148 return true;
2149
2150 switch (TREE_CODE (type))
2151 {
2152 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2153 case POINTER_TYPE: case REFERENCE_TYPE:
2154 case OFFSET_TYPE:
2155 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2156 || TREE_CODE (orig) == OFFSET_TYPE)
2157 return true;
2158 return (TREE_CODE (orig) == VECTOR_TYPE
2159 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2160
2161 case REAL_TYPE:
2162 case FIXED_POINT_TYPE:
2163 case COMPLEX_TYPE:
2164 case VECTOR_TYPE:
2165 case VOID_TYPE:
2166 return TREE_CODE (type) == TREE_CODE (orig);
2167
2168 default:
2169 return false;
2170 }
2171 }
2172
2173 /* Convert expression ARG to type TYPE. Used by the middle-end for
2174 simple conversions in preference to calling the front-end's convert. */
2175
2176 tree
2177 fold_convert_loc (location_t loc, tree type, tree arg)
2178 {
2179 tree orig = TREE_TYPE (arg);
2180 tree tem;
2181
2182 if (type == orig)
2183 return arg;
2184
2185 if (TREE_CODE (arg) == ERROR_MARK
2186 || TREE_CODE (type) == ERROR_MARK
2187 || TREE_CODE (orig) == ERROR_MARK)
2188 return error_mark_node;
2189
2190 switch (TREE_CODE (type))
2191 {
2192 case POINTER_TYPE:
2193 case REFERENCE_TYPE:
2194 /* Handle conversions between pointers to different address spaces. */
2195 if (POINTER_TYPE_P (orig)
2196 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2197 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2198 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2199 /* fall through */
2200
2201 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2202 case OFFSET_TYPE:
2203 if (TREE_CODE (arg) == INTEGER_CST)
2204 {
2205 tem = fold_convert_const (NOP_EXPR, type, arg);
2206 if (tem != NULL_TREE)
2207 return tem;
2208 }
2209 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2210 || TREE_CODE (orig) == OFFSET_TYPE)
2211 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2212 if (TREE_CODE (orig) == COMPLEX_TYPE)
2213 return fold_convert_loc (loc, type,
2214 fold_build1_loc (loc, REALPART_EXPR,
2215 TREE_TYPE (orig), arg));
2216 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2217 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2218 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2219
2220 case REAL_TYPE:
2221 if (TREE_CODE (arg) == INTEGER_CST)
2222 {
2223 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2224 if (tem != NULL_TREE)
2225 return tem;
2226 }
2227 else if (TREE_CODE (arg) == REAL_CST)
2228 {
2229 tem = fold_convert_const (NOP_EXPR, type, arg);
2230 if (tem != NULL_TREE)
2231 return tem;
2232 }
2233 else if (TREE_CODE (arg) == FIXED_CST)
2234 {
2235 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2236 if (tem != NULL_TREE)
2237 return tem;
2238 }
2239
2240 switch (TREE_CODE (orig))
2241 {
2242 case INTEGER_TYPE:
2243 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2244 case POINTER_TYPE: case REFERENCE_TYPE:
2245 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2246
2247 case REAL_TYPE:
2248 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2249
2250 case FIXED_POINT_TYPE:
2251 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2252
2253 case COMPLEX_TYPE:
2254 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2255 return fold_convert_loc (loc, type, tem);
2256
2257 default:
2258 gcc_unreachable ();
2259 }
2260
2261 case FIXED_POINT_TYPE:
2262 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2263 || TREE_CODE (arg) == REAL_CST)
2264 {
2265 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2266 if (tem != NULL_TREE)
2267 goto fold_convert_exit;
2268 }
2269
2270 switch (TREE_CODE (orig))
2271 {
2272 case FIXED_POINT_TYPE:
2273 case INTEGER_TYPE:
2274 case ENUMERAL_TYPE:
2275 case BOOLEAN_TYPE:
2276 case REAL_TYPE:
2277 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2278
2279 case COMPLEX_TYPE:
2280 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2281 return fold_convert_loc (loc, type, tem);
2282
2283 default:
2284 gcc_unreachable ();
2285 }
2286
2287 case COMPLEX_TYPE:
2288 switch (TREE_CODE (orig))
2289 {
2290 case INTEGER_TYPE:
2291 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2292 case POINTER_TYPE: case REFERENCE_TYPE:
2293 case REAL_TYPE:
2294 case FIXED_POINT_TYPE:
2295 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2296 fold_convert_loc (loc, TREE_TYPE (type), arg),
2297 fold_convert_loc (loc, TREE_TYPE (type),
2298 integer_zero_node));
2299 case COMPLEX_TYPE:
2300 {
2301 tree rpart, ipart;
2302
2303 if (TREE_CODE (arg) == COMPLEX_EXPR)
2304 {
2305 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2306 TREE_OPERAND (arg, 0));
2307 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2308 TREE_OPERAND (arg, 1));
2309 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2310 }
2311
2312 arg = save_expr (arg);
2313 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2314 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2315 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2316 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2317 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2318 }
2319
2320 default:
2321 gcc_unreachable ();
2322 }
2323
2324 case VECTOR_TYPE:
2325 if (integer_zerop (arg))
2326 return build_zero_vector (type);
2327 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2328 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2329 || TREE_CODE (orig) == VECTOR_TYPE);
2330 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2331
2332 case VOID_TYPE:
2333 tem = fold_ignored_result (arg);
2334 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2335
2336 default:
2337 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2338 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2339 gcc_unreachable ();
2340 }
2341 fold_convert_exit:
2342 protected_set_expr_location_unshare (tem, loc);
2343 return tem;
2344 }
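
/* Example (ours): converting a _Complex double value z to double above
   reduces to REALPART_EXPR <z>, and the complex-to-complex case
   converts the real and imaginary parts separately, wrapping z in a
   SAVE_EXPR first so that z is evaluated only once. */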
2345 \f
2346 /* Return false if expr can be assumed not to be an lvalue, true
2347 otherwise. */
2348
2349 static bool
2350 maybe_lvalue_p (const_tree x)
2351 {
2352 /* We only need to wrap lvalue tree codes. */
2353 switch (TREE_CODE (x))
2354 {
2355 case VAR_DECL:
2356 case PARM_DECL:
2357 case RESULT_DECL:
2358 case LABEL_DECL:
2359 case FUNCTION_DECL:
2360 case SSA_NAME:
2361
2362 case COMPONENT_REF:
2363 case MEM_REF:
2364 case INDIRECT_REF:
2365 case ARRAY_REF:
2366 case ARRAY_RANGE_REF:
2367 case BIT_FIELD_REF:
2368 case OBJ_TYPE_REF:
2369
2370 case REALPART_EXPR:
2371 case IMAGPART_EXPR:
2372 case PREINCREMENT_EXPR:
2373 case PREDECREMENT_EXPR:
2374 case SAVE_EXPR:
2375 case TRY_CATCH_EXPR:
2376 case WITH_CLEANUP_EXPR:
2377 case COMPOUND_EXPR:
2378 case MODIFY_EXPR:
2379 case TARGET_EXPR:
2380 case COND_EXPR:
2381 case BIND_EXPR:
2382 break;
2383
2384 default:
2385 /* Assume the worst for front-end tree codes. */
2386 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2387 break;
2388 return false;
2389 }
2390
2391 return true;
2392 }
2393
2394 /* Return an expr equal to X but certainly not valid as an lvalue. */
2395
2396 tree
2397 non_lvalue_loc (location_t loc, tree x)
2398 {
2399 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2400 us. */
2401 if (in_gimple_form)
2402 return x;
2403
2404 if (! maybe_lvalue_p (x))
2405 return x;
2406 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2407 }
2408
2409 /* When pedantic, return an expr equal to X but certainly not valid as a
2410 pedantic lvalue. Otherwise, return X. */
2411
2412 static tree
2413 pedantic_non_lvalue_loc (location_t loc, tree x)
2414 {
2415 return protected_set_expr_location_unshare (x, loc);
2416 }
2417 \f
2418 /* Given a tree comparison code, return the code that is the logical inverse.
2419 It is generally not safe to do this for floating-point comparisons, except
2420 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2421 ERROR_MARK in this case. */
2422
2423 enum tree_code
2424 invert_tree_comparison (enum tree_code code, bool honor_nans)
2425 {
2426 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2427 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2428 return ERROR_MARK;
2429
2430 switch (code)
2431 {
2432 case EQ_EXPR:
2433 return NE_EXPR;
2434 case NE_EXPR:
2435 return EQ_EXPR;
2436 case GT_EXPR:
2437 return honor_nans ? UNLE_EXPR : LE_EXPR;
2438 case GE_EXPR:
2439 return honor_nans ? UNLT_EXPR : LT_EXPR;
2440 case LT_EXPR:
2441 return honor_nans ? UNGE_EXPR : GE_EXPR;
2442 case LE_EXPR:
2443 return honor_nans ? UNGT_EXPR : GT_EXPR;
2444 case LTGT_EXPR:
2445 return UNEQ_EXPR;
2446 case UNEQ_EXPR:
2447 return LTGT_EXPR;
2448 case UNGT_EXPR:
2449 return LE_EXPR;
2450 case UNGE_EXPR:
2451 return LT_EXPR;
2452 case UNLT_EXPR:
2453 return GE_EXPR;
2454 case UNLE_EXPR:
2455 return GT_EXPR;
2456 case ORDERED_EXPR:
2457 return UNORDERED_EXPR;
2458 case UNORDERED_EXPR:
2459 return ORDERED_EXPR;
2460 default:
2461 gcc_unreachable ();
2462 }
2463 }
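
/* Example (ours): with NaNs honored, the inverse of a < b is a UNGE b
   ("unordered or greater-equal"), not a >= b, because both a < b and
   a >= b are false when either operand is NaN.  Under -ftrapping-math
   we give up (ERROR_MARK) instead: LT may raise an invalid-operand
   exception on unordered operands while its quiet inverse UNGE must
   not, so inverting would change the trapping behavior. */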
2464
2465 /* Similar, but return the comparison that results if the operands are
2466 swapped. This is safe for floating-point. */
2467
2468 enum tree_code
2469 swap_tree_comparison (enum tree_code code)
2470 {
2471 switch (code)
2472 {
2473 case EQ_EXPR:
2474 case NE_EXPR:
2475 case ORDERED_EXPR:
2476 case UNORDERED_EXPR:
2477 case LTGT_EXPR:
2478 case UNEQ_EXPR:
2479 return code;
2480 case GT_EXPR:
2481 return LT_EXPR;
2482 case GE_EXPR:
2483 return LE_EXPR;
2484 case LT_EXPR:
2485 return GT_EXPR;
2486 case LE_EXPR:
2487 return GE_EXPR;
2488 case UNGT_EXPR:
2489 return UNLT_EXPR;
2490 case UNGE_EXPR:
2491 return UNLE_EXPR;
2492 case UNLT_EXPR:
2493 return UNGT_EXPR;
2494 case UNLE_EXPR:
2495 return UNGE_EXPR;
2496 default:
2497 gcc_unreachable ();
2498 }
2499 }
2500
2501
2502 /* Convert a comparison tree code from an enum tree_code representation
2503 into a compcode bit-based encoding. This function is the inverse of
2504 compcode_to_comparison. */
2505
2506 static enum comparison_code
2507 comparison_to_compcode (enum tree_code code)
2508 {
2509 switch (code)
2510 {
2511 case LT_EXPR:
2512 return COMPCODE_LT;
2513 case EQ_EXPR:
2514 return COMPCODE_EQ;
2515 case LE_EXPR:
2516 return COMPCODE_LE;
2517 case GT_EXPR:
2518 return COMPCODE_GT;
2519 case NE_EXPR:
2520 return COMPCODE_NE;
2521 case GE_EXPR:
2522 return COMPCODE_GE;
2523 case ORDERED_EXPR:
2524 return COMPCODE_ORD;
2525 case UNORDERED_EXPR:
2526 return COMPCODE_UNORD;
2527 case UNLT_EXPR:
2528 return COMPCODE_UNLT;
2529 case UNEQ_EXPR:
2530 return COMPCODE_UNEQ;
2531 case UNLE_EXPR:
2532 return COMPCODE_UNLE;
2533 case UNGT_EXPR:
2534 return COMPCODE_UNGT;
2535 case LTGT_EXPR:
2536 return COMPCODE_LTGT;
2537 case UNGE_EXPR:
2538 return COMPCODE_UNGE;
2539 default:
2540 gcc_unreachable ();
2541 }
2542 }
2543
2544 /* Convert a compcode bit-based encoding of a comparison operator back
2545 to GCC's enum tree_code representation. This function is the
2546 inverse of comparison_to_compcode. */
2547
2548 static enum tree_code
2549 compcode_to_comparison (enum comparison_code code)
2550 {
2551 switch (code)
2552 {
2553 case COMPCODE_LT:
2554 return LT_EXPR;
2555 case COMPCODE_EQ:
2556 return EQ_EXPR;
2557 case COMPCODE_LE:
2558 return LE_EXPR;
2559 case COMPCODE_GT:
2560 return GT_EXPR;
2561 case COMPCODE_NE:
2562 return NE_EXPR;
2563 case COMPCODE_GE:
2564 return GE_EXPR;
2565 case COMPCODE_ORD:
2566 return ORDERED_EXPR;
2567 case COMPCODE_UNORD:
2568 return UNORDERED_EXPR;
2569 case COMPCODE_UNLT:
2570 return UNLT_EXPR;
2571 case COMPCODE_UNEQ:
2572 return UNEQ_EXPR;
2573 case COMPCODE_UNLE:
2574 return UNLE_EXPR;
2575 case COMPCODE_UNGT:
2576 return UNGT_EXPR;
2577 case COMPCODE_LTGT:
2578 return LTGT_EXPR;
2579 case COMPCODE_UNGE:
2580 return UNGE_EXPR;
2581 default:
2582 gcc_unreachable ();
2583 }
2584 }
2585
2586 /* Return a tree for the comparison which is the combination of
2587 doing the AND or OR (depending on CODE) of the two operations LCODE
2588 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2589 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2590 if this makes the transformation invalid. */
2591
2592 tree
2593 combine_comparisons (location_t loc,
2594 enum tree_code code, enum tree_code lcode,
2595 enum tree_code rcode, tree truth_type,
2596 tree ll_arg, tree lr_arg)
2597 {
2598 bool honor_nans = HONOR_NANS (ll_arg);
2599 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2600 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2601 int compcode;
2602
2603 switch (code)
2604 {
2605 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2606 compcode = lcompcode & rcompcode;
2607 break;
2608
2609 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2610 compcode = lcompcode | rcompcode;
2611 break;
2612
2613 default:
2614 return NULL_TREE;
2615 }
2616
2617 if (!honor_nans)
2618 {
2619 /* Eliminate unordered comparisons, as well as LTGT and ORD
2620 which are not used unless the mode has NaNs. */
2621 compcode &= ~COMPCODE_UNORD;
2622 if (compcode == COMPCODE_LTGT)
2623 compcode = COMPCODE_NE;
2624 else if (compcode == COMPCODE_ORD)
2625 compcode = COMPCODE_TRUE;
2626 }
2627 else if (flag_trapping_math)
2628 {
2629 /* Check that the original operation and the optimized ones will trap
2630 under the same condition. */
2631 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2632 && (lcompcode != COMPCODE_EQ)
2633 && (lcompcode != COMPCODE_ORD);
2634 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2635 && (rcompcode != COMPCODE_EQ)
2636 && (rcompcode != COMPCODE_ORD);
2637 bool trap = (compcode & COMPCODE_UNORD) == 0
2638 && (compcode != COMPCODE_EQ)
2639 && (compcode != COMPCODE_ORD);
2640
2641 /* In a short-circuited boolean expression the LHS might be
2642 such that the RHS, if evaluated, will never trap. For
2643 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2644 if neither x nor y is NaN. (This is a mixed blessing: for
2645 example, the expression above will never trap, hence
2646 optimizing it to x < y would be invalid). */
2647 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2648 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2649 rtrap = false;
2650
2651 /* If the comparison was short-circuited, and only the RHS
2652 trapped, we may now generate a spurious trap. */
2653 if (rtrap && !ltrap
2654 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2655 return NULL_TREE;
2656
2657 /* If we changed the conditions that cause a trap, we lose. */
2658 if ((ltrap || rtrap) != trap)
2659 return NULL_TREE;
2660 }
2661
2662 if (compcode == COMPCODE_TRUE)
2663 return constant_boolean_node (true, truth_type);
2664 else if (compcode == COMPCODE_FALSE)
2665 return constant_boolean_node (false, truth_type);
2666 else
2667 {
2668 enum tree_code tcode;
2669
2670 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2671 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2672 }
2673 }
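
/* Worked example (ours): with the bit encoding (COMPCODE_LT == 1,
   COMPCODE_EQ == 2), and operands where NaNs are not a concern,
   x < y || x == y combines to 1 | 2 == 3 == COMPCODE_LE and folds to
   x <= y, while x < y && x == y combines to 1 & 2 == 0 ==
   COMPCODE_FALSE and folds to constant false. */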
2674 \f
2675 /* Return nonzero if two operands (typically of the same tree node)
2676 are necessarily equal. FLAGS modifies behavior as follows:
2677
2678 If OEP_ONLY_CONST is set, only return nonzero for constants.
2679 This function tests whether the operands are indistinguishable;
2680 it does not test whether they are equal using C's == operation.
2681 The distinction is important for IEEE floating point, because
2682 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2683 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2684
2685 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2686 even though it may hold multiple values during a function.
2687 This is because a GCC tree node guarantees that nothing else is
2688 executed between the evaluation of its "operands" (which may often
2689 be evaluated in arbitrary order). Hence if the operands themselves
2690 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2691 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2692 unset means assuming isochronic (or instantaneous) tree equivalence.
2693 Unless comparing arbitrary expression trees, such as from different
2694 statements, this flag can usually be left unset.
2695
2696 If OEP_PURE_SAME is set, then pure functions with identical arguments
2697 are considered the same. It is used when the caller has other ways
2698 to ensure that global memory is unchanged in between.
2699
2700 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2701 not values of expressions.
2702
2703 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2704 any operand with side effects. This is unnecessarily conservative in the
2705 case we know that arg0 and arg1 are in disjoint code paths (such as in
2706 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2707 addresses with TREE_CONSTANT flag set so we know that &var == &var
2708 even if var is volatile. */
2709
2710 int
2711 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2712 {
2713 /* If either is ERROR_MARK, they aren't equal. */
2714 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2715 || TREE_TYPE (arg0) == error_mark_node
2716 || TREE_TYPE (arg1) == error_mark_node)
2717 return 0;
2718
2719 /* Similar, if either does not have a type (like a released SSA name),
2720 they aren't equal. */
2721 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2722 return 0;
2723
2724 /* We cannot consider pointers to different address space equal. */
2725 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2726 && POINTER_TYPE_P (TREE_TYPE (arg1))
2727 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2728 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2729 return 0;
2730
2731 /* Check equality of integer constants before bailing out due to
2732 precision differences. */
2733 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2734 {
2735 /* Address of INTEGER_CST is not defined; check that we did not forget
2736 to drop the OEP_ADDRESS_OF flags. */
2737 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2738 return tree_int_cst_equal (arg0, arg1);
2739 }
2740
2741 if (!(flags & OEP_ADDRESS_OF))
2742 {
2743 /* If the two types don't have the same signedness, then we can't consider
2744 them equal. We must check this before the STRIP_NOPS calls
2745 because they may change the signedness of the arguments. As pointers
2746 strictly don't have a signedness, require either two pointers or
2747 two non-pointers as well. */
2748 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2749 || POINTER_TYPE_P (TREE_TYPE (arg0))
2750 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2751 return 0;
2752
2753 /* If the two types don't have the same precision, then it is not safe
2754 to strip NOPs. */
2755 if (element_precision (TREE_TYPE (arg0))
2756 != element_precision (TREE_TYPE (arg1)))
2757 return 0;
2758
2759 STRIP_NOPS (arg0);
2760 STRIP_NOPS (arg1);
2761 }
2762 #if 0
2763 /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2764 sanity check once the issue is solved. */
2765 else
2766 /* Addresses of conversions and SSA_NAMEs (and many other things)
2767 are not defined. Check that we did not forget to drop the
2768 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2769 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2770 && TREE_CODE (arg0) != SSA_NAME);
2771 #endif
2772
2773 /* In case both args are comparisons but with different comparison
2774 code, try to swap the comparison operands of one arg to produce
2775 a match and compare that variant. */
2776 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2777 && COMPARISON_CLASS_P (arg0)
2778 && COMPARISON_CLASS_P (arg1))
2779 {
2780 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2781
2782 if (TREE_CODE (arg0) == swap_code)
2783 return operand_equal_p (TREE_OPERAND (arg0, 0),
2784 TREE_OPERAND (arg1, 1), flags)
2785 && operand_equal_p (TREE_OPERAND (arg0, 1),
2786 TREE_OPERAND (arg1, 0), flags);
2787 }
2788
2789 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2790 {
2791 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2792 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2793 ;
2794 else if (flags & OEP_ADDRESS_OF)
2795 {
2796 /* If we are interested in comparing addresses ignore
2797 MEM_REF wrappings of the base that can appear just for
2798 TBAA reasons. */
2799 if (TREE_CODE (arg0) == MEM_REF
2800 && DECL_P (arg1)
2801 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2802 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2803 && integer_zerop (TREE_OPERAND (arg0, 1)))
2804 return 1;
2805 else if (TREE_CODE (arg1) == MEM_REF
2806 && DECL_P (arg0)
2807 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2808 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2809 && integer_zerop (TREE_OPERAND (arg1, 1)))
2810 return 1;
2811 return 0;
2812 }
2813 else
2814 return 0;
2815 }
2816
2817 /* When not checking addresses, this is needed for conversions and for
2818 COMPONENT_REF. Might as well play it safe and always test this. */
2819 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2820 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2821 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2822 && !(flags & OEP_ADDRESS_OF)))
2823 return 0;
2824
2825 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2826 We don't care about side effects in that case because the SAVE_EXPR
2827 takes care of that for us. In all other cases, two expressions are
2828 equal if they have no side effects. If we have two identical
2829 expressions with side effects that should be treated the same due
2830 to the only side effects being identical SAVE_EXPR's, that will
2831 be detected in the recursive calls below.
2832 If we are taking an invariant address of two identical objects
2833 they are necessarily equal as well. */
2834 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2835 && (TREE_CODE (arg0) == SAVE_EXPR
2836 || (flags & OEP_MATCH_SIDE_EFFECTS)
2837 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2838 return 1;
2839
2840 /* Next handle constant cases, those for which we can return 1 even
2841 if ONLY_CONST is set. */
2842 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2843 switch (TREE_CODE (arg0))
2844 {
2845 case INTEGER_CST:
2846 return tree_int_cst_equal (arg0, arg1);
2847
2848 case FIXED_CST:
2849 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2850 TREE_FIXED_CST (arg1));
2851
2852 case REAL_CST:
2853 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2854 return 1;
2855
2856
2857 if (!HONOR_SIGNED_ZEROS (arg0))
2858 {
2859 /* If we do not distinguish between signed and unsigned zero,
2860 consider them equal. */
2861 if (real_zerop (arg0) && real_zerop (arg1))
2862 return 1;
2863 }
2864 return 0;
2865
2866 case VECTOR_CST:
2867 {
2868 unsigned i;
2869
2870 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2871 return 0;
2872
2873 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2874 {
2875 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2876 VECTOR_CST_ELT (arg1, i), flags))
2877 return 0;
2878 }
2879 return 1;
2880 }
2881
2882 case COMPLEX_CST:
2883 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2884 flags)
2885 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2886 flags));
2887
2888 case STRING_CST:
2889 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2890 && ! memcmp (TREE_STRING_POINTER (arg0),
2891 TREE_STRING_POINTER (arg1),
2892 TREE_STRING_LENGTH (arg0)));
2893
2894 case ADDR_EXPR:
2895 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2896 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2897 flags | OEP_ADDRESS_OF
2898 | OEP_MATCH_SIDE_EFFECTS);
2899 case CONSTRUCTOR:
2900 /* In GIMPLE empty constructors are allowed in initializers of
2901 aggregates. */
2902 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2903 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2904 default:
2905 break;
2906 }
2907
2908 if (flags & OEP_ONLY_CONST)
2909 return 0;
2910
2911 /* Define macros to test an operand from arg0 and arg1 for equality and a
2912 variant that allows null and views null as being different from any
2913 non-null value. In the latter case, if either is null, they both
2914 must be; otherwise, do the normal comparison. */
2915 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2916 TREE_OPERAND (arg1, N), flags)
2917
2918 #define OP_SAME_WITH_NULL(N) \
2919 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2920 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2921
2922 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2923 {
2924 case tcc_unary:
2925 /* Two conversions are equal only if signedness and modes match. */
2926 switch (TREE_CODE (arg0))
2927 {
2928 CASE_CONVERT:
2929 case FIX_TRUNC_EXPR:
2930 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2931 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2932 return 0;
2933 break;
2934 default:
2935 break;
2936 }
2937
2938 return OP_SAME (0);
2939
2940
2941 case tcc_comparison:
2942 case tcc_binary:
2943 if (OP_SAME (0) && OP_SAME (1))
2944 return 1;
2945
2946 /* For commutative ops, allow the other order. */
2947 return (commutative_tree_code (TREE_CODE (arg0))
2948 && operand_equal_p (TREE_OPERAND (arg0, 0),
2949 TREE_OPERAND (arg1, 1), flags)
2950 && operand_equal_p (TREE_OPERAND (arg0, 1),
2951 TREE_OPERAND (arg1, 0), flags));
2952
2953 case tcc_reference:
2954 /* If either of the pointer (or reference) expressions we are
2955 dereferencing contain a side effect, these cannot be equal,
2956 but their addresses can be. */
2957 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
2958 && (TREE_SIDE_EFFECTS (arg0)
2959 || TREE_SIDE_EFFECTS (arg1)))
2960 return 0;
2961
2962 switch (TREE_CODE (arg0))
2963 {
2964 case INDIRECT_REF:
2965 if (!(flags & OEP_ADDRESS_OF)
2966 && (TYPE_ALIGN (TREE_TYPE (arg0))
2967 != TYPE_ALIGN (TREE_TYPE (arg1))))
2968 return 0;
2969 flags &= ~OEP_ADDRESS_OF;
2970 return OP_SAME (0);
2971
2972 case REALPART_EXPR:
2973 case IMAGPART_EXPR:
2974 case VIEW_CONVERT_EXPR:
2975 return OP_SAME (0);
2976
2977 case TARGET_MEM_REF:
2978 case MEM_REF:
2979 if (!(flags & OEP_ADDRESS_OF))
2980 {
2981 /* Require equal access sizes */
2982 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
2983 && (!TYPE_SIZE (TREE_TYPE (arg0))
2984 || !TYPE_SIZE (TREE_TYPE (arg1))
2985 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2986 TYPE_SIZE (TREE_TYPE (arg1)),
2987 flags)))
2988 return 0;
2989 /* Verify that accesses are TBAA compatible. */
2990 if (flag_strict_aliasing
2991 && (!alias_ptr_types_compatible_p
2992 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2993 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2994 || (MR_DEPENDENCE_CLIQUE (arg0)
2995 != MR_DEPENDENCE_CLIQUE (arg1))
2996 || (MR_DEPENDENCE_BASE (arg0)
2997 != MR_DEPENDENCE_BASE (arg1))))
2998 return 0;
2999 /* Verify that alignment is compatible. */
3000 if (TYPE_ALIGN (TREE_TYPE (arg0))
3001 != TYPE_ALIGN (TREE_TYPE (arg1)))
3002 return 0;
3003 }
3004 flags &= ~OEP_ADDRESS_OF;
3005 return (OP_SAME (0) && OP_SAME (1)
3006 /* TARGET_MEM_REFs require equal extra operands. */
3007 && (TREE_CODE (arg0) != TARGET_MEM_REF
3008 || (OP_SAME_WITH_NULL (2)
3009 && OP_SAME_WITH_NULL (3)
3010 && OP_SAME_WITH_NULL (4))));
3011
3012 case ARRAY_REF:
3013 case ARRAY_RANGE_REF:
3014 /* Operands 2 and 3 may be null.
3015 Compare the array index by value first if it is constant, as we
3016 may have different types but the same value here. */
3017 if (!OP_SAME (0))
3018 return 0;
3019 flags &= ~OEP_ADDRESS_OF;
3020 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3021 TREE_OPERAND (arg1, 1))
3022 || OP_SAME (1))
3023 && OP_SAME_WITH_NULL (2)
3024 && OP_SAME_WITH_NULL (3));
3025
3026 case COMPONENT_REF:
3027 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3028 may be NULL when we're called to compare MEM_EXPRs. */
3029 if (!OP_SAME_WITH_NULL (0)
3030 || !OP_SAME (1))
3031 return 0;
3032 flags &= ~OEP_ADDRESS_OF;
3033 return OP_SAME_WITH_NULL (2);
3034
3035 case BIT_FIELD_REF:
3036 if (!OP_SAME (0))
3037 return 0;
3038 flags &= ~OEP_ADDRESS_OF;
3039 return OP_SAME (1) && OP_SAME (2);
3040
3041 default:
3042 return 0;
3043 }
3044
3045 case tcc_expression:
3046 switch (TREE_CODE (arg0))
3047 {
3048 case ADDR_EXPR:
3049 /* Be sure we pass the right ADDRESS_OF flag. */
3050 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3051 return operand_equal_p (TREE_OPERAND (arg0, 0),
3052 TREE_OPERAND (arg1, 0),
3053 flags | OEP_ADDRESS_OF);
3054
3055 case TRUTH_NOT_EXPR:
3056 return OP_SAME (0);
3057
3058 case TRUTH_ANDIF_EXPR:
3059 case TRUTH_ORIF_EXPR:
3060 return OP_SAME (0) && OP_SAME (1);
3061
3062 case FMA_EXPR:
3063 case WIDEN_MULT_PLUS_EXPR:
3064 case WIDEN_MULT_MINUS_EXPR:
3065 if (!OP_SAME (2))
3066 return 0;
3067 /* The multiplication operands are commutative. */
3068 /* FALLTHRU */
3069
3070 case TRUTH_AND_EXPR:
3071 case TRUTH_OR_EXPR:
3072 case TRUTH_XOR_EXPR:
3073 if (OP_SAME (0) && OP_SAME (1))
3074 return 1;
3075
3076 /* Otherwise take into account this is a commutative operation. */
3077 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3078 TREE_OPERAND (arg1, 1), flags)
3079 && operand_equal_p (TREE_OPERAND (arg0, 1),
3080 TREE_OPERAND (arg1, 0), flags));
3081
3082 case COND_EXPR:
3083 case VEC_COND_EXPR:
3084 case DOT_PROD_EXPR:
3085 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3086
3087 default:
3088 return 0;
3089 }
3090
3091 case tcc_vl_exp:
3092 switch (TREE_CODE (arg0))
3093 {
3094 case CALL_EXPR:
3095 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3096 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3097 /* If the two CALL_EXPRs are not both internal or both normal
3098 function calls, then they are not equal. */
3099 return 0;
3100 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3101 {
3102 /* If the CALL_EXPRs call different internal functions, then they
3103 are not equal. */
3104 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3105 return 0;
3106 }
3107 else
3108 {
3109 /* If the CALL_EXPRs call different functions, then they are not
3110 equal. */
3111 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3112 flags))
3113 return 0;
3114 }
3115
3116 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3117 {
3118 unsigned int cef = call_expr_flags (arg0);
3119 if (flags & OEP_PURE_SAME)
3120 cef &= ECF_CONST | ECF_PURE;
3121 else
3122 cef &= ECF_CONST;
3123 if (!cef)
3124 return 0;
3125 }
3126
3127 /* Now see if all the arguments are the same. */
3128 {
3129 const_call_expr_arg_iterator iter0, iter1;
3130 const_tree a0, a1;
3131 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3132 a1 = first_const_call_expr_arg (arg1, &iter1);
3133 a0 && a1;
3134 a0 = next_const_call_expr_arg (&iter0),
3135 a1 = next_const_call_expr_arg (&iter1))
3136 if (! operand_equal_p (a0, a1, flags))
3137 return 0;
3138
3139 /* If we get here and both argument lists are exhausted
3140 then the CALL_EXPRs are equal. */
3141 return ! (a0 || a1);
3142 }
3143 default:
3144 return 0;
3145 }
3146
3147 case tcc_declaration:
3148 /* Consider __builtin_sqrt equal to sqrt. */
3149 return (TREE_CODE (arg0) == FUNCTION_DECL
3150 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3151 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3152 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3153
3154 case tcc_exceptional:
3155 if (TREE_CODE (arg0) == CONSTRUCTOR)
3156 {
3157 /* In GIMPLE constructors are used only to build vectors from
3158 elements. Individual elements in the constructor must be
3159 indexed in increasing order and form an initial sequence.
3160
3161 We make no effort to compare constructors in GENERIC.
3162 (see sem_variable::equals in ipa-icf which can do so for
3163 constants). */
3164 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3165 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3166 return 0;
3167
3168 /* Be sure that the vectors constructed have the same representation.
3169 We only tested that element precisions and modes match.
3170 Vectors may be BLKmode, so also check that the numbers of
3171 parts match. */
3172 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3173 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3174 return 0;
3175
3176 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3177 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3178 unsigned int len = vec_safe_length (v0);
3179
3180 if (len != vec_safe_length (v1))
3181 return 0;
3182
3183 for (unsigned int i = 0; i < len; i++)
3184 {
3185 constructor_elt *c0 = &(*v0)[i];
3186 constructor_elt *c1 = &(*v1)[i];
3187
3188 if (!operand_equal_p (c0->value, c1->value, flags)
3189 /* In GIMPLE the indexes can be either NULL or matching i.
3190 Double check this so we won't get false
3191 positives for GENERIC. */
3192 || (c0->index
3193 && (TREE_CODE (c0->index) != INTEGER_CST
3194 || compare_tree_int (c0->index, i)))
3195 || (c1->index
3196 && (TREE_CODE (c1->index) != INTEGER_CST
3197 || compare_tree_int (c1->index, i))))
3198 return 0;
3199 }
3200 return 1;
3201 }
3202 return 0;
3203
3204 default:
3205 return 0;
3206 }
3207
3208 #undef OP_SAME
3209 #undef OP_SAME_WITH_NULL
3210 }
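
/* Example (ours): operand_equal_p distinguishes 0.0 from -0.0 when
   HONOR_SIGNED_ZEROS holds for the mode -- real_identical compares the
   representations -- but treats them as equal otherwise, matching the
   note above that -0.0 == 0.0 under C's == even though the two values
   remain distinguishable in IEEE arithmetic. */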
3211 \f
3212 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3213 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3214
3215 When in doubt, return 0. */
3216
3217 static int
3218 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3219 {
3220 int unsignedp1, unsignedpo;
3221 tree primarg0, primarg1, primother;
3222 unsigned int correct_width;
3223
3224 if (operand_equal_p (arg0, arg1, 0))
3225 return 1;
3226
3227 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3228 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3229 return 0;
3230
3231 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3232 and see if the inner values are the same. This removes any
3233 signedness comparison, which doesn't matter here. */
3234 primarg0 = arg0, primarg1 = arg1;
3235 STRIP_NOPS (primarg0);
3236 STRIP_NOPS (primarg1);
3237 if (operand_equal_p (primarg0, primarg1, 0))
3238 return 1;
3239
3240 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3241 actual comparison operand, ARG0.
3242
3243 First throw away any conversions to wider types
3244 already present in the operands. */
3245
3246 primarg1 = get_narrower (arg1, &unsignedp1);
3247 primother = get_narrower (other, &unsignedpo);
3248
3249 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3250 if (unsignedp1 == unsignedpo
3251 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3252 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3253 {
3254 tree type = TREE_TYPE (arg0);
3255
3256 /* Make sure shorter operand is extended the right way
3257 to match the longer operand. */
3258 primarg1 = fold_convert (signed_or_unsigned_type_for
3259 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3260
3261 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3262 return 1;
3263 }
3264
3265 return 0;
3266 }
3267 \f
3268 /* See if ARG is an expression that is either a comparison or is performing
3269 arithmetic on comparisons. The comparisons must only be comparing
3270 two different values, which will be stored in *CVAL1 and *CVAL2; if
3271 they are nonzero it means that some operands have already been found.
3272 No variables may be used anywhere else in the expression except in the
3273 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3274 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3275
3276 If this is true, return 1. Otherwise, return zero. */
3277
3278 static int
3279 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3280 {
3281 enum tree_code code = TREE_CODE (arg);
3282 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3283
3284 /* We can handle some of the tcc_expression cases here. */
3285 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3286 tclass = tcc_unary;
3287 else if (tclass == tcc_expression
3288 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3289 || code == COMPOUND_EXPR))
3290 tclass = tcc_binary;
3291
3292 else if (tclass == tcc_expression && code == SAVE_EXPR
3293 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3294 {
3295 /* If we've already found a CVAL1 or CVAL2, this expression is
3296 too complex to handle. */
3297 if (*cval1 || *cval2)
3298 return 0;
3299
3300 tclass = tcc_unary;
3301 *save_p = 1;
3302 }
3303
3304 switch (tclass)
3305 {
3306 case tcc_unary:
3307 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3308
3309 case tcc_binary:
3310 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3311 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3312 cval1, cval2, save_p));
3313
3314 case tcc_constant:
3315 return 1;
3316
3317 case tcc_expression:
3318 if (code == COND_EXPR)
3319 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3320 cval1, cval2, save_p)
3321 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3322 cval1, cval2, save_p)
3323 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3324 cval1, cval2, save_p));
3325 return 0;
3326
3327 case tcc_comparison:
3328 /* First see if we can handle the first operand, then the second. For
3329 the second operand, we know *CVAL1 can't be zero. Each of the two
3330 values must appear on one side of the comparison; test for the
3331 case where this isn't true by failing if the two operands
3332 are the same. */
3333
3334 if (operand_equal_p (TREE_OPERAND (arg, 0),
3335 TREE_OPERAND (arg, 1), 0))
3336 return 0;
3337
3338 if (*cval1 == 0)
3339 *cval1 = TREE_OPERAND (arg, 0);
3340 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3341 ;
3342 else if (*cval2 == 0)
3343 *cval2 = TREE_OPERAND (arg, 0);
3344 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3345 ;
3346 else
3347 return 0;
3348
3349 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3350 ;
3351 else if (*cval2 == 0)
3352 *cval2 = TREE_OPERAND (arg, 1);
3353 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3354 ;
3355 else
3356 return 0;
3357
3358 return 1;
3359
3360 default:
3361 return 0;
3362 }
3363 }
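
/* Example (ours): for ARG == (a < b || a == b) this returns 1 with
   *CVAL1 == a and *CVAL2 == b, since every comparison mentions only
   those two values; for (a < b || c == d) it returns 0, because the
   comparisons involve more than two distinct values. */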
3364 \f
3365 /* ARG is a tree that is known to contain just arithmetic operations and
3366 comparisons. Evaluate the operations in the tree substituting NEW0 for
3367 any occurrence of OLD0 as an operand of a comparison and likewise for
3368 NEW1 and OLD1. */
3369
3370 static tree
3371 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3372 tree old1, tree new1)
3373 {
3374 tree type = TREE_TYPE (arg);
3375 enum tree_code code = TREE_CODE (arg);
3376 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3377
3378 /* We can handle some of the tcc_expression cases here. */
3379 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3380 tclass = tcc_unary;
3381 else if (tclass == tcc_expression
3382 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3383 tclass = tcc_binary;
3384
3385 switch (tclass)
3386 {
3387 case tcc_unary:
3388 return fold_build1_loc (loc, code, type,
3389 eval_subst (loc, TREE_OPERAND (arg, 0),
3390 old0, new0, old1, new1));
3391
3392 case tcc_binary:
3393 return fold_build2_loc (loc, code, type,
3394 eval_subst (loc, TREE_OPERAND (arg, 0),
3395 old0, new0, old1, new1),
3396 eval_subst (loc, TREE_OPERAND (arg, 1),
3397 old0, new0, old1, new1));
3398
3399 case tcc_expression:
3400 switch (code)
3401 {
3402 case SAVE_EXPR:
3403 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3404 old1, new1);
3405
3406 case COMPOUND_EXPR:
3407 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3408 old1, new1);
3409
3410 case COND_EXPR:
3411 return fold_build3_loc (loc, code, type,
3412 eval_subst (loc, TREE_OPERAND (arg, 0),
3413 old0, new0, old1, new1),
3414 eval_subst (loc, TREE_OPERAND (arg, 1),
3415 old0, new0, old1, new1),
3416 eval_subst (loc, TREE_OPERAND (arg, 2),
3417 old0, new0, old1, new1));
3418 default:
3419 break;
3420 }
3421 /* Fall through - ??? */
3422
3423 case tcc_comparison:
3424 {
3425 tree arg0 = TREE_OPERAND (arg, 0);
3426 tree arg1 = TREE_OPERAND (arg, 1);
3427
3428 /* We need to check both for exact equality and tree equality. The
3429 former will be true if the operand has a side-effect. In that
3430 case, we know the operand occurred exactly once. */
3431
3432 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3433 arg0 = new0;
3434 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3435 arg0 = new1;
3436
3437 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3438 arg1 = new0;
3439 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3440 arg1 = new1;
3441
3442 return fold_build2_loc (loc, code, type, arg0, arg1);
3443 }
3444
3445 default:
3446 return arg;
3447 }
3448 }
3449 \f
3450 /* Return a tree for the case when the result of an expression is RESULT
3451 converted to TYPE and OMITTED was previously an operand of the expression
3452 but is now not needed (e.g., we folded OMITTED * 0).
3453
3454 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3455 the conversion of RESULT to TYPE. */
3456
3457 tree
3458 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3459 {
3460 tree t = fold_convert_loc (loc, type, result);
3461
3462 /* If the resulting operand is an empty statement, just return the omitted
3463 statement cast to void. */
3464 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3465 return build1_loc (loc, NOP_EXPR, void_type_node,
3466 fold_ignored_result (omitted));
3467
3468 if (TREE_SIDE_EFFECTS (omitted))
3469 return build2_loc (loc, COMPOUND_EXPR, type,
3470 fold_ignored_result (omitted), t);
3471
3472 return non_lvalue_loc (loc, t);
3473 }
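
/* Example (ours): when folding an integer expression f () * 0 the
   result is 0, but the call must still be evaluated for its side
   effects, so a caller returns
   omit_one_operand_loc (loc, type, integer_zero_node, call), i.e. the
   COMPOUND_EXPR (f (), 0). */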
3474
3475 /* Return a tree for the case when the result of an expression is RESULT
3476 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3477 of the expression but are now not needed.
3478
3479 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3480 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3481 evaluated before OMITTED2. Otherwise, if neither has side effects,
3482 just do the conversion of RESULT to TYPE. */
3483
3484 tree
3485 omit_two_operands_loc (location_t loc, tree type, tree result,
3486 tree omitted1, tree omitted2)
3487 {
3488 tree t = fold_convert_loc (loc, type, result);
3489
3490 if (TREE_SIDE_EFFECTS (omitted2))
3491 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3492 if (TREE_SIDE_EFFECTS (omitted1))
3493 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3494
3495 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3496 }
3497
3498 \f
3499 /* Return a simplified tree node for the truth-negation of ARG. This
3500 never alters ARG itself. We assume that ARG is an operation that
3501 returns a truth value (0 or 1).
3502
3503 FIXME: one would think we would fold the result, but it causes
3504 problems with the dominator optimizer. */
3505
3506 static tree
3507 fold_truth_not_expr (location_t loc, tree arg)
3508 {
3509 tree type = TREE_TYPE (arg);
3510 enum tree_code code = TREE_CODE (arg);
3511 location_t loc1, loc2;
3512
3513 /* If this is a comparison, we can simply invert it, except for
3514 floating-point non-equality comparisons, in which case we just
3515 enclose a TRUTH_NOT_EXPR around what we have. */
3516
3517 if (TREE_CODE_CLASS (code) == tcc_comparison)
3518 {
3519 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3520 if (FLOAT_TYPE_P (op_type)
3521 && flag_trapping_math
3522 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3523 && code != NE_EXPR && code != EQ_EXPR)
3524 return NULL_TREE;
3525
3526 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3527 if (code == ERROR_MARK)
3528 return NULL_TREE;
3529
3530 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3531 TREE_OPERAND (arg, 1));
3532 }
3533
3534 switch (code)
3535 {
3536 case INTEGER_CST:
3537 return constant_boolean_node (integer_zerop (arg), type);
3538
3539 case TRUTH_AND_EXPR:
3540 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3541 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3542 return build2_loc (loc, TRUTH_OR_EXPR, type,
3543 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3544 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3545
3546 case TRUTH_OR_EXPR:
3547 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3548 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3549 return build2_loc (loc, TRUTH_AND_EXPR, type,
3550 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3551 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3552
3553 case TRUTH_XOR_EXPR:
3554 /* Here we can invert either operand. We invert the first operand
3555 unless the second operand is a TRUTH_NOT_EXPR in which case our
3556 result is the XOR of the first operand with the inside of the
3557 negation of the second operand. */
3558
3559 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3560 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3561 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3562 else
3563 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3564 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3565 TREE_OPERAND (arg, 1));
3566
3567 case TRUTH_ANDIF_EXPR:
3568 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3569 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3570 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3571 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3572 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3573
3574 case TRUTH_ORIF_EXPR:
3575 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3576 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3577 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3578 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3579 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3580
3581 case TRUTH_NOT_EXPR:
3582 return TREE_OPERAND (arg, 0);
3583
3584 case COND_EXPR:
3585 {
3586 tree arg1 = TREE_OPERAND (arg, 1);
3587 tree arg2 = TREE_OPERAND (arg, 2);
3588
3589 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3590 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3591
3592 /* A COND_EXPR may have a throw as one operand, which
3593 then has void type. Just leave void operands
3594 as they are. */
3595 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3596 VOID_TYPE_P (TREE_TYPE (arg1))
3597 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3598 VOID_TYPE_P (TREE_TYPE (arg2))
3599 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3600 }
3601
3602 case COMPOUND_EXPR:
3603 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3604 return build2_loc (loc, COMPOUND_EXPR, type,
3605 TREE_OPERAND (arg, 0),
3606 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3607
3608 case NON_LVALUE_EXPR:
3609 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3610 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3611
3612 CASE_CONVERT:
3613 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3614 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3615
3616 /* ... fall through ... */
3617
3618 case FLOAT_EXPR:
3619 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3620 return build1_loc (loc, TREE_CODE (arg), type,
3621 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3622
3623 case BIT_AND_EXPR:
3624 if (!integer_onep (TREE_OPERAND (arg, 1)))
3625 return NULL_TREE;
3626 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3627
3628 case SAVE_EXPR:
3629 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3630
3631 case CLEANUP_POINT_EXPR:
3632 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3633 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3634 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3635
3636 default:
3637 return NULL_TREE;
3638 }
3639 }
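
/* Example (ours): the TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are
   De Morgan's laws: !(a && b) becomes !a || !b and !(a || b) becomes
   !a && !b, with each operand inverted recursively via
   invert_truthvalue_loc. */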
3640
3641 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3642 assume that ARG is an operation that returns a truth value (0 or 1
3643 for scalars, 0 or -1 for vectors). Return the folded expression if
3644 folding is successful. Otherwise, return NULL_TREE. */
3645
3646 static tree
3647 fold_invert_truthvalue (location_t loc, tree arg)
3648 {
3649 tree type = TREE_TYPE (arg);
3650 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3651 ? BIT_NOT_EXPR
3652 : TRUTH_NOT_EXPR,
3653 type, arg);
3654 }
3655
3656 /* Return a simplified tree node for the truth-negation of ARG. This
3657 never alters ARG itself. We assume that ARG is an operation that
3658 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3659
3660 tree
3661 invert_truthvalue_loc (location_t loc, tree arg)
3662 {
3663 if (TREE_CODE (arg) == ERROR_MARK)
3664 return arg;
3665
3666 tree type = TREE_TYPE (arg);
3667 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3668 ? BIT_NOT_EXPR
3669 : TRUTH_NOT_EXPR,
3670 type, arg);
3671 }
3672
3673 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3674 with code CODE. This optimization is unsafe. */
3675 static tree
3676 distribute_real_division (location_t loc, enum tree_code code, tree type,
3677 tree arg0, tree arg1)
3678 {
3679 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3680 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3681
3682 /* (A / C) +- (B / C) -> (A +- B) / C. */
3683 if (mul0 == mul1
3684 && operand_equal_p (TREE_OPERAND (arg0, 1),
3685 TREE_OPERAND (arg1, 1), 0))
3686 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3687 fold_build2_loc (loc, code, type,
3688 TREE_OPERAND (arg0, 0),
3689 TREE_OPERAND (arg1, 0)),
3690 TREE_OPERAND (arg0, 1));
3691
3692 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3693 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3694 TREE_OPERAND (arg1, 0), 0)
3695 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3696 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3697 {
3698 REAL_VALUE_TYPE r0, r1;
3699 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3700 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3701 if (!mul0)
3702 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3703 if (!mul1)
3704 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3705 real_arithmetic (&r0, code, &r0, &r1);
3706 return fold_build2_loc (loc, MULT_EXPR, type,
3707 TREE_OPERAND (arg0, 0),
3708 build_real (type, r0));
3709 }
3710
3711 return NULL_TREE;
3712 }
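
/* Illustrative sketch, not part of fold-const.c: the source-level effect
   of the two transformations above.  Regrouping floating-point divisions
   can change rounding, which is why the comment calls this unsafe; the
   fold is only valid under unsafe-math assumptions.  */

double
div_distribute (double a, double b, double c)
{
  return a / c + b / c;		/* -> (a + b) / c */
}

double
div_combine (double a)
{
  return a / 2.0 + a / 4.0;	/* -> a * (1.0/2.0 + 1.0/4.0) == a * 0.75 */
}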
3713 \f
3714 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3715 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3716 and uses reverse storage order if REVERSEP is nonzero. */
3717
3718 static tree
3719 make_bit_field_ref (location_t loc, tree inner, tree type,
3720 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3721 int unsignedp, int reversep)
3722 {
3723 tree result, bftype;
3724
3725 if (bitpos == 0 && !reversep)
3726 {
3727 tree size = TYPE_SIZE (TREE_TYPE (inner));
3728 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3729 || POINTER_TYPE_P (TREE_TYPE (inner)))
3730 && tree_fits_shwi_p (size)
3731 && tree_to_shwi (size) == bitsize)
3732 return fold_convert_loc (loc, type, inner);
3733 }
3734
3735 bftype = type;
3736 if (TYPE_PRECISION (bftype) != bitsize
3737 || TYPE_UNSIGNED (bftype) == !unsignedp)
3738 bftype = build_nonstandard_integer_type (bitsize, 0);
3739
3740 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3741 size_int (bitsize), bitsize_int (bitpos));
3742 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3743
3744 if (bftype != type)
3745 result = fold_convert_loc (loc, type, result);
3746
3747 return result;
3748 }
3749
3750 /* Optimize a bit-field compare.
3751
3752 There are two cases: First is a compare against a constant and the
3753 second is a comparison of two items where the fields are at the same
3754 bit position relative to the start of a chunk (byte, halfword, word)
3755 large enough to contain it. In these cases we can avoid the shift
3756 implicit in bitfield extractions.
3757
3758 For constants, we emit a compare of the shifted constant with the
3759 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3760 compared. For two fields at the same position, we do the ANDs with the
3761 similar mask and compare the result of the ANDs.
3762
3763 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3764 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3765 are the left and right operands of the comparison, respectively.
3766
3767 If the optimization described above can be done, we return the resulting
3768 tree. Otherwise we return zero. */
3769
3770 static tree
3771 optimize_bit_field_compare (location_t loc, enum tree_code code,
3772 tree compare_type, tree lhs, tree rhs)
3773 {
3774 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3775 tree type = TREE_TYPE (lhs);
3776 tree unsigned_type;
3777 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3778 machine_mode lmode, rmode, nmode;
3779 int lunsignedp, runsignedp;
3780 int lreversep, rreversep;
3781 int lvolatilep = 0, rvolatilep = 0;
3782 tree linner, rinner = NULL_TREE;
3783 tree mask;
3784 tree offset;
3785
3786 /* Get all the information about the extractions being done. If the bit size
3787 is the same as the size of the underlying object, we aren't doing an
3788 extraction at all and so can do nothing. We also don't want to
3789 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3790 then will no longer be able to replace it. */
3791 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3792 &lunsignedp, &lreversep, &lvolatilep, false);
3793 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3794 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3795 return 0;
3796
3797 if (const_p)
3798 rreversep = lreversep;
3799 else
3800 {
3801 /* If this is not a constant, we can only do something if bit positions,
3802 sizes, signedness and storage order are the same. */
3803 rinner
3804 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3805 &runsignedp, &rreversep, &rvolatilep, false);
3806
3807 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3808 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3809 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3810 return 0;
3811 }
3812
3813 /* See if we can find a mode to refer to this field. We should be able to,
3814 but fail if we can't. */
3815 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3816 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3817 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3818 TYPE_ALIGN (TREE_TYPE (rinner))),
3819 word_mode, false);
3820 if (nmode == VOIDmode)
3821 return 0;
3822
3823 /* Set signed and unsigned types of the precision of this mode for the
3824 shifts below. */
3825 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3826
3827 /* Compute the bit position and size for the new reference and our offset
3828 within it. If the new reference is the same size as the original, we
3829 won't optimize anything, so return zero. */
3830 nbitsize = GET_MODE_BITSIZE (nmode);
3831 nbitpos = lbitpos & ~ (nbitsize - 1);
3832 lbitpos -= nbitpos;
3833 if (nbitsize == lbitsize)
3834 return 0;
3835
3836 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3837 lbitpos = nbitsize - lbitsize - lbitpos;
3838
3839 /* Make the mask to be used against the extracted field. */
3840 mask = build_int_cst_type (unsigned_type, -1);
3841 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3842 mask = const_binop (RSHIFT_EXPR, mask,
3843 size_int (nbitsize - lbitsize - lbitpos));
3844
3845 if (! const_p)
3846 /* If not comparing with constant, just rework the comparison
3847 and return. */
3848 return fold_build2_loc (loc, code, compare_type,
3849 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3850 make_bit_field_ref (loc, linner,
3851 unsigned_type,
3852 nbitsize, nbitpos,
3853 1, lreversep),
3854 mask),
3855 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3856 make_bit_field_ref (loc, rinner,
3857 unsigned_type,
3858 nbitsize, nbitpos,
3859 1, rreversep),
3860 mask));
3861
3862 /* Otherwise, we are handling the constant case. See if the constant is too
3863 big for the field. Warn and return the comparison's constant result if so. We do
3864 this not only for its own sake, but to avoid having to test for this
3865 error case below. If we didn't, we might generate wrong code.
3866
3867 For unsigned fields, the constant shifted right by the field length should
3868 be all zero. For signed fields, the high-order bits should agree with
3869 the sign bit. */
3870
3871 if (lunsignedp)
3872 {
3873 if (wi::lrshift (rhs, lbitsize) != 0)
3874 {
3875 warning (0, "comparison is always %d due to width of bit-field",
3876 code == NE_EXPR);
3877 return constant_boolean_node (code == NE_EXPR, compare_type);
3878 }
3879 }
3880 else
3881 {
3882 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3883 if (tem != 0 && tem != -1)
3884 {
3885 warning (0, "comparison is always %d due to width of bit-field",
3886 code == NE_EXPR);
3887 return constant_boolean_node (code == NE_EXPR, compare_type);
3888 }
3889 }
3890
3891 /* Single-bit compares should always be against zero. */
3892 if (lbitsize == 1 && ! integer_zerop (rhs))
3893 {
3894 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3895 rhs = build_int_cst (type, 0);
3896 }
3897
3898 /* Make a new bitfield reference, shift the constant over the
3899 appropriate number of bits and mask it with the computed mask
3900 (in case this was a signed field). If we changed it, make a new one. */
3901 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3902 lreversep);
3903
3904 rhs = const_binop (BIT_AND_EXPR,
3905 const_binop (LSHIFT_EXPR,
3906 fold_convert_loc (loc, unsigned_type, rhs),
3907 size_int (lbitpos)),
3908 mask);
3909
3910 lhs = build2_loc (loc, code, compare_type,
3911 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3912 return lhs;
3913 }
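
/* Illustrative sketch, not part of fold-const.c; the layout and mask
   below assume a little-endian target where B occupies bits 3..7 of the
   first byte, so both functions compute the same value there.  */

struct bfc_example { unsigned a : 3; unsigned b : 5; };

int
bfc_before (struct bfc_example *p)
{
  return p->b == 4;
}

int
bfc_after (struct bfc_example *p)
{
  /* The shift implicit in the bit-field extraction is gone; only a
     masked compare of the containing byte remains.  */
  return (*(unsigned char *) p & 0xf8) == (4 << 3);
}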
3914 \f
3915 /* Subroutine for fold_truth_andor_1: decode a field reference.
3916
3917 If EXP is a comparison reference, we return the innermost reference.
3918
3919 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3920 set to the starting bit number.
3921
3922 If the innermost field can be completely contained in a mode-sized
3923 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3924
3925 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3926 otherwise it is not changed.
3927
3928 *PUNSIGNEDP is set to the signedness of the field.
3929
3930 *PREVERSEP is set to the storage order of the field.
3931
3932 *PMASK is set to the mask used. This is either contained in a
3933 BIT_AND_EXPR or derived from the width of the field.
3934
3935 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3936
3937 Return 0 if this is not a component reference or is one that we can't
3938 do anything with. */
3939
3940 static tree
3941 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3942 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3943 int *punsignedp, int *preversep, int *pvolatilep,
3944 tree *pmask, tree *pand_mask)
3945 {
3946 tree outer_type = 0;
3947 tree and_mask = 0;
3948 tree mask, inner, offset;
3949 tree unsigned_type;
3950 unsigned int precision;
3951
3952 /* All the optimizations using this function assume integer fields.
3953 There are problems with FP fields since the type_for_size call
3954 below can fail for, e.g., XFmode. */
3955 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3956 return 0;
3957
3958 /* We are interested in the bare arrangement of bits, so strip everything
3959 that doesn't affect the machine mode. However, record the type of the
3960 outermost expression if it may matter below. */
3961 if (CONVERT_EXPR_P (exp)
3962 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3963 outer_type = TREE_TYPE (exp);
3964 STRIP_NOPS (exp);
3965
3966 if (TREE_CODE (exp) == BIT_AND_EXPR)
3967 {
3968 and_mask = TREE_OPERAND (exp, 1);
3969 exp = TREE_OPERAND (exp, 0);
3970 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3971 if (TREE_CODE (and_mask) != INTEGER_CST)
3972 return 0;
3973 }
3974
3975 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3976 punsignedp, preversep, pvolatilep, false);
3977 if ((inner == exp && and_mask == 0)
3978 || *pbitsize < 0 || offset != 0
3979 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3980 return 0;
3981
3982 /* If the number of bits in the reference is the same as the bitsize of
3983 the outer type, then the outer type gives the signedness. Otherwise
3984 (in case of a small bitfield) the signedness is unchanged. */
3985 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3986 *punsignedp = TYPE_UNSIGNED (outer_type);
3987
3988 /* Compute the mask to access the bitfield. */
3989 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3990 precision = TYPE_PRECISION (unsigned_type);
3991
3992 mask = build_int_cst_type (unsigned_type, -1);
3993
3994 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3995 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3996
3997 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3998 if (and_mask != 0)
3999 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4000 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4001
4002 *pmask = mask;
4003 *pand_mask = and_mask;
4004 return inner;
4005 }
4006
4007 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4008 bit positions and the type of MASK is signed. */
4009
4010 static int
4011 all_ones_mask_p (const_tree mask, unsigned int size)
4012 {
4013 tree type = TREE_TYPE (mask);
4014 unsigned int precision = TYPE_PRECISION (type);
4015
4016 /* If this function returns true when the type of the mask is
4017 UNSIGNED, then there will be errors. In particular see
4018 gcc.c-torture/execute/990326-1.c. There does not appear to be
4019 any documentation paper trail as to why this is so. But the pre
4020 wide-int worked with that restriction and it has been preserved
4021 here. */
4022 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4023 return false;
4024
4025 return wi::mask (size, false, precision) == mask;
4026 }
4027
4028 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4029 represents the sign bit of EXP's type. If EXP represents a sign
4030 or zero extension, also test VAL against the unextended type.
4031 The return value is the (sub)expression whose sign bit is VAL,
4032 or NULL_TREE otherwise. */
4033
4034 tree
4035 sign_bit_p (tree exp, const_tree val)
4036 {
4037 int width;
4038 tree t;
4039
4040 /* Tree EXP must have an integral type. */
4041 t = TREE_TYPE (exp);
4042 if (! INTEGRAL_TYPE_P (t))
4043 return NULL_TREE;
4044
4045 /* Tree VAL must be an integer constant. */
4046 if (TREE_CODE (val) != INTEGER_CST
4047 || TREE_OVERFLOW (val))
4048 return NULL_TREE;
4049
4050 width = TYPE_PRECISION (t);
4051 if (wi::only_sign_bit_p (val, width))
4052 return exp;
4053
4054 /* Handle extension from a narrower type. */
4055 if (TREE_CODE (exp) == NOP_EXPR
4056 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4057 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4058
4059 return NULL_TREE;
4060 }
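
/* Illustrative sketch, not part of fold-const.c: with a 32-bit int,
   sign_bit_p recognizes the constant 0x80000000, allowing a masked
   sign-bit test to be folded into a cheaper signed comparison.  */

int
sign_test_before (int x)
{
  return (x & 0x80000000) != 0;
}

int
sign_test_after (int x)
{
  return x < 0;
}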
4061
4062 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4063 to be evaluated unconditionally. */
4064
4065 static int
4066 simple_operand_p (const_tree exp)
4067 {
4068 /* Strip any conversions that don't change the machine mode. */
4069 STRIP_NOPS (exp);
4070
4071 return (CONSTANT_CLASS_P (exp)
4072 || TREE_CODE (exp) == SSA_NAME
4073 || (DECL_P (exp)
4074 && ! TREE_ADDRESSABLE (exp)
4075 && ! TREE_THIS_VOLATILE (exp)
4076 && ! DECL_NONLOCAL (exp)
4077 /* Don't regard global variables as simple. They may be
4078 allocated in ways unknown to the compiler (shared memory,
4079 #pragma weak, etc). */
4080 && ! TREE_PUBLIC (exp)
4081 && ! DECL_EXTERNAL (exp)
4082 /* Weakrefs are not safe to be read, since they can be NULL.
4083 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4084 have DECL_WEAK flag set. */
4085 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4086 /* Loading a static variable is unduly expensive, but global
4087 registers aren't expensive. */
4088 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4089 }
4090
4091 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4092 to be evaluated unconditionally.
4093 In addition to simple_operand_p, we assume that comparisons, conversions,
4094 and logic-not operations are simple if their operands are simple, too. */
4095
4096 static bool
4097 simple_operand_p_2 (tree exp)
4098 {
4099 enum tree_code code;
4100
4101 if (TREE_SIDE_EFFECTS (exp)
4102 || tree_could_trap_p (exp))
4103 return false;
4104
4105 while (CONVERT_EXPR_P (exp))
4106 exp = TREE_OPERAND (exp, 0);
4107
4108 code = TREE_CODE (exp);
4109
4110 if (TREE_CODE_CLASS (code) == tcc_comparison)
4111 return (simple_operand_p (TREE_OPERAND (exp, 0))
4112 && simple_operand_p (TREE_OPERAND (exp, 1)));
4113
4114 if (code == TRUTH_NOT_EXPR)
4115 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4116
4117 return simple_operand_p (exp);
4118 }
4119
4120 \f
4121 /* The following functions are subroutines to fold_range_test and allow it to
4122 try to change a logical combination of comparisons into a range test.
4123
4124 For example, both
4125 X == 2 || X == 3 || X == 4 || X == 5
4126 and
4127 X >= 2 && X <= 5
4128 are converted to
4129 (unsigned) (X - 2) <= 3
4130
4131 We describe each set of comparisons as being either inside or outside
4132 a range, using a variable named like IN_P, and then describe the
4133 range with a lower and upper bound. If one of the bounds is omitted,
4134 it represents either the highest or lowest value of the type.
4135
4136 In the comments below, we represent a range by two numbers in brackets
4137 preceded by a "+" to designate being inside that range, or a "-" to
4138 designate being outside that range, so the condition can be inverted by
4139 flipping the prefix. An omitted bound is represented by a "-". For
4140 example, "- [-, 10]" means being outside the range starting at the lowest
4141 possible value and ending at 10, in other words, being greater than 10.
4142 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4143 always false.
4144
4145 We set up things so that the missing bounds are handled in a consistent
4146 manner so neither a missing bound nor "true" and "false" need to be
4147 handled using a special case. */
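
/* Illustrative sketch, not part of fold-const.c: both forms named in the
   comment above fold to the same unsigned subtract-and-compare.  */

int
range_before_or (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

int
range_before_and (int x)
{
  return x >= 2 && x <= 5;
}

int
range_after (int x)
{
  return (unsigned) x - 2 <= 3;
}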
4148
4149 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4150 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4151 and UPPER1_P are nonzero if the respective argument is an upper bound
4152 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4153 must be specified for a comparison. ARG1 will be converted to ARG0's
4154 type if both are specified. */
4155
4156 static tree
4157 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4158 tree arg1, int upper1_p)
4159 {
4160 tree tem;
4161 int result;
4162 int sgn0, sgn1;
4163
4164 /* If neither arg represents infinity, do the normal operation.
4165 Else, if not a comparison, return infinity. Else handle the special
4166 comparison rules. Note that most of the cases below won't occur, but
4167 are handled for consistency. */
4168
4169 if (arg0 != 0 && arg1 != 0)
4170 {
4171 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4172 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4173 STRIP_NOPS (tem);
4174 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4175 }
4176
4177 if (TREE_CODE_CLASS (code) != tcc_comparison)
4178 return 0;
4179
4180 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4181 for neither. In real mathematics, we cannot assume open-ended ranges
4182 are the same. But this is computer arithmetic, where numbers are finite.
4183 We can therefore represent an unbounded bound by a value Z,
4184 Z being greater than any representable number. This permits
4185 us to treat unbounded bounds as equal to each other. */
4186 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4187 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4188 switch (code)
4189 {
4190 case EQ_EXPR:
4191 result = sgn0 == sgn1;
4192 break;
4193 case NE_EXPR:
4194 result = sgn0 != sgn1;
4195 break;
4196 case LT_EXPR:
4197 result = sgn0 < sgn1;
4198 break;
4199 case LE_EXPR:
4200 result = sgn0 <= sgn1;
4201 break;
4202 case GT_EXPR:
4203 result = sgn0 > sgn1;
4204 break;
4205 case GE_EXPR:
4206 result = sgn0 >= sgn1;
4207 break;
4208 default:
4209 gcc_unreachable ();
4210 }
4211
4212 return constant_boolean_node (result, type);
4213 }
4214 \f
4215 /* Helper routine for make_range. Perform one step for it; return the
4216 new expression if the loop should continue or NULL_TREE if it should
4217 stop. */
4218
4219 tree
4220 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4221 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4222 bool *strict_overflow_p)
4223 {
4224 tree arg0_type = TREE_TYPE (arg0);
4225 tree n_low, n_high, low = *p_low, high = *p_high;
4226 int in_p = *p_in_p, n_in_p;
4227
4228 switch (code)
4229 {
4230 case TRUTH_NOT_EXPR:
4231 /* We can only do something if the range is testing for zero. */
4232 if (low == NULL_TREE || high == NULL_TREE
4233 || ! integer_zerop (low) || ! integer_zerop (high))
4234 return NULL_TREE;
4235 *p_in_p = ! in_p;
4236 return arg0;
4237
4238 case EQ_EXPR: case NE_EXPR:
4239 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4240 /* We can only do something if the range is testing for zero
4241 and if the second operand is an integer constant. Note that
4242 saying something is "in" the range we make is done by
4243 complementing IN_P, since it is set in the initial case of
4244 being not equal to zero; "out" is leaving it alone. */
4245 if (low == NULL_TREE || high == NULL_TREE
4246 || ! integer_zerop (low) || ! integer_zerop (high)
4247 || TREE_CODE (arg1) != INTEGER_CST)
4248 return NULL_TREE;
4249
4250 switch (code)
4251 {
4252 case NE_EXPR: /* - [c, c] */
4253 low = high = arg1;
4254 break;
4255 case EQ_EXPR: /* + [c, c] */
4256 in_p = ! in_p, low = high = arg1;
4257 break;
4258 case GT_EXPR: /* - [-, c] */
4259 low = 0, high = arg1;
4260 break;
4261 case GE_EXPR: /* + [c, -] */
4262 in_p = ! in_p, low = arg1, high = 0;
4263 break;
4264 case LT_EXPR: /* - [c, -] */
4265 low = arg1, high = 0;
4266 break;
4267 case LE_EXPR: /* + [-, c] */
4268 in_p = ! in_p, low = 0, high = arg1;
4269 break;
4270 default:
4271 gcc_unreachable ();
4272 }
4273
4274 /* If this is an unsigned comparison, we also know that EXP is
4275 greater than or equal to zero. We base the range tests we make
4276 on that fact, so we record it here so we can parse existing
4277 range tests. We test arg0_type since often the return type
4278 of, e.g. EQ_EXPR, is boolean. */
4279 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4280 {
4281 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4282 in_p, low, high, 1,
4283 build_int_cst (arg0_type, 0),
4284 NULL_TREE))
4285 return NULL_TREE;
4286
4287 in_p = n_in_p, low = n_low, high = n_high;
4288
4289 /* If the high bound is missing, but we have a nonzero low
4290 bound, reverse the range so it goes from zero to the low bound
4291 minus 1. */
4292 if (high == 0 && low && ! integer_zerop (low))
4293 {
4294 in_p = ! in_p;
4295 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4296 build_int_cst (TREE_TYPE (low), 1), 0);
4297 low = build_int_cst (arg0_type, 0);
4298 }
4299 }
4300
4301 *p_low = low;
4302 *p_high = high;
4303 *p_in_p = in_p;
4304 return arg0;
4305
4306 case NEGATE_EXPR:
4307 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4308 low and high are non-NULL, then normalize will DTRT. */
4309 if (!TYPE_UNSIGNED (arg0_type)
4310 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4311 {
4312 if (low == NULL_TREE)
4313 low = TYPE_MIN_VALUE (arg0_type);
4314 if (high == NULL_TREE)
4315 high = TYPE_MAX_VALUE (arg0_type);
4316 }
4317
4318 /* (-x) IN [a,b] -> x in [-b, -a] */
4319 n_low = range_binop (MINUS_EXPR, exp_type,
4320 build_int_cst (exp_type, 0),
4321 0, high, 1);
4322 n_high = range_binop (MINUS_EXPR, exp_type,
4323 build_int_cst (exp_type, 0),
4324 0, low, 0);
4325 if (n_high != 0 && TREE_OVERFLOW (n_high))
4326 return NULL_TREE;
4327 goto normalize;
4328
4329 case BIT_NOT_EXPR:
4330 /* ~ X -> -X - 1 */
4331 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4332 build_int_cst (exp_type, 1));
4333
4334 case PLUS_EXPR:
4335 case MINUS_EXPR:
4336 if (TREE_CODE (arg1) != INTEGER_CST)
4337 return NULL_TREE;
4338
4339 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4340 move a constant to the other side. */
4341 if (!TYPE_UNSIGNED (arg0_type)
4342 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4343 return NULL_TREE;
4344
4345 /* If EXP is signed, any overflow in the computation is undefined,
4346 so we don't worry about it so long as our computations on
4347 the bounds don't overflow. For unsigned, overflow is defined
4348 and this is exactly the right thing. */
4349 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4350 arg0_type, low, 0, arg1, 0);
4351 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4352 arg0_type, high, 1, arg1, 0);
4353 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4354 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4355 return NULL_TREE;
4356
4357 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4358 *strict_overflow_p = true;
4359
4360 normalize:
4361 /* Check for an unsigned range which has wrapped around the maximum
4362 value thus making n_high < n_low, and normalize it. */
4363 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4364 {
4365 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4366 build_int_cst (TREE_TYPE (n_high), 1), 0);
4367 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4368 build_int_cst (TREE_TYPE (n_low), 1), 0);
4369
4370 /* If the range is of the form +/- [ x+1, x ], we won't
4371 be able to normalize it. But then, it represents the
4372 whole range or the empty set, so make it
4373 +/- [ -, - ]. */
4374 if (tree_int_cst_equal (n_low, low)
4375 && tree_int_cst_equal (n_high, high))
4376 low = high = 0;
4377 else
4378 in_p = ! in_p;
4379 }
4380 else
4381 low = n_low, high = n_high;
4382
4383 *p_low = low;
4384 *p_high = high;
4385 *p_in_p = in_p;
4386 return arg0;
4387
4388 CASE_CONVERT:
4389 case NON_LVALUE_EXPR:
4390 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4391 return NULL_TREE;
4392
4393 if (! INTEGRAL_TYPE_P (arg0_type)
4394 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4395 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4396 return NULL_TREE;
4397
4398 n_low = low, n_high = high;
4399
4400 if (n_low != 0)
4401 n_low = fold_convert_loc (loc, arg0_type, n_low);
4402
4403 if (n_high != 0)
4404 n_high = fold_convert_loc (loc, arg0_type, n_high);
4405
4406 /* If we're converting arg0 from an unsigned type to exp's
4407 signed type, we will be doing the comparison as unsigned.
4408 The tests above have already verified that LOW and HIGH
4409 are both positive.
4410
4411 So we have to ensure that we will handle large unsigned
4412 values the same way that the current signed bounds treat
4413 negative values. */
4414
4415 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4416 {
4417 tree high_positive;
4418 tree equiv_type;
4419 /* For fixed-point modes, we need to pass the saturating flag
4420 as the 2nd parameter. */
4421 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4422 equiv_type
4423 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4424 TYPE_SATURATING (arg0_type));
4425 else
4426 equiv_type
4427 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4428
4429 /* A range without an upper bound is, naturally, unbounded.
4430 Since convert would have cropped a very large value, use
4431 the max value for the destination type. */
4432 high_positive
4433 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4434 : TYPE_MAX_VALUE (arg0_type);
4435
4436 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4437 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4438 fold_convert_loc (loc, arg0_type,
4439 high_positive),
4440 build_int_cst (arg0_type, 1));
4441
4442 /* If the low bound is specified, "and" the range with the
4443 range for which the original unsigned value will be
4444 positive. */
4445 if (low != 0)
4446 {
4447 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4448 1, fold_convert_loc (loc, arg0_type,
4449 integer_zero_node),
4450 high_positive))
4451 return NULL_TREE;
4452
4453 in_p = (n_in_p == in_p);
4454 }
4455 else
4456 {
4457 /* Otherwise, "or" the range with the range of the input
4458 that will be interpreted as negative. */
4459 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4460 1, fold_convert_loc (loc, arg0_type,
4461 integer_zero_node),
4462 high_positive))
4463 return NULL_TREE;
4464
4465 in_p = (in_p != n_in_p);
4466 }
4467 }
4468
4469 *p_low = n_low;
4470 *p_high = n_high;
4471 *p_in_p = in_p;
4472 return arg0;
4473
4474 default:
4475 return NULL_TREE;
4476 }
4477 }
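
/* Worked example (illustrative, not part of fold-const.c) of the
   PLUS_EXPR case and the normalize step above, for unsigned int x:

     x + 2 <= 3   starts as the range + [0, 3] on x + 2;
     moving the constant across gives [-2, 1], which wraps around
     (n_high < n_low), so normalize flips IN_P: - [2, UINT_MAX - 2],

   i.e. the test accepts exactly x in {0, 1, UINT_MAX - 1, UINT_MAX}.  */

int
wrapped_range (unsigned int x)
{
  return x + 2 <= 3;
}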
4478
4479 /* Given EXP, a logical expression, set the range it is testing into
4480 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4481 actually being tested. *PLOW and *PHIGH will be made of the same
4482 type as the returned expression. If EXP is not a comparison, we
4483 will most likely not be returning a useful value and range. Set
4484 *STRICT_OVERFLOW_P to true if the return value is only valid
4485 because signed overflow is undefined; otherwise, do not change
4486 *STRICT_OVERFLOW_P. */
4487
4488 tree
4489 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4490 bool *strict_overflow_p)
4491 {
4492 enum tree_code code;
4493 tree arg0, arg1 = NULL_TREE;
4494 tree exp_type, nexp;
4495 int in_p;
4496 tree low, high;
4497 location_t loc = EXPR_LOCATION (exp);
4498
4499 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4500 and see if we can refine the range. Some of the cases below may not
4501 happen, but it doesn't seem worth worrying about this. We keep
4502 iterating as long as make_range_step refines the range; when it
4503 returns NULL_TREE we stop. */
4504
4505 in_p = 0;
4506 low = high = build_int_cst (TREE_TYPE (exp), 0);
4507
4508 while (1)
4509 {
4510 code = TREE_CODE (exp);
4511 exp_type = TREE_TYPE (exp);
4512 arg0 = NULL_TREE;
4513
4514 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4515 {
4516 if (TREE_OPERAND_LENGTH (exp) > 0)
4517 arg0 = TREE_OPERAND (exp, 0);
4518 if (TREE_CODE_CLASS (code) == tcc_binary
4519 || TREE_CODE_CLASS (code) == tcc_comparison
4520 || (TREE_CODE_CLASS (code) == tcc_expression
4521 && TREE_OPERAND_LENGTH (exp) > 1))
4522 arg1 = TREE_OPERAND (exp, 1);
4523 }
4524 if (arg0 == NULL_TREE)
4525 break;
4526
4527 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4528 &high, &in_p, strict_overflow_p);
4529 if (nexp == NULL_TREE)
4530 break;
4531 exp = nexp;
4532 }
4533
4534 /* If EXP is a constant, we can evaluate whether this is true or false. */
4535 if (TREE_CODE (exp) == INTEGER_CST)
4536 {
4537 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4538 exp, 0, low, 0))
4539 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4540 exp, 1, high, 1)));
4541 low = high = 0;
4542 exp = 0;
4543 }
4544
4545 *pin_p = in_p, *plow = low, *phigh = high;
4546 return exp;
4547 }
4548 \f
4549 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4550 type, TYPE, return an expression to test if EXP is in (or out of, depending
4551 on IN_P) the range. Return 0 if the test couldn't be created. */
4552
4553 tree
4554 build_range_check (location_t loc, tree type, tree exp, int in_p,
4555 tree low, tree high)
4556 {
4557 tree etype = TREE_TYPE (exp), value;
4558
4559 /* Disable this optimization for function pointer expressions
4560 on targets that require function pointer canonicalization. */
4561 if (targetm.have_canonicalize_funcptr_for_compare ()
4562 && TREE_CODE (etype) == POINTER_TYPE
4563 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4564 return NULL_TREE;
4565
4566 if (! in_p)
4567 {
4568 value = build_range_check (loc, type, exp, 1, low, high);
4569 if (value != 0)
4570 return invert_truthvalue_loc (loc, value);
4571
4572 return 0;
4573 }
4574
4575 if (low == 0 && high == 0)
4576 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4577
4578 if (low == 0)
4579 return fold_build2_loc (loc, LE_EXPR, type, exp,
4580 fold_convert_loc (loc, etype, high));
4581
4582 if (high == 0)
4583 return fold_build2_loc (loc, GE_EXPR, type, exp,
4584 fold_convert_loc (loc, etype, low));
4585
4586 if (operand_equal_p (low, high, 0))
4587 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4588 fold_convert_loc (loc, etype, low));
4589
4590 if (integer_zerop (low))
4591 {
4592 if (! TYPE_UNSIGNED (etype))
4593 {
4594 etype = unsigned_type_for (etype);
4595 high = fold_convert_loc (loc, etype, high);
4596 exp = fold_convert_loc (loc, etype, exp);
4597 }
4598 return build_range_check (loc, type, exp, 1, 0, high);
4599 }
4600
4601 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4602 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4603 {
4604 int prec = TYPE_PRECISION (etype);
4605
4606 if (wi::mask (prec - 1, false, prec) == high)
4607 {
4608 if (TYPE_UNSIGNED (etype))
4609 {
4610 tree signed_etype = signed_type_for (etype);
4611 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4612 etype
4613 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4614 else
4615 etype = signed_etype;
4616 exp = fold_convert_loc (loc, etype, exp);
4617 }
4618 return fold_build2_loc (loc, GT_EXPR, type, exp,
4619 build_int_cst (etype, 0));
4620 }
4621 }
4622
4623 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4624 This requires wrap-around arithmetic for the type of the expression.
4625 First make sure that arithmetic in this type is valid, then make sure
4626 that it wraps around. */
4627 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4628 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4629 TYPE_UNSIGNED (etype));
4630
4631 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4632 {
4633 tree utype, minv, maxv;
4634
4635 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4636 for the type in question, as we rely on this here. */
4637 utype = unsigned_type_for (etype);
4638 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4639 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4640 build_int_cst (TREE_TYPE (maxv), 1), 1);
4641 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4642
4643 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4644 minv, 1, maxv, 1)))
4645 etype = utype;
4646 else
4647 return 0;
4648 }
4649
4650 high = fold_convert_loc (loc, etype, high);
4651 low = fold_convert_loc (loc, etype, low);
4652 exp = fold_convert_loc (loc, etype, exp);
4653
4654 value = const_binop (MINUS_EXPR, high, low);
4655
4657 if (POINTER_TYPE_P (etype))
4658 {
4659 if (value != 0 && !TREE_OVERFLOW (value))
4660 {
4661 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4662 return build_range_check (loc, type,
4663 fold_build_pointer_plus_loc (loc, exp, low),
4664 1, build_int_cst (etype, 0), value);
4665 }
4666 return 0;
4667 }
4668
4669 if (value != 0 && !TREE_OVERFLOW (value))
4670 return build_range_check (loc, type,
4671 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4672 1, build_int_cst (etype, 0), value);
4673
4674 return 0;
4675 }
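
/* Illustrative sketch, not part of fold-const.c: the subtract-and-compare
   fold and the [1, 127] special case above, at the source level.  */

int
digit_before (int c)
{
  return c >= '0' && c <= '9';
}

int
digit_after (int c)
{
  /* (c >= low && c <= high) -> (c - low) <= (high - low), unsigned.  */
  return (unsigned) c - '0' <= 9;
}

int
half_range_before (unsigned char c)
{
  return c >= 1 && c <= 127;
}

int
half_range_after (unsigned char c)
{
  /* The [1, 2^(prec-1) - 1] range becomes a signed compare with zero.  */
  return (signed char) c > 0;
}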
4676 \f
4677 /* Return the predecessor of VAL in its type, handling the infinite case. */
4678
4679 static tree
4680 range_predecessor (tree val)
4681 {
4682 tree type = TREE_TYPE (val);
4683
4684 if (INTEGRAL_TYPE_P (type)
4685 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4686 return 0;
4687 else
4688 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4689 build_int_cst (TREE_TYPE (val), 1), 0);
4690 }
4691
4692 /* Return the successor of VAL in its type, handling the infinite case. */
4693
4694 static tree
4695 range_successor (tree val)
4696 {
4697 tree type = TREE_TYPE (val);
4698
4699 if (INTEGRAL_TYPE_P (type)
4700 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4701 return 0;
4702 else
4703 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4704 build_int_cst (TREE_TYPE (val), 1), 0);
4705 }
4706
4707 /* Given two ranges, see if we can merge them into one. Return 1 if we
4708 can, 0 if we can't. Set the output range into the specified parameters. */
4709
4710 bool
4711 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4712 tree high0, int in1_p, tree low1, tree high1)
4713 {
4714 int no_overlap;
4715 int subset;
4716 int temp;
4717 tree tem;
4718 int in_p;
4719 tree low, high;
4720 int lowequal = ((low0 == 0 && low1 == 0)
4721 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4722 low0, 0, low1, 0)));
4723 int highequal = ((high0 == 0 && high1 == 0)
4724 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4725 high0, 1, high1, 1)));
4726
4727 /* Make range 0 be the range that starts first, or ends last if they
4728 start at the same value. Swap them if it isn't. */
4729 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4730 low0, 0, low1, 0))
4731 || (lowequal
4732 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4733 high1, 1, high0, 1))))
4734 {
4735 temp = in0_p, in0_p = in1_p, in1_p = temp;
4736 tem = low0, low0 = low1, low1 = tem;
4737 tem = high0, high0 = high1, high1 = tem;
4738 }
4739
4740 /* Now flag two cases, whether the ranges are disjoint or whether the
4741 second range is totally subsumed in the first. Note that the tests
4742 below are simplified by the ones above. */
4743 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4744 high0, 1, low1, 0));
4745 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4746 high1, 1, high0, 1));
4747
4748 /* We now have four cases, depending on whether we are including or
4749 excluding the two ranges. */
4750 if (in0_p && in1_p)
4751 {
4752 /* If they don't overlap, the result is false. If the second range
4753 is a subset it is the result. Otherwise, the range is from the start
4754 of the second to the end of the first. */
4755 if (no_overlap)
4756 in_p = 0, low = high = 0;
4757 else if (subset)
4758 in_p = 1, low = low1, high = high1;
4759 else
4760 in_p = 1, low = low1, high = high0;
4761 }
4762
4763 else if (in0_p && ! in1_p)
4764 {
4765 /* If they don't overlap, the result is the first range. If they are
4766 equal, the result is false. If the second range is a subset of the
4767 first, and the ranges begin at the same place, we go from just after
4768 the end of the second range to the end of the first. If the second
4769 range is not a subset of the first, or if it is a subset and both
4770 ranges end at the same place, the range starts at the start of the
4771 first range and ends just before the second range.
4772 Otherwise, we can't describe this as a single range. */
4773 if (no_overlap)
4774 in_p = 1, low = low0, high = high0;
4775 else if (lowequal && highequal)
4776 in_p = 0, low = high = 0;
4777 else if (subset && lowequal)
4778 {
4779 low = range_successor (high1);
4780 high = high0;
4781 in_p = 1;
4782 if (low == 0)
4783 {
4784 /* We are in the weird situation where high0 > high1 but
4785 high1 has no successor. Punt. */
4786 return 0;
4787 }
4788 }
4789 else if (! subset || highequal)
4790 {
4791 low = low0;
4792 high = range_predecessor (low1);
4793 in_p = 1;
4794 if (high == 0)
4795 {
4796 /* low0 < low1 but low1 has no predecessor. Punt. */
4797 return 0;
4798 }
4799 }
4800 else
4801 return 0;
4802 }
4803
4804 else if (! in0_p && in1_p)
4805 {
4806 /* If they don't overlap, the result is the second range. If the second
4807 is a subset of the first, the result is false. Otherwise,
4808 the range starts just after the first range and ends at the
4809 end of the second. */
4810 if (no_overlap)
4811 in_p = 1, low = low1, high = high1;
4812 else if (subset || highequal)
4813 in_p = 0, low = high = 0;
4814 else
4815 {
4816 low = range_successor (high0);
4817 high = high1;
4818 in_p = 1;
4819 if (low == 0)
4820 {
4821 /* high1 > high0 but high0 has no successor. Punt. */
4822 return 0;
4823 }
4824 }
4825 }
4826
4827 else
4828 {
4829 /* The case where we are excluding both ranges. Here the complex case
4830 is if they don't overlap. In that case, the only time we have a
4831 range is if they are adjacent. If the second is a subset of the
4832 first, the result is the first. Otherwise, the range to exclude
4833 starts at the beginning of the first range and ends at the end of the
4834 second. */
4835 if (no_overlap)
4836 {
4837 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4838 range_successor (high0),
4839 1, low1, 0)))
4840 in_p = 0, low = low0, high = high1;
4841 else
4842 {
4843 /* Canonicalize - [min, x] into - [-, x]. */
4844 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4845 switch (TREE_CODE (TREE_TYPE (low0)))
4846 {
4847 case ENUMERAL_TYPE:
4848 if (TYPE_PRECISION (TREE_TYPE (low0))
4849 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4850 break;
4851 /* FALLTHROUGH */
4852 case INTEGER_TYPE:
4853 if (tree_int_cst_equal (low0,
4854 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4855 low0 = 0;
4856 break;
4857 case POINTER_TYPE:
4858 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4859 && integer_zerop (low0))
4860 low0 = 0;
4861 break;
4862 default:
4863 break;
4864 }
4865
4866 /* Canonicalize - [x, max] into - [x, -]. */
4867 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4868 switch (TREE_CODE (TREE_TYPE (high1)))
4869 {
4870 case ENUMERAL_TYPE:
4871 if (TYPE_PRECISION (TREE_TYPE (high1))
4872 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4873 break;
4874 /* FALLTHROUGH */
4875 case INTEGER_TYPE:
4876 if (tree_int_cst_equal (high1,
4877 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4878 high1 = 0;
4879 break;
4880 case POINTER_TYPE:
4881 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4882 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4883 high1, 1,
4884 build_int_cst (TREE_TYPE (high1), 1),
4885 1)))
4886 high1 = 0;
4887 break;
4888 default:
4889 break;
4890 }
4891
4892 /* The ranges might be also adjacent between the maximum and
4893 minimum values of the given type. For
4894 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4895 return + [x + 1, y - 1]. */
4896 if (low0 == 0 && high1 == 0)
4897 {
4898 low = range_successor (high0);
4899 high = range_predecessor (low1);
4900 if (low == 0 || high == 0)
4901 return 0;
4902
4903 in_p = 1;
4904 }
4905 else
4906 return 0;
4907 }
4908 }
4909 else if (subset)
4910 in_p = 0, low = low0, high = high0;
4911 else
4912 in_p = 0, low = low0, high = high1;
4913 }
4914
4915 *pin_p = in_p, *plow = low, *phigh = high;
4916 return 1;
4917 }
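
/* Worked example, not part of fold-const.c: for x >= 2 && x <= 5 the two
   ranges are + [2, -] and + [-, 5].  The swap above makes + [-, 5] range 0
   since it starts first; the ranges overlap and neither is a subset of the
   other, so the in0_p && in1_p case yields low = low1, high = high0, i.e.
   the single range + [2, 5].  */

int
merged_range (int x)
{
  return x >= 2 && x <= 5;
}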
4918 \f
4919
4920 /* Subroutine of fold, looking inside expressions of the form
4921 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4922 of the COND_EXPR. This function is being used also to optimize
4923 A op B ? C : A, by reversing the comparison first.
4924
4925 Return a folded expression whose code is not a COND_EXPR
4926 anymore, or NULL_TREE if no folding opportunity is found. */
4927
4928 static tree
4929 fold_cond_expr_with_comparison (location_t loc, tree type,
4930 tree arg0, tree arg1, tree arg2)
4931 {
4932 enum tree_code comp_code = TREE_CODE (arg0);
4933 tree arg00 = TREE_OPERAND (arg0, 0);
4934 tree arg01 = TREE_OPERAND (arg0, 1);
4935 tree arg1_type = TREE_TYPE (arg1);
4936 tree tem;
4937
4938 STRIP_NOPS (arg1);
4939 STRIP_NOPS (arg2);
4940
4941 /* If we have A op 0 ? A : -A, consider applying the following
4942 transformations:
4943
4944 A == 0? A : -A same as -A
4945 A != 0? A : -A same as A
4946 A >= 0? A : -A same as abs (A)
4947 A > 0? A : -A same as abs (A)
4948 A <= 0? A : -A same as -abs (A)
4949 A < 0? A : -A same as -abs (A)
4950
4951 None of these transformations work for modes with signed
4952 zeros. If A is +/-0, the first two transformations will
4953 change the sign of the result (from +0 to -0, or vice
4954 versa). The last four will fix the sign of the result,
4955 even though the original expressions could be positive or
4956 negative, depending on the sign of A.
4957
4958 Note that all these transformations are correct if A is
4959 NaN, since the two alternatives (A and -A) are also NaNs. */
4960 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4961 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4962 ? real_zerop (arg01)
4963 : integer_zerop (arg01))
4964 && ((TREE_CODE (arg2) == NEGATE_EXPR
4965 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4966 /* In the case that A is of the form X-Y, '-A' (arg2) may
4967 have already been folded to Y-X, check for that. */
4968 || (TREE_CODE (arg1) == MINUS_EXPR
4969 && TREE_CODE (arg2) == MINUS_EXPR
4970 && operand_equal_p (TREE_OPERAND (arg1, 0),
4971 TREE_OPERAND (arg2, 1), 0)
4972 && operand_equal_p (TREE_OPERAND (arg1, 1),
4973 TREE_OPERAND (arg2, 0), 0))))
4974 switch (comp_code)
4975 {
4976 case EQ_EXPR:
4977 case UNEQ_EXPR:
4978 tem = fold_convert_loc (loc, arg1_type, arg1);
4979 return pedantic_non_lvalue_loc (loc,
4980 fold_convert_loc (loc, type,
4981 negate_expr (tem)));
4982 case NE_EXPR:
4983 case LTGT_EXPR:
4984 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4985 case UNGE_EXPR:
4986 case UNGT_EXPR:
4987 if (flag_trapping_math)
4988 break;
4989 /* Fall through. */
4990 case GE_EXPR:
4991 case GT_EXPR:
4992 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4993 break;
4994 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4995 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4996 case UNLE_EXPR:
4997 case UNLT_EXPR:
4998 if (flag_trapping_math)
4999 break;
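/* Fall through.  */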
5000 case LE_EXPR:
5001 case LT_EXPR:
5002 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5003 break;
5004 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5005 return negate_expr (fold_convert_loc (loc, type, tem));
5006 default:
5007 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5008 break;
5009 }
5010
5011 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5012 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5013 both transformations are correct when A is NaN: A != 0
5014 is then true, and A == 0 is false. */
5015
5016 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5017 && integer_zerop (arg01) && integer_zerop (arg2))
5018 {
5019 if (comp_code == NE_EXPR)
5020 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5021 else if (comp_code == EQ_EXPR)
5022 return build_zero_cst (type);
5023 }
5024
5025 /* Try some transformations of A op B ? A : B.
5026
5027 A == B? A : B same as B
5028 A != B? A : B same as A
5029 A >= B? A : B same as max (A, B)
5030 A > B? A : B same as max (B, A)
5031 A <= B? A : B same as min (A, B)
5032 A < B? A : B same as min (B, A)
5033
5034 As above, these transformations don't work in the presence
5035 of signed zeros. For example, if A and B are zeros of
5036 opposite sign, the first two transformations will change
5037 the sign of the result. In the last four, the original
5038 expressions give different results for (A=+0, B=-0) and
5039 (A=-0, B=+0), but the transformed expressions do not.
5040
5041 The first two transformations are correct if either A or B
5042 is a NaN. In the first transformation, the condition will
5043 be false, and B will indeed be chosen. In the case of the
5044 second transformation, the condition A != B will be true,
5045 and A will be chosen.
5046
5047 The conversions to max() and min() are not correct if B is
5048 a number and A is not. The conditions in the original
5049 expressions will be false, so all four give B. The min()
5050 and max() versions would give a NaN instead. */
5051 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5052 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5053 /* Avoid these transformations if the COND_EXPR may be used
5054 as an lvalue in the C++ front-end. PR c++/19199. */
5055 && (in_gimple_form
5056 || VECTOR_TYPE_P (type)
5057 || (! lang_GNU_CXX ()
5058 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5059 || ! maybe_lvalue_p (arg1)
5060 || ! maybe_lvalue_p (arg2)))
5061 {
5062 tree comp_op0 = arg00;
5063 tree comp_op1 = arg01;
5064 tree comp_type = TREE_TYPE (comp_op0);
5065
5066 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5067 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5068 {
5069 comp_type = type;
5070 comp_op0 = arg1;
5071 comp_op1 = arg2;
5072 }
5073
5074 switch (comp_code)
5075 {
5076 case EQ_EXPR:
5077 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5078 case NE_EXPR:
5079 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5080 case LE_EXPR:
5081 case LT_EXPR:
5082 case UNLE_EXPR:
5083 case UNLT_EXPR:
5084 /* In C++ a ?: expression can be an lvalue, so put the
5085 operand which will be used if they are equal first
5086 so that we can convert this back to the
5087 corresponding COND_EXPR. */
5088 if (!HONOR_NANS (arg1))
5089 {
5090 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5091 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5092 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5093 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5094 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5095 comp_op1, comp_op0);
5096 return pedantic_non_lvalue_loc (loc,
5097 fold_convert_loc (loc, type, tem));
5098 }
5099 break;
5100 case GE_EXPR:
5101 case GT_EXPR:
5102 case UNGE_EXPR:
5103 case UNGT_EXPR:
5104 if (!HONOR_NANS (arg1))
5105 {
5106 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5107 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5108 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5109 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5110 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5111 comp_op1, comp_op0);
5112 return pedantic_non_lvalue_loc (loc,
5113 fold_convert_loc (loc, type, tem));
5114 }
5115 break;
5116 case UNEQ_EXPR:
5117 if (!HONOR_NANS (arg1))
5118 return pedantic_non_lvalue_loc (loc,
5119 fold_convert_loc (loc, type, arg2));
5120 break;
5121 case LTGT_EXPR:
5122 if (!HONOR_NANS (arg1))
5123 return pedantic_non_lvalue_loc (loc,
5124 fold_convert_loc (loc, type, arg1));
5125 break;
5126 default:
5127 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5128 break;
5129 }
5130 }
5131
5132 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5133 we might still be able to simplify this. For example,
5134 if C1 is one less or one more than C2, this might have started
5135 out as a MIN or MAX and been transformed by this function.
5136 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5137
5138 if (INTEGRAL_TYPE_P (type)
5139 && TREE_CODE (arg01) == INTEGER_CST
5140 && TREE_CODE (arg2) == INTEGER_CST)
5141 switch (comp_code)
5142 {
5143 case EQ_EXPR:
5144 if (TREE_CODE (arg1) == INTEGER_CST)
5145 break;
5146 /* We can replace A with C1 in this case. */
5147 arg1 = fold_convert_loc (loc, type, arg01);
5148 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5149
5150 case LT_EXPR:
5151 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5152 MIN_EXPR, to preserve the signedness of the comparison. */
5153 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5154 OEP_ONLY_CONST)
5155 && operand_equal_p (arg01,
5156 const_binop (PLUS_EXPR, arg2,
5157 build_int_cst (type, 1)),
5158 OEP_ONLY_CONST))
5159 {
5160 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5161 fold_convert_loc (loc, TREE_TYPE (arg00),
5162 arg2));
5163 return pedantic_non_lvalue_loc (loc,
5164 fold_convert_loc (loc, type, tem));
5165 }
5166 break;
5167
5168 case LE_EXPR:
5169 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5170 as above. */
5171 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5172 OEP_ONLY_CONST)
5173 && operand_equal_p (arg01,
5174 const_binop (MINUS_EXPR, arg2,
5175 build_int_cst (type, 1)),
5176 OEP_ONLY_CONST))
5177 {
5178 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5179 fold_convert_loc (loc, TREE_TYPE (arg00),
5180 arg2));
5181 return pedantic_non_lvalue_loc (loc,
5182 fold_convert_loc (loc, type, tem));
5183 }
5184 break;
5185
5186 case GT_EXPR:
5187 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5188 MAX_EXPR, to preserve the signedness of the comparison. */
5189 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5190 OEP_ONLY_CONST)
5191 && operand_equal_p (arg01,
5192 const_binop (MINUS_EXPR, arg2,
5193 build_int_cst (type, 1)),
5194 OEP_ONLY_CONST))
5195 {
5196 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5197 fold_convert_loc (loc, TREE_TYPE (arg00),
5198 arg2));
5199 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5200 }
5201 break;
5202
5203 case GE_EXPR:
5204 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5205 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5206 OEP_ONLY_CONST)
5207 && operand_equal_p (arg01,
5208 const_binop (PLUS_EXPR, arg2,
5209 build_int_cst (type, 1)),
5210 OEP_ONLY_CONST))
5211 {
5212 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5213 fold_convert_loc (loc, TREE_TYPE (arg00),
5214 arg2));
5215 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5216 }
5217 break;
5218 case NE_EXPR:
5219 break;
5220 default:
5221 gcc_unreachable ();
5222 }
5223
5224 return NULL_TREE;
5225 }
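
/* Illustrative sketch, not part of fold-const.c: the A op 0 ? A : -A and
   A op B ? A : B families above, at the source level.  */

int
abs_fold (int a)
{
  return a >= 0 ? a : -a;	/* -> ABS_EXPR <a> */
}

int
max_fold (int a, int b)
{
  return a > b ? a : b;		/* -> MAX_EXPR <b, a> */
}

int
min_const_fold (int a)
{
  /* A < C1 ? A : C2 with C1 == C2 + 1 is recognized as a MIN_EXPR.  */
  return a < 43 ? a : 42;	/* -> MIN_EXPR <a, 42> */
}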
5226
5227
5228 \f
5229 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5230 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5231 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5232 false) >= 2)
5233 #endif
5234
5235 /* EXP is some logical combination of boolean tests. See if we can
5236 merge it into some range test. Return the new tree if so. */
5237
5238 static tree
5239 fold_range_test (location_t loc, enum tree_code code, tree type,
5240 tree op0, tree op1)
5241 {
5242 int or_op = (code == TRUTH_ORIF_EXPR
5243 || code == TRUTH_OR_EXPR);
5244 int in0_p, in1_p, in_p;
5245 tree low0, low1, low, high0, high1, high;
5246 bool strict_overflow_p = false;
5247 tree tem, lhs, rhs;
5248 const char * const warnmsg = G_("assuming signed overflow does not occur "
5249 "when simplifying range test");
5250
5251 if (!INTEGRAL_TYPE_P (type))
5252 return 0;
5253
5254 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5255 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5256
5257 /* If this is an OR operation, invert both sides; we will invert
5258 again at the end. */
5259 if (or_op)
5260 in0_p = ! in0_p, in1_p = ! in1_p;
5261
5262 /* If both expressions are the same, if we can merge the ranges, and we
5263 can build the range test, return it or it inverted. If one of the
5264 ranges is always true or always false, consider it to be the same
5265 expression as the other. */
5266 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5267 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5268 in1_p, low1, high1)
5269 && 0 != (tem = (build_range_check (loc, type,
5270 lhs != 0 ? lhs
5271 : rhs != 0 ? rhs : integer_zero_node,
5272 in_p, low, high))))
5273 {
5274 if (strict_overflow_p)
5275 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5276 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5277 }
5278
5279 /* On machines where the branch cost is expensive, if this is a
5280 short-circuited branch and the underlying object on both sides
5281 is the same, make a non-short-circuit operation. */
5282 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5283 && lhs != 0 && rhs != 0
5284 && (code == TRUTH_ANDIF_EXPR
5285 || code == TRUTH_ORIF_EXPR)
5286 && operand_equal_p (lhs, rhs, 0))
5287 {
5288 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5289 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5290 which cases we can't do this. */
5291 if (simple_operand_p (lhs))
5292 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5293 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5294 type, op0, op1);
5295
5296 else if (!lang_hooks.decls.global_bindings_p ()
5297 && !CONTAINS_PLACEHOLDER_P (lhs))
5298 {
5299 tree common = save_expr (lhs);
5300
5301 if (0 != (lhs = build_range_check (loc, type, common,
5302 or_op ? ! in0_p : in0_p,
5303 low0, high0))
5304 && (0 != (rhs = build_range_check (loc, type, common,
5305 or_op ? ! in1_p : in1_p,
5306 low1, high1))))
5307 {
5308 if (strict_overflow_p)
5309 fold_overflow_warning (warnmsg,
5310 WARN_STRICT_OVERFLOW_COMPARISON);
5311 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5312 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5313 type, lhs, rhs);
5314 }
5315 }
5316 }
5317
5318 return 0;
5319 }
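
/* Illustrative sketch, not part of fold-const.c: when the two ranges do
   not merge but both sides test the same simple operand, the second
   branch above drops the short-circuit so a single branch suffices on
   targets where LOGICAL_OP_NON_SHORT_CIRCUIT holds.  */

int
non_short_circuit (int x)
{
  return x >= 10 || x == 3;	/* TRUTH_ORIF_EXPR -> TRUTH_OR_EXPR */
}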
5320 \f
5321 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5322 bit value. Arrange things so the extra bits will be set to zero if and
5323 only if C is sign-extended to its full width. If MASK is nonzero,
5324 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5325
5326 static tree
5327 unextend (tree c, int p, int unsignedp, tree mask)
5328 {
5329 tree type = TREE_TYPE (c);
5330 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5331 tree temp;
5332
5333 if (p == modesize || unsignedp)
5334 return c;
5335
5336 /* We work by getting just the sign bit into the low-order bit, then
5337 into the high-order bit, then sign-extend. We then XOR that value
5338 with C. */
5339 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5340
5341 /* We must use a signed type in order to get an arithmetic right shift.
5342 However, we must also avoid introducing accidental overflows, so that
5343 a subsequent call to integer_zerop will work. Hence we must
5344 do the type conversion here. At this point, the constant is either
5345 zero or one, and the conversion to a signed type can never overflow.
5346 We could get an overflow if this conversion is done anywhere else. */
5347 if (TYPE_UNSIGNED (type))
5348 temp = fold_convert (signed_type_for (type), temp);
5349
5350 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5351 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5352 if (mask != 0)
5353 temp = const_binop (BIT_AND_EXPR, temp,
5354 fold_convert (TREE_TYPE (c), mask));
5355 /* If necessary, convert the type back to match the type of C. */
5356 if (TYPE_UNSIGNED (type))
5357 temp = fold_convert (type, temp);
5358
5359 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5360 }
5361 \f
5362 /* For an expression that has the form
5363 (A && B) || ~B
5364 or
5365 (A || B) && ~B,
5366 we can drop one of the inner expressions and simplify to
5367 A || ~B
5368 or
5369 A && ~B
5370 LOC is the location of the resulting expression. OP is the inner
5371 logical operation (the left-hand side in the examples above), while CMPOP
5372 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5373 removing a condition that guards another, as in
5374 (A != NULL && A->...) || A == NULL
5375 which we must not transform. If RHS_ONLY is true, only eliminate the
5376 right-most operand of the inner logical operation. */
5377
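/* An illustrative sketch on hypothetical C input: for integral operands,
   where the comparison inverts cleanly,
     (a > 0 && b < 10) || b >= 10
   drops the guarded arm and becomes
     a > 0 || b >= 10
   whereas with RHS_ONLY set the guard in
     (p != 0 && p->x > 0) || p == 0
   is kept intact, since only right-most operands may be eliminated. */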
5378 static tree
5379 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5380 bool rhs_only)
5381 {
5382 tree type = TREE_TYPE (cmpop);
5383 enum tree_code code = TREE_CODE (cmpop);
5384 enum tree_code truthop_code = TREE_CODE (op);
5385 tree lhs = TREE_OPERAND (op, 0);
5386 tree rhs = TREE_OPERAND (op, 1);
5387 tree orig_lhs = lhs, orig_rhs = rhs;
5388 enum tree_code rhs_code = TREE_CODE (rhs);
5389 enum tree_code lhs_code = TREE_CODE (lhs);
5390 enum tree_code inv_code;
5391
5392 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5393 return NULL_TREE;
5394
5395 if (TREE_CODE_CLASS (code) != tcc_comparison)
5396 return NULL_TREE;
5397
5398 if (rhs_code == truthop_code)
5399 {
5400 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5401 if (newrhs != NULL_TREE)
5402 {
5403 rhs = newrhs;
5404 rhs_code = TREE_CODE (rhs);
5405 }
5406 }
5407 if (lhs_code == truthop_code && !rhs_only)
5408 {
5409 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5410 if (newlhs != NULL_TREE)
5411 {
5412 lhs = newlhs;
5413 lhs_code = TREE_CODE (lhs);
5414 }
5415 }
5416
5417 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5418 if (inv_code == rhs_code
5419 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5420 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5421 return lhs;
5422 if (!rhs_only && inv_code == lhs_code
5423 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5424 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5425 return rhs;
5426 if (rhs != orig_rhs || lhs != orig_lhs)
5427 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5428 lhs, rhs);
5429 return NULL_TREE;
5430 }
5431
5432 /* Find ways of folding logical expressions of LHS and RHS:
5433 Try to merge two comparisons to the same innermost item.
5434 Look for range tests like "ch >= '0' && ch <= '9'".
5435 Look for combinations of simple terms on machines with expensive branches
5436 and evaluate the RHS unconditionally.
5437
5438 For example, if we have p->a == 2 && p->b == 4 and we can make an
5439 object large enough to span both A and B, we can do this with a comparison
5440 against the object ANDed with a mask.
5441
5442 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5443 operations to do this with one comparison.
5444
5445 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5446 function and the one above.
5447
5448 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5449 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5450
5451 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
5452 two operands.
5453
5454 We return the simplified tree or 0 if no optimization is possible. */
5455
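/* A source-level sketch of the effect (illustrative only; the exact
   masks and positions depend on the ABI's bit-field layout and on
   endianness):

     struct S { unsigned a : 4; unsigned b : 4; } s;
     s.a == 2 && s.b == 4

   can be folded into a single load of the byte containing both fields,
   ANDed with a mask and compared against the merged constant. */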
5456 static tree
5457 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5458 tree lhs, tree rhs)
5459 {
5460 /* If this is the "or" of two comparisons, we can do something if
5461 the comparisons are NE_EXPR. If this is the "and", we can do something
5462 if the comparisons are EQ_EXPR. I.e.,
5463 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5464
5465 WANTED_CODE is this operation code. For single bit fields, we can
5466 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5467 comparison for one-bit fields. */
5468
5469 enum tree_code wanted_code;
5470 enum tree_code lcode, rcode;
5471 tree ll_arg, lr_arg, rl_arg, rr_arg;
5472 tree ll_inner, lr_inner, rl_inner, rr_inner;
5473 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5474 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5475 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5476 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5477 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5478 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5479 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5480 machine_mode lnmode, rnmode;
5481 tree ll_mask, lr_mask, rl_mask, rr_mask;
5482 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5483 tree l_const, r_const;
5484 tree lntype, rntype, result;
5485 HOST_WIDE_INT first_bit, end_bit;
5486 int volatilep;
5487
5488 /* Start by getting the comparison codes. Fail if anything is volatile.
5489 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5490 it were surrounded with a NE_EXPR. */
5491
5492 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5493 return 0;
5494
5495 lcode = TREE_CODE (lhs);
5496 rcode = TREE_CODE (rhs);
5497
5498 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5499 {
5500 lhs = build2 (NE_EXPR, truth_type, lhs,
5501 build_int_cst (TREE_TYPE (lhs), 0));
5502 lcode = NE_EXPR;
5503 }
5504
5505 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5506 {
5507 rhs = build2 (NE_EXPR, truth_type, rhs,
5508 build_int_cst (TREE_TYPE (rhs), 0));
5509 rcode = NE_EXPR;
5510 }
5511
5512 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5513 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5514 return 0;
5515
5516 ll_arg = TREE_OPERAND (lhs, 0);
5517 lr_arg = TREE_OPERAND (lhs, 1);
5518 rl_arg = TREE_OPERAND (rhs, 0);
5519 rr_arg = TREE_OPERAND (rhs, 1);
5520
5521 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5522 if (simple_operand_p (ll_arg)
5523 && simple_operand_p (lr_arg))
5524 {
5525 if (operand_equal_p (ll_arg, rl_arg, 0)
5526 && operand_equal_p (lr_arg, rr_arg, 0))
5527 {
5528 result = combine_comparisons (loc, code, lcode, rcode,
5529 truth_type, ll_arg, lr_arg);
5530 if (result)
5531 return result;
5532 }
5533 else if (operand_equal_p (ll_arg, rr_arg, 0)
5534 && operand_equal_p (lr_arg, rl_arg, 0))
5535 {
5536 result = combine_comparisons (loc, code, lcode,
5537 swap_tree_comparison (rcode),
5538 truth_type, ll_arg, lr_arg);
5539 if (result)
5540 return result;
5541 }
5542 }
5543
5544 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5545 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5546
5547 /* If the RHS can be evaluated unconditionally and its operands are
5548 simple, it wins to evaluate the RHS unconditionally on machines
5549 with expensive branches. In this case, this isn't a comparison
5550 that can be merged. */
5551
5552 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5553 false) >= 2
5554 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5555 && simple_operand_p (rl_arg)
5556 && simple_operand_p (rr_arg))
5557 {
5558 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5559 if (code == TRUTH_OR_EXPR
5560 && lcode == NE_EXPR && integer_zerop (lr_arg)
5561 && rcode == NE_EXPR && integer_zerop (rr_arg)
5562 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5563 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5564 return build2_loc (loc, NE_EXPR, truth_type,
5565 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5566 ll_arg, rl_arg),
5567 build_int_cst (TREE_TYPE (ll_arg), 0));
5568
5569 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5570 if (code == TRUTH_AND_EXPR
5571 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5572 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5573 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5574 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5575 return build2_loc (loc, EQ_EXPR, truth_type,
5576 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5577 ll_arg, rl_arg),
5578 build_int_cst (TREE_TYPE (ll_arg), 0));
5579 }
5580
5581 /* See if the comparisons can be merged. Then get all the parameters for
5582 each side. */
5583
5584 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5585 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5586 return 0;
5587
5588 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5589 volatilep = 0;
5590 ll_inner = decode_field_reference (loc, ll_arg,
5591 &ll_bitsize, &ll_bitpos, &ll_mode,
5592 &ll_unsignedp, &ll_reversep, &volatilep,
5593 &ll_mask, &ll_and_mask);
5594 lr_inner = decode_field_reference (loc, lr_arg,
5595 &lr_bitsize, &lr_bitpos, &lr_mode,
5596 &lr_unsignedp, &lr_reversep, &volatilep,
5597 &lr_mask, &lr_and_mask);
5598 rl_inner = decode_field_reference (loc, rl_arg,
5599 &rl_bitsize, &rl_bitpos, &rl_mode,
5600 &rl_unsignedp, &rl_reversep, &volatilep,
5601 &rl_mask, &rl_and_mask);
5602 rr_inner = decode_field_reference (loc, rr_arg,
5603 &rr_bitsize, &rr_bitpos, &rr_mode,
5604 &rr_unsignedp, &rr_reversep, &volatilep,
5605 &rr_mask, &rr_and_mask);
5606
5607 /* The inner operation on the lhs of each
5608 comparison must be the same if we are to be able to do anything.
5609 Then see if we have constants. If not, the same must be true for
5610 the rhs's. */
5611 if (volatilep
5612 || ll_reversep != rl_reversep
5613 || ll_inner == 0 || rl_inner == 0
5614 || ! operand_equal_p (ll_inner, rl_inner, 0))
5615 return 0;
5616
5617 if (TREE_CODE (lr_arg) == INTEGER_CST
5618 && TREE_CODE (rr_arg) == INTEGER_CST)
5619 {
5620 l_const = lr_arg, r_const = rr_arg;
5621 lr_reversep = ll_reversep;
5622 }
5623 else if (lr_reversep != rr_reversep
5624 || lr_inner == 0 || rr_inner == 0
5625 || ! operand_equal_p (lr_inner, rr_inner, 0))
5626 return 0;
5627 else
5628 l_const = r_const = 0;
5629
5630 /* If either comparison code is not correct for our logical operation,
5631 fail. However, we can convert a one-bit comparison against zero into
5632 the opposite comparison against that bit being set in the field. */
5633
5634 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5635 if (lcode != wanted_code)
5636 {
5637 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5638 {
5639 /* Make the left operand unsigned, since we are only interested
5640 in the value of one bit. Otherwise we are doing the wrong
5641 thing below. */
5642 ll_unsignedp = 1;
5643 l_const = ll_mask;
5644 }
5645 else
5646 return 0;
5647 }
5648
5649 /* This is analogous to the code for l_const above. */
5650 if (rcode != wanted_code)
5651 {
5652 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5653 {
5654 rl_unsignedp = 1;
5655 r_const = rl_mask;
5656 }
5657 else
5658 return 0;
5659 }
5660
5661 /* See if we can find a mode that contains both fields being compared on
5662 the left. If we can't, fail. Otherwise, update all constants and masks
5663 to be relative to a field of that size. */
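/* For example (illustrative): fields at bits 3..5 and 9..12 give
   first_bit = 3 and end_bit = 13; a 16-bit mode can cover them (target
   permitting), so lnbitsize = 16 and lnbitpos = 3 & ~15 = 0, and both
   bit positions are then re-expressed relative to bit 0 of that
   16-bit field. */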
5664 first_bit = MIN (ll_bitpos, rl_bitpos);
5665 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5666 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5667 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5668 volatilep);
5669 if (lnmode == VOIDmode)
5670 return 0;
5671
5672 lnbitsize = GET_MODE_BITSIZE (lnmode);
5673 lnbitpos = first_bit & ~ (lnbitsize - 1);
5674 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5675 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5676
5677 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5678 {
5679 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5680 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5681 }
5682
5683 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5684 size_int (xll_bitpos));
5685 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5686 size_int (xrl_bitpos));
5687
5688 if (l_const)
5689 {
5690 l_const = fold_convert_loc (loc, lntype, l_const);
5691 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5692 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5693 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5694 fold_build1_loc (loc, BIT_NOT_EXPR,
5695 lntype, ll_mask))))
5696 {
5697 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5698
5699 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5700 }
5701 }
5702 if (r_const)
5703 {
5704 r_const = fold_convert_loc (loc, lntype, r_const);
5705 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5706 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5707 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5708 fold_build1_loc (loc, BIT_NOT_EXPR,
5709 lntype, rl_mask))))
5710 {
5711 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5712
5713 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5714 }
5715 }
5716
5717 /* If the right sides are not constant, do the same for them. Also,
5718 disallow this optimization if a size or signedness mismatch occurs
5719 between the left and right sides. */
5720 if (l_const == 0)
5721 {
5722 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5723 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5724 /* Make sure the two fields on the right
5725 correspond to the left without being swapped. */
5726 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5727 return 0;
5728
5729 first_bit = MIN (lr_bitpos, rr_bitpos);
5730 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5731 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5732 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5733 volatilep);
5734 if (rnmode == VOIDmode)
5735 return 0;
5736
5737 rnbitsize = GET_MODE_BITSIZE (rnmode);
5738 rnbitpos = first_bit & ~ (rnbitsize - 1);
5739 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5740 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5741
5742 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5743 {
5744 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5745 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5746 }
5747
5748 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5749 rntype, lr_mask),
5750 size_int (xlr_bitpos));
5751 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5752 rntype, rr_mask),
5753 size_int (xrr_bitpos));
5754
5755 /* Make a mask that corresponds to both fields being compared.
5756 Do this for both items being compared. If the operands are the
5757 same size and the bits being compared are in the same position
5758 then we can do this by masking both and comparing the masked
5759 results. */
5760 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5761 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5762 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5763 {
5764 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5765 ll_unsignedp || rl_unsignedp, ll_reversep);
5766 if (! all_ones_mask_p (ll_mask, lnbitsize))
5767 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5768
5769 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5770 lr_unsignedp || rr_unsignedp, lr_reversep);
5771 if (! all_ones_mask_p (lr_mask, rnbitsize))
5772 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5773
5774 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5775 }
5776
5777 /* There is still another way we can do something: If both pairs of
5778 fields being compared are adjacent, we may be able to make a wider
5779 field containing them both.
5780
5781 Note that we still must mask the lhs/rhs expressions. Furthermore,
5782 the mask must be shifted to account for the shift done by
5783 make_bit_field_ref. */
5784 if ((ll_bitsize + ll_bitpos == rl_bitpos
5785 && lr_bitsize + lr_bitpos == rr_bitpos)
5786 || (ll_bitpos == rl_bitpos + rl_bitsize
5787 && lr_bitpos == rr_bitpos + rr_bitsize))
5788 {
5789 tree type;
5790
5791 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5792 ll_bitsize + rl_bitsize,
5793 MIN (ll_bitpos, rl_bitpos),
5794 ll_unsignedp, ll_reversep);
5795 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5796 lr_bitsize + rr_bitsize,
5797 MIN (lr_bitpos, rr_bitpos),
5798 lr_unsignedp, lr_reversep);
5799
5800 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5801 size_int (MIN (xll_bitpos, xrl_bitpos)));
5802 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5803 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5804
5805 /* Convert to the smaller type before masking out unwanted bits. */
5806 type = lntype;
5807 if (lntype != rntype)
5808 {
5809 if (lnbitsize > rnbitsize)
5810 {
5811 lhs = fold_convert_loc (loc, rntype, lhs);
5812 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5813 type = rntype;
5814 }
5815 else if (lnbitsize < rnbitsize)
5816 {
5817 rhs = fold_convert_loc (loc, lntype, rhs);
5818 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5819 type = lntype;
5820 }
5821 }
5822
5823 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5824 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5825
5826 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5827 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5828
5829 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5830 }
5831
5832 return 0;
5833 }
5834
5835 /* Handle the case of comparisons with constants. If there is something in
5836 common between the masks, those bits of the constants must be the same.
5837 If not, the condition is always false. Test for this to avoid generating
5838 incorrect code below. */
5839 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5840 if (! integer_zerop (result)
5841 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5842 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5843 {
5844 if (wanted_code == NE_EXPR)
5845 {
5846 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5847 return constant_boolean_node (true, truth_type);
5848 }
5849 else
5850 {
5851 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5852 return constant_boolean_node (false, truth_type);
5853 }
5854 }
5855
5856 /* Construct the expression we will return. First get the component
5857 reference we will make. Unless the mask is all ones the width of
5858 that field, perform the mask operation. Then compare with the
5859 merged constant. */
5860 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5861 ll_unsignedp || rl_unsignedp, ll_reversep);
5862
5863 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5864 if (! all_ones_mask_p (ll_mask, lnbitsize))
5865 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5866
5867 return build2_loc (loc, wanted_code, truth_type, result,
5868 const_binop (BIT_IOR_EXPR, l_const, r_const));
5869 }
5870 \f
5871 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5872 constant. */
5873
5874 static tree
5875 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5876 tree op0, tree op1)
5877 {
5878 tree arg0 = op0;
5879 enum tree_code op_code;
5880 tree comp_const;
5881 tree minmax_const;
5882 int consts_equal, consts_lt;
5883 tree inner;
5884
5885 STRIP_SIGN_NOPS (arg0);
5886
5887 op_code = TREE_CODE (arg0);
5888 minmax_const = TREE_OPERAND (arg0, 1);
5889 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5890 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5891 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5892 inner = TREE_OPERAND (arg0, 0);
5893
5894 /* If something does not permit us to optimize, return NULL_TREE. */
5895 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5896 || TREE_CODE (comp_const) != INTEGER_CST
5897 || TREE_OVERFLOW (comp_const)
5898 || TREE_CODE (minmax_const) != INTEGER_CST
5899 || TREE_OVERFLOW (minmax_const))
5900 return NULL_TREE;
5901
5902 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5903 and GT_EXPR, doing the rest with recursive calls using logical
5904 simplifications. */
5905 switch (code)
5906 {
5907 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5908 {
5909 tree tem
5910 = optimize_minmax_comparison (loc,
5911 invert_tree_comparison (code, false),
5912 type, op0, op1);
5913 if (tem)
5914 return invert_truthvalue_loc (loc, tem);
5915 return NULL_TREE;
5916 }
5917
5918 case GE_EXPR:
5919 return
5920 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5921 optimize_minmax_comparison
5922 (loc, EQ_EXPR, type, arg0, comp_const),
5923 optimize_minmax_comparison
5924 (loc, GT_EXPR, type, arg0, comp_const));
5925
5926 case EQ_EXPR:
5927 if (op_code == MAX_EXPR && consts_equal)
5928 /* MAX (X, 0) == 0 -> X <= 0 */
5929 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5930
5931 else if (op_code == MAX_EXPR && consts_lt)
5932 /* MAX (X, 0) == 5 -> X == 5 */
5933 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5934
5935 else if (op_code == MAX_EXPR)
5936 /* MAX (X, 0) == -1 -> false */
5937 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5938
5939 else if (consts_equal)
5940 /* MIN (X, 0) == 0 -> X >= 0 */
5941 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5942
5943 else if (consts_lt)
5944 /* MIN (X, 0) == 5 -> false */
5945 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5946
5947 else
5948 /* MIN (X, 0) == -1 -> X == -1 */
5949 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5950
5951 case GT_EXPR:
5952 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5953 /* MAX (X, 0) > 0 -> X > 0
5954 MAX (X, 0) > 5 -> X > 5 */
5955 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5956
5957 else if (op_code == MAX_EXPR)
5958 /* MAX (X, 0) > -1 -> true */
5959 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5960
5961 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5962 /* MIN (X, 0) > 0 -> false
5963 MIN (X, 0) > 5 -> false */
5964 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5965
5966 else
5967 /* MIN (X, 0) > -1 -> X > -1 */
5968 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5969
5970 default:
5971 return NULL_TREE;
5972 }
5973 }
5974 \f
5975 /* T is an integer expression that is being multiplied or divided by, or
5976 reduced modulo, a constant C (CODE says which, and what kind of divide
5977 or modulus). See if we can eliminate that operation by folding it with
5978 other operations already in T. WIDE_TYPE, if non-null, is a type that
5979 should be used for the computation if wider than our type.
5980
5981 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5982 (X * 2) + (Y * 4). We must, however, be assured that either the original
5983 expression would not overflow or that overflow is undefined for the type
5984 in the language in question.
5985
5986 If we return a non-null expression, it is an equivalent form of the
5987 original computation, but need not be in the original type.
5988
5989 We set *STRICT_OVERFLOW_P to true if the return value depends on
5990 signed overflow being undefined. Otherwise we do not change
5991 *STRICT_OVERFLOW_P. */
5992
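/* A sketch of the intended effect on ordinary C input (assuming signed
   overflow is undefined, so the reassociation is valid):

     (x * 8 + y * 16) / 4   =>   x * 2 + y * 4

   The wrapper below only bounds the recursion depth; extract_muldiv_1
   does the actual work. */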
5993 static tree
5994 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5995 bool *strict_overflow_p)
5996 {
5997 /* To avoid exponential search depth, refuse to allow recursion past
5998 three levels. Beyond that (1) it's highly unlikely that we'll find
5999 something interesting and (2) we've probably processed it before
6000 when we built the inner expression. */
6001
6002 static int depth;
6003 tree ret;
6004
6005 if (depth > 3)
6006 return NULL;
6007
6008 depth++;
6009 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6010 depth--;
6011
6012 return ret;
6013 }
6014
6015 static tree
6016 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6017 bool *strict_overflow_p)
6018 {
6019 tree type = TREE_TYPE (t);
6020 enum tree_code tcode = TREE_CODE (t);
6021 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6022 > GET_MODE_SIZE (TYPE_MODE (type)))
6023 ? wide_type : type);
6024 tree t1, t2;
6025 int same_p = tcode == code;
6026 tree op0 = NULL_TREE, op1 = NULL_TREE;
6027 bool sub_strict_overflow_p;
6028
6029 /* Don't deal with constants of zero here; they confuse the code below. */
6030 if (integer_zerop (c))
6031 return NULL_TREE;
6032
6033 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6034 op0 = TREE_OPERAND (t, 0);
6035
6036 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6037 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6038
6039 /* Note that we need not handle conditional operations here since fold
6040 already handles those cases. So just do arithmetic here. */
6041 switch (tcode)
6042 {
6043 case INTEGER_CST:
6044 /* For a constant, we can always simplify if we are a multiply
6045 or (for divide and modulus) if it is a multiple of our constant. */
6046 if (code == MULT_EXPR
6047 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6048 {
6049 tree tem = const_binop (code, fold_convert (ctype, t),
6050 fold_convert (ctype, c));
6051 /* If the multiplication overflowed to INT_MIN then we lost sign
6052 information on it and a subsequent multiplication might
6053 spuriously overflow. See PR68142. */
6054 if (TREE_OVERFLOW (tem)
6055 && wi::eq_p (tem, wi::min_value (TYPE_PRECISION (ctype), SIGNED)))
6056 return NULL_TREE;
6057 return tem;
6058 }
6059 break;
6060
6061 CASE_CONVERT: case NON_LVALUE_EXPR:
6062 /* If op0 is an expression ... */
6063 if ((COMPARISON_CLASS_P (op0)
6064 || UNARY_CLASS_P (op0)
6065 || BINARY_CLASS_P (op0)
6066 || VL_EXP_CLASS_P (op0)
6067 || EXPRESSION_CLASS_P (op0))
6068 /* ... and has wrapping overflow, and its type is smaller
6069 than ctype, then we cannot pass through as widening. */
6070 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6071 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6072 && (TYPE_PRECISION (ctype)
6073 > TYPE_PRECISION (TREE_TYPE (op0))))
6074 /* ... or this is a truncation (t is narrower than op0),
6075 then we cannot pass through this narrowing. */
6076 || (TYPE_PRECISION (type)
6077 < TYPE_PRECISION (TREE_TYPE (op0)))
6078 /* ... or signedness changes for division or modulus,
6079 then we cannot pass through this conversion. */
6080 || (code != MULT_EXPR
6081 && (TYPE_UNSIGNED (ctype)
6082 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6083 /* ... or has undefined overflow while the converted to
6084 type has not, we cannot do the operation in the inner type
6085 as that would introduce undefined overflow. */
6086 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6087 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6088 && !TYPE_OVERFLOW_UNDEFINED (type))))
6089 break;
6090
6091 /* Pass the constant down and see if we can make a simplification. If
6092 we can, replace this expression with the inner simplification for
6093 possible later conversion to our or some other type. */
6094 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6095 && TREE_CODE (t2) == INTEGER_CST
6096 && !TREE_OVERFLOW (t2)
6097 && (0 != (t1 = extract_muldiv (op0, t2, code,
6098 code == MULT_EXPR
6099 ? ctype : NULL_TREE,
6100 strict_overflow_p))))
6101 return t1;
6102 break;
6103
6104 case ABS_EXPR:
6105 /* If widening the type changes it from signed to unsigned, then we
6106 must avoid building ABS_EXPR itself as unsigned. */
6107 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6108 {
6109 tree cstype = (*signed_type_for) (ctype);
6110 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6111 != 0)
6112 {
6113 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6114 return fold_convert (ctype, t1);
6115 }
6116 break;
6117 }
6118 /* If the constant is negative, we cannot simplify this. */
6119 if (tree_int_cst_sgn (c) == -1)
6120 break;
6121 /* FALLTHROUGH */
6122 case NEGATE_EXPR:
6123 /* For division and modulus, type can't be unsigned, as e.g.
6124 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6125 For signed types, even with wrapping overflow, this is fine. */
6126 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6127 break;
6128 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6129 != 0)
6130 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6131 break;
6132
6133 case MIN_EXPR: case MAX_EXPR:
6134 /* If widening the type changes the signedness, then we can't perform
6135 this optimization as that changes the result. */
6136 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6137 break;
6138
6139 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6140 sub_strict_overflow_p = false;
6141 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6142 &sub_strict_overflow_p)) != 0
6143 && (t2 = extract_muldiv (op1, c, code, wide_type,
6144 &sub_strict_overflow_p)) != 0)
6145 {
6146 if (tree_int_cst_sgn (c) < 0)
6147 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6148 if (sub_strict_overflow_p)
6149 *strict_overflow_p = true;
6150 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6151 fold_convert (ctype, t2));
6152 }
6153 break;
6154
6155 case LSHIFT_EXPR: case RSHIFT_EXPR:
6156 /* If the second operand is constant, this is a multiplication
6157 or floor division by a power of two, so we can treat it that
6158 way unless the multiplier or divisor overflows. Signed
6159 left-shift overflow is implementation-defined rather than
6160 undefined in C90, so do not convert signed left shift into
6161 multiplication. */
6162 if (TREE_CODE (op1) == INTEGER_CST
6163 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6164 /* const_binop may not detect overflow correctly,
6165 so check for it explicitly here. */
6166 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6167 && 0 != (t1 = fold_convert (ctype,
6168 const_binop (LSHIFT_EXPR,
6169 size_one_node,
6170 op1)))
6171 && !TREE_OVERFLOW (t1))
6172 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6173 ? MULT_EXPR : FLOOR_DIV_EXPR,
6174 ctype,
6175 fold_convert (ctype, op0),
6176 t1),
6177 c, code, wide_type, strict_overflow_p);
6178 break;
6179
6180 case PLUS_EXPR: case MINUS_EXPR:
6181 /* See if we can eliminate the operation on both sides. If we can, we
6182 can return a new PLUS or MINUS. If we can't, the only remaining
6183 cases where we can do anything are if the second operand is a
6184 constant. */
6185 sub_strict_overflow_p = false;
6186 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6187 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6188 if (t1 != 0 && t2 != 0
6189 && (code == MULT_EXPR
6190 /* If not multiplication, we can only do this if both operands
6191 are divisible by c. */
6192 || (multiple_of_p (ctype, op0, c)
6193 && multiple_of_p (ctype, op1, c))))
6194 {
6195 if (sub_strict_overflow_p)
6196 *strict_overflow_p = true;
6197 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6198 fold_convert (ctype, t2));
6199 }
6200
6201 /* If this was a subtraction, negate OP1 and set it to be an addition.
6202 This simplifies the logic below. */
6203 if (tcode == MINUS_EXPR)
6204 {
6205 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6206 /* If OP1 was not easily negatable, the constant may be OP0. */
6207 if (TREE_CODE (op0) == INTEGER_CST)
6208 {
6209 std::swap (op0, op1);
6210 std::swap (t1, t2);
6211 }
6212 }
6213
6214 if (TREE_CODE (op1) != INTEGER_CST)
6215 break;
6216
6217 /* If either OP1 or C is negative, this optimization is not safe for
6218 some of the division and remainder types, while for others we need
6219 to change the code. */
6220 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6221 {
6222 if (code == CEIL_DIV_EXPR)
6223 code = FLOOR_DIV_EXPR;
6224 else if (code == FLOOR_DIV_EXPR)
6225 code = CEIL_DIV_EXPR;
6226 else if (code != MULT_EXPR
6227 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6228 break;
6229 }
6230
6231 /* If this is a multiply, or a division/modulus where OP1 is a multiple
6232 of our constant, do the operation and verify it doesn't overflow. */
6233 if (code == MULT_EXPR
6234 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6235 {
6236 op1 = const_binop (code, fold_convert (ctype, op1),
6237 fold_convert (ctype, c));
6238 /* We allow the constant to overflow with wrapping semantics. */
6239 if (op1 == 0
6240 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6241 break;
6242 }
6243 else
6244 break;
6245
6246 /* If we have an unsigned type, we cannot widen the operation since it
6247 will change the result if the original computation overflowed. */
6248 if (TYPE_UNSIGNED (ctype) && ctype != type)
6249 break;
6250
6251 /* If we were able to eliminate our operation from the first side,
6252 apply our operation to the second side and reform the PLUS. */
6253 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6254 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6255
6256 /* The last case is if we are a multiply. In that case, we can
6257 apply the distributive law to commute the multiply and addition
6258 if the multiplication of the constants doesn't overflow
6259 and overflow is defined. With undefined overflow
6260 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6261 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6262 return fold_build2 (tcode, ctype,
6263 fold_build2 (code, ctype,
6264 fold_convert (ctype, op0),
6265 fold_convert (ctype, c)),
6266 op1);
6267
6268 break;
6269
6270 case MULT_EXPR:
6271 /* We have a special case here if we are doing something like
6272 (C * 8) % 4 since we know that's zero. */
6273 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6274 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6275 /* If the multiplication can overflow we cannot optimize this. */
6276 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6277 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6278 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6279 {
6280 *strict_overflow_p = true;
6281 return omit_one_operand (type, integer_zero_node, op0);
6282 }
6283
6284 /* ... fall through ... */
6285
6286 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6287 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6288 /* If we can extract our operation from the LHS, do so and return a
6289 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6290 do something only if the second operand is a constant. */
6291 if (same_p
6292 && (t1 = extract_muldiv (op0, c, code, wide_type,
6293 strict_overflow_p)) != 0)
6294 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6295 fold_convert (ctype, op1));
6296 else if (tcode == MULT_EXPR && code == MULT_EXPR
6297 && (t1 = extract_muldiv (op1, c, code, wide_type,
6298 strict_overflow_p)) != 0)
6299 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6300 fold_convert (ctype, t1));
6301 else if (TREE_CODE (op1) != INTEGER_CST)
6302 return 0;
6303
6304 /* If these are the same operation types, we can associate them
6305 assuming no overflow. */
6306 if (tcode == code)
6307 {
6308 bool overflow_p = false;
6309 bool overflow_mul_p;
6310 signop sign = TYPE_SIGN (ctype);
6311 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6312 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6313 if (overflow_mul_p
6314 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6315 overflow_p = true;
6316 if (!overflow_p)
6317 {
6318 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6319 TYPE_SIGN (TREE_TYPE (op1)));
6320 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6321 wide_int_to_tree (ctype, mul));
6322 }
6323 }
6324
6325 /* If these operations "cancel" each other, we have the main
6326 optimizations of this pass, which occur when either constant is a
6327 multiple of the other, in which case we replace this with either an
6328 operation of CODE or TCODE.
6329
6330 If we have an unsigned type, we cannot do this since it will change
6331 the result if the original computation overflowed. */
6332 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6333 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6334 || (tcode == MULT_EXPR
6335 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6336 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6337 && code != MULT_EXPR)))
6338 {
6339 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6340 {
6341 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6342 *strict_overflow_p = true;
6343 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6344 fold_convert (ctype,
6345 const_binop (TRUNC_DIV_EXPR,
6346 op1, c)));
6347 }
6348 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6349 {
6350 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6351 *strict_overflow_p = true;
6352 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6353 fold_convert (ctype,
6354 const_binop (TRUNC_DIV_EXPR,
6355 c, op1)));
6356 }
6357 }
6358 break;
6359
6360 default:
6361 break;
6362 }
6363
6364 return 0;
6365 }
6366 \f
6367 /* Return a node which has the indicated constant VALUE (either 0 or
6368 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6369 and is of the indicated TYPE. */
6370
6371 tree
6372 constant_boolean_node (bool value, tree type)
6373 {
6374 if (type == integer_type_node)
6375 return value ? integer_one_node : integer_zero_node;
6376 else if (type == boolean_type_node)
6377 return value ? boolean_true_node : boolean_false_node;
6378 else if (TREE_CODE (type) == VECTOR_TYPE)
6379 return build_vector_from_val (type,
6380 build_int_cst (TREE_TYPE (type),
6381 value ? -1 : 0));
6382 else
6383 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6384 }
6385
6386
6387 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6388 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6389 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6390 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6391 COND is the first argument to CODE; otherwise (as in the example
6392 given here), it is the second argument. TYPE is the type of the
6393 original expression. Return NULL_TREE if no simplification is
6394 possible. */
6395
6396 static tree
6397 fold_binary_op_with_conditional_arg (location_t loc,
6398 enum tree_code code,
6399 tree type, tree op0, tree op1,
6400 tree cond, tree arg, int cond_first_p)
6401 {
6402 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6403 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6404 tree test, true_value, false_value;
6405 tree lhs = NULL_TREE;
6406 tree rhs = NULL_TREE;
6407 enum tree_code cond_code = COND_EXPR;
6408
6409 if (TREE_CODE (cond) == COND_EXPR
6410 || TREE_CODE (cond) == VEC_COND_EXPR)
6411 {
6412 test = TREE_OPERAND (cond, 0);
6413 true_value = TREE_OPERAND (cond, 1);
6414 false_value = TREE_OPERAND (cond, 2);
6415 /* If this operand is a throw expression, then it does not make
6416 sense to try to perform a logical or arithmetic operation
6417 involving it. */
6418 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6419 lhs = true_value;
6420 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6421 rhs = false_value;
6422 }
6423 else
6424 {
6425 tree testtype = TREE_TYPE (cond);
6426 test = cond;
6427 true_value = constant_boolean_node (true, testtype);
6428 false_value = constant_boolean_node (false, testtype);
6429 }
6430
6431 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6432 cond_code = VEC_COND_EXPR;
6433
6434 /* This transformation is only worthwhile if we don't have to wrap ARG
6435 in a SAVE_EXPR and the operation can be simplified without recursing
6436 on at least one of the branches once it's pushed inside the COND_EXPR. */
6437 if (!TREE_CONSTANT (arg)
6438 && (TREE_SIDE_EFFECTS (arg)
6439 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6440 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6441 return NULL_TREE;
6442
6443 arg = fold_convert_loc (loc, arg_type, arg);
6444 if (lhs == 0)
6445 {
6446 true_value = fold_convert_loc (loc, cond_type, true_value);
6447 if (cond_first_p)
6448 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6449 else
6450 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6451 }
6452 if (rhs == 0)
6453 {
6454 false_value = fold_convert_loc (loc, cond_type, false_value);
6455 if (cond_first_p)
6456 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6457 else
6458 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6459 }
6460
6461 /* Check that we have simplified at least one of the branches. */
6462 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6463 return NULL_TREE;
6464
6465 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6466 }
6467
6468 \f
6469 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6470
6471 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6472 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6473 ADDEND is the same as X.
6474
6475 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6476 and finite. The problematic cases are when X is zero, and its mode
6477 has signed zeros. In the case of rounding towards -infinity,
6478 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6479 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6480
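/* The signed-zero cases spelled out (IEEE semantics, illustrative):
   under round-to-nearest, -0.0 + 0.0 yields +0.0, so X + 0.0 is not X
   for X == -0.0; under rounding towards -infinity, +0.0 - 0.0 yields
   -0.0, so X - 0.0 is not X there either. Hence the restriction to
   the negate case without sign-dependent rounding. */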
6481 bool
6482 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6483 {
6484 if (!real_zerop (addend))
6485 return false;
6486
6487 /* Don't allow the fold with -fsignaling-nans. */
6488 if (HONOR_SNANS (element_mode (type)))
6489 return false;
6490
6491 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6492 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6493 return true;
6494
6495 /* In a vector or complex, we would need to check the sign of all zeros. */
6496 if (TREE_CODE (addend) != REAL_CST)
6497 return false;
6498
6499 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6500 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6501 negate = !negate;
6502
6503 /* The mode has signed zeros, and we have to honor their sign.
6504 In this situation, there is only one case we can return true for.
6505 X - 0 is the same as X unless rounding towards -infinity is
6506 supported. */
6507 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6508 }
6509
6510 /* Subroutine of fold() that optimizes comparisons of a division by
6511 a nonzero integer constant against an integer constant, i.e.
6512 X/C1 op C2.
6513
6514 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6515 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6516 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6517
6518 The function returns the constant folded tree if a simplification
6519 can be made, and NULL_TREE otherwise. */
6520
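/* For instance (illustrative, unsigned arithmetic): X / 4 == 3 holds
   exactly for X in [12, 15], so the comparison becomes the range check
   12 <= X && X <= 15; likewise X / 4 > 3 becomes X > 15. */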
6521 static tree
6522 fold_div_compare (location_t loc,
6523 enum tree_code code, tree type, tree arg0, tree arg1)
6524 {
6525 tree prod, tmp, hi, lo;
6526 tree arg00 = TREE_OPERAND (arg0, 0);
6527 tree arg01 = TREE_OPERAND (arg0, 1);
6528 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6529 bool neg_overflow = false;
6530 bool overflow;
6531
6532 /* We have to do this the hard way to detect unsigned overflow.
6533 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6534 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6535 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6536 neg_overflow = false;
6537
6538 if (sign == UNSIGNED)
6539 {
6540 tmp = int_const_binop (MINUS_EXPR, arg01,
6541 build_int_cst (TREE_TYPE (arg01), 1));
6542 lo = prod;
6543
6544 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6545 val = wi::add (prod, tmp, sign, &overflow);
6546 hi = force_fit_type (TREE_TYPE (arg00), val,
6547 -1, overflow | TREE_OVERFLOW (prod));
6548 }
6549 else if (tree_int_cst_sgn (arg01) >= 0)
6550 {
6551 tmp = int_const_binop (MINUS_EXPR, arg01,
6552 build_int_cst (TREE_TYPE (arg01), 1));
6553 switch (tree_int_cst_sgn (arg1))
6554 {
6555 case -1:
6556 neg_overflow = true;
6557 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6558 hi = prod;
6559 break;
6560
6561 case 0:
6562 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6563 hi = tmp;
6564 break;
6565
6566 case 1:
6567 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6568 lo = prod;
6569 break;
6570
6571 default:
6572 gcc_unreachable ();
6573 }
6574 }
6575 else
6576 {
6577 /* A negative divisor reverses the relational operators. */
6578 code = swap_tree_comparison (code);
6579
6580 tmp = int_const_binop (PLUS_EXPR, arg01,
6581 build_int_cst (TREE_TYPE (arg01), 1));
6582 switch (tree_int_cst_sgn (arg1))
6583 {
6584 case -1:
6585 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6586 lo = prod;
6587 break;
6588
6589 case 0:
6590 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6591 lo = tmp;
6592 break;
6593
6594 case 1:
6595 neg_overflow = true;
6596 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6597 hi = prod;
6598 break;
6599
6600 default:
6601 gcc_unreachable ();
6602 }
6603 }
6604
6605 switch (code)
6606 {
6607 case EQ_EXPR:
6608 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6609 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6610 if (TREE_OVERFLOW (hi))
6611 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6612 if (TREE_OVERFLOW (lo))
6613 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6614 return build_range_check (loc, type, arg00, 1, lo, hi);
6615
6616 case NE_EXPR:
6617 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6618 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6619 if (TREE_OVERFLOW (hi))
6620 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6621 if (TREE_OVERFLOW (lo))
6622 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6623 return build_range_check (loc, type, arg00, 0, lo, hi);
6624
6625 case LT_EXPR:
6626 if (TREE_OVERFLOW (lo))
6627 {
6628 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6629 return omit_one_operand_loc (loc, type, tmp, arg00);
6630 }
6631 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6632
6633 case LE_EXPR:
6634 if (TREE_OVERFLOW (hi))
6635 {
6636 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6637 return omit_one_operand_loc (loc, type, tmp, arg00);
6638 }
6639 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6640
6641 case GT_EXPR:
6642 if (TREE_OVERFLOW (hi))
6643 {
6644 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6645 return omit_one_operand_loc (loc, type, tmp, arg00);
6646 }
6647 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6648
6649 case GE_EXPR:
6650 if (TREE_OVERFLOW (lo))
6651 {
6652 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6653 return omit_one_operand_loc (loc, type, tmp, arg00);
6654 }
6655 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6656
6657 default:
6658 break;
6659 }
6660
6661 return NULL_TREE;
6662 }
6663
6664
6665 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6666 equality/inequality test, then return a simplified form of the test
6667 using a sign test. Otherwise return NULL. TYPE is the desired
6668 result type. */
6669
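/* For example (illustrative, 8-bit signed char c): (c & 0x80) != 0
   tests exactly the sign bit and becomes c < 0, while (c & 0x80) == 0
   becomes c >= 0. */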
6670 static tree
6671 fold_single_bit_test_into_sign_test (location_t loc,
6672 enum tree_code code, tree arg0, tree arg1,
6673 tree result_type)
6674 {
6675 /* If this is testing a single bit, we can optimize the test. */
6676 if ((code == NE_EXPR || code == EQ_EXPR)
6677 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6678 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6679 {
6680 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6681 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6682 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6683
6684 if (arg00 != NULL_TREE
6685 /* This is only a win if casting to a signed type is cheap,
6686 i.e. when arg00's type is not a partial mode. */
6687 && TYPE_PRECISION (TREE_TYPE (arg00))
6688 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6689 {
6690 tree stype = signed_type_for (TREE_TYPE (arg00));
6691 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6692 result_type,
6693 fold_convert_loc (loc, stype, arg00),
6694 build_int_cst (stype, 0));
6695 }
6696 }
6697
6698 return NULL_TREE;
6699 }
6700
6701 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6702 equality/inequality test, then return a simplified form of
6703 the test using shifts and logical operations. Otherwise return
6704 NULL. TYPE is the desired result type. */
6705
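/* For example (illustrative): (x & 8) != 0 becomes ((x >> 3) & 1),
   and (x & 8) == 0 becomes (((x >> 3) ^ 1) & 1), with the XOR
   inserted before the final AND as done below. */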
6706 tree
6707 fold_single_bit_test (location_t loc, enum tree_code code,
6708 tree arg0, tree arg1, tree result_type)
6709 {
6710 /* If this is testing a single bit, we can optimize the test. */
6711 if ((code == NE_EXPR || code == EQ_EXPR)
6712 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6713 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6714 {
6715 tree inner = TREE_OPERAND (arg0, 0);
6716 tree type = TREE_TYPE (arg0);
6717 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6718 machine_mode operand_mode = TYPE_MODE (type);
6719 int ops_unsigned;
6720 tree signed_type, unsigned_type, intermediate_type;
6721 tree tem, one;
6722
6723 /* First, see if we can fold the single bit test into a sign-bit
6724 test. */
6725 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6726 result_type);
6727 if (tem)
6728 return tem;
6729
6730 /* Otherwise we have (A & C) != 0 where C is a single bit,
6731 convert that into ((A >> C2) & 1), where C2 = log2(C).
6732 Similarly for (A & C) == 0. */
6733
6734 /* If INNER is a right shift by a constant and that constant plus
6735 BITNUM does not overflow, adjust BITNUM and INNER. */
6736 if (TREE_CODE (inner) == RSHIFT_EXPR
6737 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6738 && bitnum < TYPE_PRECISION (type)
6739 && wi::ltu_p (TREE_OPERAND (inner, 1),
6740 TYPE_PRECISION (type) - bitnum))
6741 {
6742 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6743 inner = TREE_OPERAND (inner, 0);
6744 }
6745
6746 /* If we are going to be able to omit the AND below, we must do our
6747 operations as unsigned. If we must use the AND, we have a choice.
6748 Normally unsigned is faster, but for some machines signed is. */
6749 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6750 && !flag_syntax_only) ? 0 : 1;
6751
6752 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6753 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6754 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6755 inner = fold_convert_loc (loc, intermediate_type, inner);
6756
6757 if (bitnum != 0)
6758 inner = build2 (RSHIFT_EXPR, intermediate_type,
6759 inner, size_int (bitnum));
6760
6761 one = build_int_cst (intermediate_type, 1);
6762
6763 if (code == EQ_EXPR)
6764 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6765
6766 /* Put the AND last so it can combine with more things. */
6767 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6768
6769 /* Make sure to return the proper type. */
6770 inner = fold_convert_loc (loc, result_type, inner);
6771
6772 return inner;
6773 }
6774 return NULL_TREE;
6775 }
6776
6777 /* Check whether we are allowed to reorder operands arg0 and arg1,
6778 such that the evaluation of arg1 occurs before arg0. */
6779
6780 static bool
6781 reorder_operands_p (const_tree arg0, const_tree arg1)
6782 {
6783 if (! flag_evaluation_order)
6784 return true;
6785 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6786 return true;
6787 return ! TREE_SIDE_EFFECTS (arg0)
6788 && ! TREE_SIDE_EFFECTS (arg1);
6789 }
6790
6791 /* Test whether it is preferable to swap two operands, ARG0 and
6792 ARG1, for example because ARG0 is an integer constant and ARG1
6793 isn't. If REORDER is true, only recommend swapping if we can
6794 evaluate the operands in reverse order. */
6795
6796 bool
6797 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6798 {
6799 if (CONSTANT_CLASS_P (arg1))
6800 return 0;
6801 if (CONSTANT_CLASS_P (arg0))
6802 return 1;
6803
6804 STRIP_NOPS (arg0);
6805 STRIP_NOPS (arg1);
6806
6807 if (TREE_CONSTANT (arg1))
6808 return 0;
6809 if (TREE_CONSTANT (arg0))
6810 return 1;
6811
6812 if (reorder && flag_evaluation_order
6813 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6814 return 0;
6815
6816 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6817 for commutative and comparison operators. Ensuring a canonical
6818 form allows the optimizers to find additional redundancies without
6819 having to explicitly check for both orderings. */
6820 if (TREE_CODE (arg0) == SSA_NAME
6821 && TREE_CODE (arg1) == SSA_NAME
6822 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6823 return 1;
6824
6825 /* Put SSA_NAMEs last. */
6826 if (TREE_CODE (arg1) == SSA_NAME)
6827 return 0;
6828 if (TREE_CODE (arg0) == SSA_NAME)
6829 return 1;
6830
6831 /* Put variables last. */
6832 if (DECL_P (arg1))
6833 return 0;
6834 if (DECL_P (arg0))
6835 return 1;
6836
6837 return 0;
6838 }
6839
6840
6841 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6842 means A >= Y && A != MAX, but in this case we know that
6843 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6844
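/* For instance (illustrative): i < n && i + 1 > j becomes
   i < n && i >= j, valid because i < n rules out i == MAX, so
   i + 1 cannot wrap. */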
6845 static tree
6846 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6847 {
6848 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6849
6850 if (TREE_CODE (bound) == LT_EXPR)
6851 a = TREE_OPERAND (bound, 0);
6852 else if (TREE_CODE (bound) == GT_EXPR)
6853 a = TREE_OPERAND (bound, 1);
6854 else
6855 return NULL_TREE;
6856
6857 typea = TREE_TYPE (a);
6858 if (!INTEGRAL_TYPE_P (typea)
6859 && !POINTER_TYPE_P (typea))
6860 return NULL_TREE;
6861
6862 if (TREE_CODE (ineq) == LT_EXPR)
6863 {
6864 a1 = TREE_OPERAND (ineq, 1);
6865 y = TREE_OPERAND (ineq, 0);
6866 }
6867 else if (TREE_CODE (ineq) == GT_EXPR)
6868 {
6869 a1 = TREE_OPERAND (ineq, 0);
6870 y = TREE_OPERAND (ineq, 1);
6871 }
6872 else
6873 return NULL_TREE;
6874
6875 if (TREE_TYPE (a1) != typea)
6876 return NULL_TREE;
6877
6878 if (POINTER_TYPE_P (typea))
6879 {
6880 /* Convert the pointers to a signed integer type before taking the difference. */
6881 tree ta = fold_convert_loc (loc, ssizetype, a);
6882 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6883 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6884 }
6885 else
6886 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6887
6888 if (!diff || !integer_onep (diff))
6889 return NULL_TREE;
6890
6891 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6892 }
6893
6894 /* Fold a sum or difference of at least one multiplication.
6895 Returns the folded tree or NULL if no simplification could be made. */
6896
6897 static tree
6898 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6899 tree arg0, tree arg1)
6900 {
6901 tree arg00, arg01, arg10, arg11;
6902 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6903
6904 /* (A * C) +- (B * C) -> (A+-B) * C.
6905 (A * C) +- A -> A * (C+-1).
6906 We are most concerned about the case where C is a constant,
6907 but other combinations show up during loop reduction. Since
6908 it is not difficult, try all four possibilities. */
6909
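/* Concrete instances (illustrative): a * c + b * c => (a + b) * c,
   x * 4 + x => x * 5, and with the power-of-two special case further
   below, i * 12 + j * 4 => (i * 3 + j) * 4. */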
6910 if (TREE_CODE (arg0) == MULT_EXPR)
6911 {
6912 arg00 = TREE_OPERAND (arg0, 0);
6913 arg01 = TREE_OPERAND (arg0, 1);
6914 }
6915 else if (TREE_CODE (arg0) == INTEGER_CST)
6916 {
6917 arg00 = build_one_cst (type);
6918 arg01 = arg0;
6919 }
6920 else
6921 {
6922 /* We cannot generate constant 1 for fract. */
6923 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6924 return NULL_TREE;
6925 arg00 = arg0;
6926 arg01 = build_one_cst (type);
6927 }
6928 if (TREE_CODE (arg1) == MULT_EXPR)
6929 {
6930 arg10 = TREE_OPERAND (arg1, 0);
6931 arg11 = TREE_OPERAND (arg1, 1);
6932 }
6933 else if (TREE_CODE (arg1) == INTEGER_CST)
6934 {
6935 arg10 = build_one_cst (type);
6936 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6937 the purpose of this canonicalization. */
6938 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6939 && negate_expr_p (arg1)
6940 && code == PLUS_EXPR)
6941 {
6942 arg11 = negate_expr (arg1);
6943 code = MINUS_EXPR;
6944 }
6945 else
6946 arg11 = arg1;
6947 }
6948 else
6949 {
6950 /* We cannot generate constant 1 for fract. */
6951 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6952 return NULL_TREE;
6953 arg10 = arg1;
6954 arg11 = build_one_cst (type);
6955 }
6956 same = NULL_TREE;
6957
6958 if (operand_equal_p (arg01, arg11, 0))
6959 same = arg01, alt0 = arg00, alt1 = arg10;
6960 else if (operand_equal_p (arg00, arg10, 0))
6961 same = arg00, alt0 = arg01, alt1 = arg11;
6962 else if (operand_equal_p (arg00, arg11, 0))
6963 same = arg00, alt0 = arg01, alt1 = arg10;
6964 else if (operand_equal_p (arg01, arg10, 0))
6965 same = arg01, alt0 = arg00, alt1 = arg11;
6966
6967 /* No identical multiplicands; see if we can find a common
6968 power-of-two factor in non-power-of-two multiplies. This
6969 can help in multi-dimensional array access. */
6970 else if (tree_fits_shwi_p (arg01)
6971 && tree_fits_shwi_p (arg11))
6972 {
6973 HOST_WIDE_INT int01, int11, tmp;
6974 bool swap = false;
6975 tree maybe_same;
6976 int01 = tree_to_shwi (arg01);
6977 int11 = tree_to_shwi (arg11);
6978
6979 /* Move min of absolute values to int11. */
6980 if (absu_hwi (int01) < absu_hwi (int11))
6981 {
6982 tmp = int01, int01 = int11, int11 = tmp;
6983 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6984 maybe_same = arg01;
6985 swap = true;
6986 }
6987 else
6988 maybe_same = arg11;
6989
6990 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6991 /* The remainder should not be a constant, otherwise we
6992 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6993 increase the number of multiplications needed. */
6994 && TREE_CODE (arg10) != INTEGER_CST)
6995 {
6996 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6997 build_int_cst (TREE_TYPE (arg00),
6998 int01 / int11));
6999 alt1 = arg10;
7000 same = maybe_same;
7001 if (swap)
7002 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7003 }
7004 }
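/* For instance (illustrative): i*16 + j*4 shares the power-of-two
factor 4 and is refolded as (i*4 + j) * 4, exposing the common
factor without increasing the number of multiplications. */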
7005
7006 if (same)
7007 return fold_build2_loc (loc, MULT_EXPR, type,
7008 fold_build2_loc (loc, code, type,
7009 fold_convert_loc (loc, type, alt0),
7010 fold_convert_loc (loc, type, alt1)),
7011 fold_convert_loc (loc, type, same));
7012
7013 return NULL_TREE;
7014 }
7015
7016 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7017 specified by EXPR into the buffer PTR of length LEN bytes.
7018 Return the number of bytes placed in the buffer, or zero
7019 upon failure. */
7020
7021 static int
7022 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7023 {
7024 tree type = TREE_TYPE (expr);
7025 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7026 int byte, offset, word, words;
7027 unsigned char value;
7028
7029 if ((off == -1 && total_bytes > len)
7030 || off >= total_bytes)
7031 return 0;
7032 if (off == -1)
7033 off = 0;
7034 words = total_bytes / UNITS_PER_WORD;
7035
7036 for (byte = 0; byte < total_bytes; byte++)
7037 {
7038 int bitpos = byte * BITS_PER_UNIT;
7039 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7040 number of bytes. */
7041 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7042
7043 if (total_bytes > UNITS_PER_WORD)
7044 {
7045 word = byte / UNITS_PER_WORD;
7046 if (WORDS_BIG_ENDIAN)
7047 word = (words - 1) - word;
7048 offset = word * UNITS_PER_WORD;
7049 if (BYTES_BIG_ENDIAN)
7050 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7051 else
7052 offset += byte % UNITS_PER_WORD;
7053 }
7054 else
7055 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7056 if (offset >= off
7057 && offset - off < len)
7058 ptr[offset - off] = value;
7059 }
7060 return MIN (len, total_bytes - off);
7061 }
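/* A minimal host-side model of the byte loop above (a sketch assuming
a little-endian target and a value that fits in one host integer;
encode_le is hypothetical and not part of GCC):

static void
encode_le (unsigned HOST_WIDE_INT val, unsigned char *ptr, int nbytes)
{
for (int byte = 0; byte < nbytes; byte++)
ptr[byte] = (val >> (byte * BITS_PER_UNIT)) & 0xff;
}
*/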
7062
7063
7064 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7065 specified by EXPR into the buffer PTR of length LEN bytes.
7066 Return the number of bytes placed in the buffer, or zero
7067 upon failure. */
7068
7069 static int
7070 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7071 {
7072 tree type = TREE_TYPE (expr);
7073 machine_mode mode = TYPE_MODE (type);
7074 int total_bytes = GET_MODE_SIZE (mode);
7075 FIXED_VALUE_TYPE value;
7076 tree i_value, i_type;
7077
7078 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7079 return 0;
7080
7081 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7082
7083 if (NULL_TREE == i_type
7084 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7085 return 0;
7086
7087 value = TREE_FIXED_CST (expr);
7088 i_value = double_int_to_tree (i_type, value.data);
7089
7090 return native_encode_int (i_value, ptr, len, off);
7091 }
7092
7093
7094 /* Subroutine of native_encode_expr. Encode the REAL_CST
7095 specified by EXPR into the buffer PTR of length LEN bytes.
7096 Return the number of bytes placed in the buffer, or zero
7097 upon failure. */
7098
7099 static int
7100 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7101 {
7102 tree type = TREE_TYPE (expr);
7103 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7104 int byte, offset, word, words, bitpos;
7105 unsigned char value;
7106
7107 /* There are always 32 bits in each long, no matter the size of
7108 the host's long. We handle floating point representations with
7109 up to 192 bits. */
7110 long tmp[6];
7111
7112 if ((off == -1 && total_bytes > len)
7113 || off >= total_bytes)
7114 return 0;
7115 if (off == -1)
7116 off = 0;
7117 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7118
7119 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7120
7121 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7122 bitpos += BITS_PER_UNIT)
7123 {
7124 byte = (bitpos / BITS_PER_UNIT) & 3;
7125 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7126
7127 if (UNITS_PER_WORD < 4)
7128 {
7129 word = byte / UNITS_PER_WORD;
7130 if (WORDS_BIG_ENDIAN)
7131 word = (words - 1) - word;
7132 offset = word * UNITS_PER_WORD;
7133 if (BYTES_BIG_ENDIAN)
7134 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7135 else
7136 offset += byte % UNITS_PER_WORD;
7137 }
7138 else
7139 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7140 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7141 if (offset >= off
7142 && offset - off < len)
7143 ptr[offset - off] = value;
7144 }
7145 return MIN (len, total_bytes - off);
7146 }
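/* For illustration (assuming a little-endian target with IEEE single
precision): encoding (float) 1.0 produces the bytes 00 00 80 3f,
i.e. the target layout of the bit pattern 0x3f800000. */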
7147
7148 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7149 specified by EXPR into the buffer PTR of length LEN bytes.
7150 Return the number of bytes placed in the buffer, or zero
7151 upon failure. */
7152
7153 static int
7154 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7155 {
7156 int rsize, isize;
7157 tree part;
7158
7159 part = TREE_REALPART (expr);
7160 rsize = native_encode_expr (part, ptr, len, off);
7161 if (off == -1
7162 && rsize == 0)
7163 return 0;
7164 part = TREE_IMAGPART (expr);
7165 if (off != -1)
7166 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7167 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7168 if (off == -1
7169 && isize != rsize)
7170 return 0;
7171 return rsize + isize;
7172 }
7173
7174
7175 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7176 specified by EXPR into the buffer PTR of length LEN bytes.
7177 Return the number of bytes placed in the buffer, or zero
7178 upon failure. */
7179
7180 static int
7181 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7182 {
7183 unsigned i, count;
7184 int size, offset;
7185 tree itype, elem;
7186
7187 offset = 0;
7188 count = VECTOR_CST_NELTS (expr);
7189 itype = TREE_TYPE (TREE_TYPE (expr));
7190 size = GET_MODE_SIZE (TYPE_MODE (itype));
7191 for (i = 0; i < count; i++)
7192 {
7193 if (off >= size)
7194 {
7195 off -= size;
7196 continue;
7197 }
7198 elem = VECTOR_CST_ELT (expr, i);
7199 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7200 if ((off == -1 && res != size)
7201 || res == 0)
7202 return 0;
7203 offset += res;
7204 if (offset >= len)
7205 return offset;
7206 if (off != -1)
7207 off = 0;
7208 }
7209 return offset;
7210 }
7211
7212
7213 /* Subroutine of native_encode_expr. Encode the STRING_CST
7214 specified by EXPR into the buffer PTR of length LEN bytes.
7215 Return the number of bytes placed in the buffer, or zero
7216 upon failure. */
7217
7218 static int
7219 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7220 {
7221 tree type = TREE_TYPE (expr);
7222 HOST_WIDE_INT total_bytes;
7223
7224 if (TREE_CODE (type) != ARRAY_TYPE
7225 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7226 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7227 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7228 return 0;
7229 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7230 if ((off == -1 && total_bytes > len)
7231 || off >= total_bytes)
7232 return 0;
7233 if (off == -1)
7234 off = 0;
7235 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7236 {
7237 int written = 0;
7238 if (off < TREE_STRING_LENGTH (expr))
7239 {
7240 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7241 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7242 }
7243 memset (ptr + written, 0,
7244 MIN (total_bytes - written, len - written));
7245 }
7246 else
7247 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7248 return MIN (total_bytes - off, len);
7249 }
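/* Illustrative example: for a STRING_CST "ab" whose type is char[4],
bytes past TREE_STRING_LENGTH are zero-filled, so the buffer
receives 'a', 'b', 0, 0. */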
7250
7251
7252 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7253 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7254 buffer PTR of length LEN bytes. If OFF is not -1 then start
7255 the encoding at byte offset OFF and encode at most LEN bytes.
7256 Return the number of bytes placed in the buffer, or zero upon failure. */
7257
7258 int
7259 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7260 {
7261 /* We don't support starting at a negative offset, and -1 is special. */
7262 if (off < -1)
7263 return 0;
7264
7265 switch (TREE_CODE (expr))
7266 {
7267 case INTEGER_CST:
7268 return native_encode_int (expr, ptr, len, off);
7269
7270 case REAL_CST:
7271 return native_encode_real (expr, ptr, len, off);
7272
7273 case FIXED_CST:
7274 return native_encode_fixed (expr, ptr, len, off);
7275
7276 case COMPLEX_CST:
7277 return native_encode_complex (expr, ptr, len, off);
7278
7279 case VECTOR_CST:
7280 return native_encode_vector (expr, ptr, len, off);
7281
7282 case STRING_CST:
7283 return native_encode_string (expr, ptr, len, off);
7284
7285 default:
7286 return 0;
7287 }
7288 }
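/* Illustrative usage (a sketch, not code from this file): a constant
can be round-tripped through its target byte representation:

unsigned char buf[64];
int len = native_encode_expr (cst, buf, sizeof (buf), -1);
if (len != 0)
{
/* Re-materialize the constant from the raw bytes.  */
tree back = native_interpret_expr (TREE_TYPE (cst), buf, len);
}
*/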
7289
7290
7291 /* Subroutine of native_interpret_expr. Interpret the contents of
7292 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7293 If the buffer cannot be interpreted, return NULL_TREE. */
7294
7295 static tree
7296 native_interpret_int (tree type, const unsigned char *ptr, int len)
7297 {
7298 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7299
7300 if (total_bytes > len
7301 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7302 return NULL_TREE;
7303
7304 wide_int result = wi::from_buffer (ptr, total_bytes);
7305
7306 return wide_int_to_tree (type, result);
7307 }
7308
7309
7310 /* Subroutine of native_interpret_expr. Interpret the contents of
7311 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7312 If the buffer cannot be interpreted, return NULL_TREE. */
7313
7314 static tree
7315 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7316 {
7317 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7318 double_int result;
7319 FIXED_VALUE_TYPE fixed_value;
7320
7321 if (total_bytes > len
7322 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7323 return NULL_TREE;
7324
7325 result = double_int::from_buffer (ptr, total_bytes);
7326 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7327
7328 return build_fixed (type, fixed_value);
7329 }
7330
7331
7332 /* Subroutine of native_interpret_expr. Interpret the contents of
7333 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7334 If the buffer cannot be interpreted, return NULL_TREE. */
7335
7336 static tree
7337 native_interpret_real (tree type, const unsigned char *ptr, int len)
7338 {
7339 machine_mode mode = TYPE_MODE (type);
7340 int total_bytes = GET_MODE_SIZE (mode);
7341 unsigned char value;
7342 /* There are always 32 bits in each long, no matter the size of
7343 the host's long. We handle floating point representations with
7344 up to 192 bits. */
7345 REAL_VALUE_TYPE r;
7346 long tmp[6];
7347
7348 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7349 if (total_bytes > len || total_bytes > 24)
7350 return NULL_TREE;
7351 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7352
7353 memset (tmp, 0, sizeof (tmp));
7354 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7355 bitpos += BITS_PER_UNIT)
7356 {
7357 /* Both OFFSET and BYTE index within a long;
7358 bitpos indexes the whole float. */
7359 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7360 if (UNITS_PER_WORD < 4)
7361 {
7362 int word = byte / UNITS_PER_WORD;
7363 if (WORDS_BIG_ENDIAN)
7364 word = (words - 1) - word;
7365 offset = word * UNITS_PER_WORD;
7366 if (BYTES_BIG_ENDIAN)
7367 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7368 else
7369 offset += byte % UNITS_PER_WORD;
7370 }
7371 else
7372 {
7373 offset = byte;
7374 if (BYTES_BIG_ENDIAN)
7375 {
7376 /* Reverse bytes within each long, or within the entire float
7377 if it's smaller than a long (for HFmode). */
7378 offset = MIN (3, total_bytes - 1) - offset;
7379 gcc_assert (offset >= 0);
7380 }
7381 }
7382 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7383
7384 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7385 }
7386
7387 real_from_target (&r, tmp, mode);
7388 return build_real (type, r);
7389 }
7390
7391
7392 /* Subroutine of native_interpret_expr. Interpret the contents of
7393 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7394 If the buffer cannot be interpreted, return NULL_TREE. */
7395
7396 static tree
7397 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7398 {
7399 tree etype, rpart, ipart;
7400 int size;
7401
7402 etype = TREE_TYPE (type);
7403 size = GET_MODE_SIZE (TYPE_MODE (etype));
7404 if (size * 2 > len)
7405 return NULL_TREE;
7406 rpart = native_interpret_expr (etype, ptr, size);
7407 if (!rpart)
7408 return NULL_TREE;
7409 ipart = native_interpret_expr (etype, ptr+size, size);
7410 if (!ipart)
7411 return NULL_TREE;
7412 return build_complex (type, rpart, ipart);
7413 }
7414
7415
7416 /* Subroutine of native_interpret_expr. Interpret the contents of
7417 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7418 If the buffer cannot be interpreted, return NULL_TREE. */
7419
7420 static tree
7421 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7422 {
7423 tree etype, elem;
7424 int i, size, count;
7425 tree *elements;
7426
7427 etype = TREE_TYPE (type);
7428 size = GET_MODE_SIZE (TYPE_MODE (etype));
7429 count = TYPE_VECTOR_SUBPARTS (type);
7430 if (size * count > len)
7431 return NULL_TREE;
7432
7433 elements = XALLOCAVEC (tree, count);
7434 for (i = count - 1; i >= 0; i--)
7435 {
7436 elem = native_interpret_expr (etype, ptr+(i*size), size);
7437 if (!elem)
7438 return NULL_TREE;
7439 elements[i] = elem;
7440 }
7441 return build_vector (type, elements);
7442 }
7443
7444
7445 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7446 the buffer PTR of length LEN as a constant of type TYPE. For
7447 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7448 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7449 return NULL_TREE. */
7450
7451 tree
7452 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7453 {
7454 switch (TREE_CODE (type))
7455 {
7456 case INTEGER_TYPE:
7457 case ENUMERAL_TYPE:
7458 case BOOLEAN_TYPE:
7459 case POINTER_TYPE:
7460 case REFERENCE_TYPE:
7461 return native_interpret_int (type, ptr, len);
7462
7463 case REAL_TYPE:
7464 return native_interpret_real (type, ptr, len);
7465
7466 case FIXED_POINT_TYPE:
7467 return native_interpret_fixed (type, ptr, len);
7468
7469 case COMPLEX_TYPE:
7470 return native_interpret_complex (type, ptr, len);
7471
7472 case VECTOR_TYPE:
7473 return native_interpret_vector (type, ptr, len);
7474
7475 default:
7476 return NULL_TREE;
7477 }
7478 }
7479
7480 /* Returns true if we can interpret the contents of a native encoding
7481 as TYPE. */
7482
7483 static bool
7484 can_native_interpret_type_p (tree type)
7485 {
7486 switch (TREE_CODE (type))
7487 {
7488 case INTEGER_TYPE:
7489 case ENUMERAL_TYPE:
7490 case BOOLEAN_TYPE:
7491 case POINTER_TYPE:
7492 case REFERENCE_TYPE:
7493 case FIXED_POINT_TYPE:
7494 case REAL_TYPE:
7495 case COMPLEX_TYPE:
7496 case VECTOR_TYPE:
7497 return true;
7498 default:
7499 return false;
7500 }
7501 }
7502
7503 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7504 TYPE at compile-time. If we're unable to perform the conversion
7505 return NULL_TREE. */
7506
7507 static tree
7508 fold_view_convert_expr (tree type, tree expr)
7509 {
7510 /* We support up to 512-bit values (for V8DFmode). */
7511 unsigned char buffer[64];
7512 int len;
7513
7514 /* Check that the host and target are sane. */
7515 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7516 return NULL_TREE;
7517
7518 len = native_encode_expr (expr, buffer, sizeof (buffer));
7519 if (len == 0)
7520 return NULL_TREE;
7521
7522 return native_interpret_expr (type, buffer, len);
7523 }
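/* Worked example (illustrative, assuming IEEE single precision and a
32-bit int): VIEW_CONVERT_EXPR<int>(1.0f) encodes the float into its
target bytes and reinterprets them as an int, yielding 0x3f800000;
the value is independent of byte order because both steps use the
target's order. */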
7524
7525 /* Build an expression for the address of T. Folds away INDIRECT_REF
7526 to avoid confusing the gimplify process. */
7527
7528 tree
7529 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7530 {
7531 /* The size of the object is not relevant when talking about its address. */
7532 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7533 t = TREE_OPERAND (t, 0);
7534
7535 if (TREE_CODE (t) == INDIRECT_REF)
7536 {
7537 t = TREE_OPERAND (t, 0);
7538
7539 if (TREE_TYPE (t) != ptrtype)
7540 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7541 }
7542 else if (TREE_CODE (t) == MEM_REF
7543 && integer_zerop (TREE_OPERAND (t, 1)))
7544 return TREE_OPERAND (t, 0);
7545 else if (TREE_CODE (t) == MEM_REF
7546 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7547 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7548 TREE_OPERAND (t, 0),
7549 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7550 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7551 {
7552 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7553
7554 if (TREE_TYPE (t) != ptrtype)
7555 t = fold_convert_loc (loc, ptrtype, t);
7556 }
7557 else
7558 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7559
7560 return t;
7561 }
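/* Examples of the folds above (illustrative): &*p becomes p (with a
NOP_EXPR when the pointer type differs), &MEM[p, 0] becomes p, and
&MEM[cst, off] becomes cst p+ off. */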
7562
7563 /* Build an expression for the address of T. */
7564
7565 tree
7566 build_fold_addr_expr_loc (location_t loc, tree t)
7567 {
7568 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7569
7570 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7571 }
7572
7573 /* Fold a unary expression of code CODE and type TYPE with operand
7574 OP0. Return the folded expression if folding is successful.
7575 Otherwise, return NULL_TREE. */
7576
7577 tree
7578 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7579 {
7580 tree tem;
7581 tree arg0;
7582 enum tree_code_class kind = TREE_CODE_CLASS (code);
7583
7584 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7585 && TREE_CODE_LENGTH (code) == 1);
7586
7587 arg0 = op0;
7588 if (arg0)
7589 {
7590 if (CONVERT_EXPR_CODE_P (code)
7591 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7592 {
7593 /* Don't use STRIP_NOPS, because signedness of argument type
7594 matters. */
7595 STRIP_SIGN_NOPS (arg0);
7596 }
7597 else
7598 {
7599 /* Strip any conversions that don't change the mode. This
7600 is safe for every expression, except for a comparison
7601 expression because its signedness is derived from its
7602 operands.
7603
7604 Note that this is done as an internal manipulation within
7605 the constant folder, in order to find the simplest
7606 representation of the arguments so that their form can be
7607 studied. In any case, the appropriate type conversions
7608 should be put back in the tree that will get out of the
7609 constant folder. */
7610 STRIP_NOPS (arg0);
7611 }
7612
7613 if (CONSTANT_CLASS_P (arg0))
7614 {
7615 tree tem = const_unop (code, type, arg0);
7616 if (tem)
7617 {
7618 if (TREE_TYPE (tem) != type)
7619 tem = fold_convert_loc (loc, type, tem);
7620 return tem;
7621 }
7622 }
7623 }
7624
7625 tem = generic_simplify (loc, code, type, op0);
7626 if (tem)
7627 return tem;
7628
7629 if (TREE_CODE_CLASS (code) == tcc_unary)
7630 {
7631 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7632 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7633 fold_build1_loc (loc, code, type,
7634 fold_convert_loc (loc, TREE_TYPE (op0),
7635 TREE_OPERAND (arg0, 1))));
7636 else if (TREE_CODE (arg0) == COND_EXPR)
7637 {
7638 tree arg01 = TREE_OPERAND (arg0, 1);
7639 tree arg02 = TREE_OPERAND (arg0, 2);
7640 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7641 arg01 = fold_build1_loc (loc, code, type,
7642 fold_convert_loc (loc,
7643 TREE_TYPE (op0), arg01));
7644 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7645 arg02 = fold_build1_loc (loc, code, type,
7646 fold_convert_loc (loc,
7647 TREE_TYPE (op0), arg02));
7648 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7649 arg01, arg02);
7650
7651 /* If this was a conversion, and all we did was to move it
7652 inside the COND_EXPR, bring it back out. But leave it if
7653 it is a conversion from integer to integer and the
7654 result precision is no wider than a word since such a
7655 conversion is cheap and may be optimized away by combine,
7656 while it couldn't if it were outside the COND_EXPR. Then return
7657 so we don't get into an infinite recursion loop taking the
7658 conversion out and then back in. */
7659
7660 if ((CONVERT_EXPR_CODE_P (code)
7661 || code == NON_LVALUE_EXPR)
7662 && TREE_CODE (tem) == COND_EXPR
7663 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7664 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7665 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7666 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7667 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7668 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7669 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7670 && (INTEGRAL_TYPE_P
7671 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7672 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7673 || flag_syntax_only))
7674 tem = build1_loc (loc, code, type,
7675 build3 (COND_EXPR,
7676 TREE_TYPE (TREE_OPERAND
7677 (TREE_OPERAND (tem, 1), 0)),
7678 TREE_OPERAND (tem, 0),
7679 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7680 TREE_OPERAND (TREE_OPERAND (tem, 2),
7681 0)));
7682 return tem;
7683 }
7684 }
7685
7686 switch (code)
7687 {
7688 case NON_LVALUE_EXPR:
7689 if (!maybe_lvalue_p (op0))
7690 return fold_convert_loc (loc, type, op0);
7691 return NULL_TREE;
7692
7693 CASE_CONVERT:
7694 case FLOAT_EXPR:
7695 case FIX_TRUNC_EXPR:
7696 if (COMPARISON_CLASS_P (op0))
7697 {
7698 /* If we have (type) (a CMP b) and type is an integral type, return
7699 new expression involving the new type. Canonicalize
7700 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7701 non-integral type.
7702 Do not fold the result as that would not simplify further, also
7703 folding again results in recursions. */
7704 if (TREE_CODE (type) == BOOLEAN_TYPE)
7705 return build2_loc (loc, TREE_CODE (op0), type,
7706 TREE_OPERAND (op0, 0),
7707 TREE_OPERAND (op0, 1));
7708 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7709 && TREE_CODE (type) != VECTOR_TYPE)
7710 return build3_loc (loc, COND_EXPR, type, op0,
7711 constant_boolean_node (true, type),
7712 constant_boolean_node (false, type));
7713 }
7714
7715 /* Handle (T *)&A.B.C for A being of type T and B and C
7716 living at offset zero. This occurs frequently in
7717 C++ upcasting and then accessing the base. */
7718 if (TREE_CODE (op0) == ADDR_EXPR
7719 && POINTER_TYPE_P (type)
7720 && handled_component_p (TREE_OPERAND (op0, 0)))
7721 {
7722 HOST_WIDE_INT bitsize, bitpos;
7723 tree offset;
7724 machine_mode mode;
7725 int unsignedp, reversep, volatilep;
7726 tree base
7727 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7728 &offset, &mode, &unsignedp, &reversep,
7729 &volatilep, false);
7730 /* If the reference was to a (constant) zero offset, we can use
7731 the address of the base if it has the same base type
7732 as the result type and the pointer type is unqualified. */
7733 if (! offset && bitpos == 0
7734 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7735 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7736 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7737 return fold_convert_loc (loc, type,
7738 build_fold_addr_expr_loc (loc, base));
7739 }
7740
7741 if (TREE_CODE (op0) == MODIFY_EXPR
7742 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7743 /* Detect assigning a bitfield. */
7744 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7745 && DECL_BIT_FIELD
7746 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7747 {
7748 /* Don't leave an assignment inside a conversion
7749 unless assigning a bitfield. */
7750 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7751 /* First do the assignment, then return converted constant. */
7752 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7753 TREE_NO_WARNING (tem) = 1;
7754 TREE_USED (tem) = 1;
7755 return tem;
7756 }
7757
7758 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7759 constants (if x has signed type, the sign bit cannot be set
7760 in c). This folds extension into the BIT_AND_EXPR.
7761 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7762 very likely don't have maximal range for their precision and this
7763 transformation effectively doesn't preserve non-maximal ranges. */
7764 if (TREE_CODE (type) == INTEGER_TYPE
7765 && TREE_CODE (op0) == BIT_AND_EXPR
7766 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7767 {
7768 tree and_expr = op0;
7769 tree and0 = TREE_OPERAND (and_expr, 0);
7770 tree and1 = TREE_OPERAND (and_expr, 1);
7771 int change = 0;
7772
7773 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7774 || (TYPE_PRECISION (type)
7775 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7776 change = 1;
7777 else if (TYPE_PRECISION (TREE_TYPE (and1))
7778 <= HOST_BITS_PER_WIDE_INT
7779 && tree_fits_uhwi_p (and1))
7780 {
7781 unsigned HOST_WIDE_INT cst;
7782
7783 cst = tree_to_uhwi (and1);
7784 cst &= HOST_WIDE_INT_M1U
7785 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7786 change = (cst == 0);
7787 if (change
7788 && !flag_syntax_only
7789 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7790 == ZERO_EXTEND))
7791 {
7792 tree uns = unsigned_type_for (TREE_TYPE (and0));
7793 and0 = fold_convert_loc (loc, uns, and0);
7794 and1 = fold_convert_loc (loc, uns, and1);
7795 }
7796 }
7797 if (change)
7798 {
7799 tem = force_fit_type (type, wi::to_widest (and1), 0,
7800 TREE_OVERFLOW (and1));
7801 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7802 fold_convert_loc (loc, type, and0), tem);
7803 }
7804 }
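/* For instance (illustrative, assuming 32-bit int): with int x,
(long) (x & 0xff) becomes (long) x & 0xff, since 0xff has no sign
bit set in int and the extension therefore commutes with the
masking. */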
7805
7806 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7807 cast (T1)X will fold away. We assume that this happens when X itself
7808 is a cast. */
7809 if (POINTER_TYPE_P (type)
7810 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7811 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7812 {
7813 tree arg00 = TREE_OPERAND (arg0, 0);
7814 tree arg01 = TREE_OPERAND (arg0, 1);
7815
7816 return fold_build_pointer_plus_loc
7817 (loc, fold_convert_loc (loc, type, arg00), arg01);
7818 }
7819
7820 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7821 of the same precision, and the type of X is an integer type not
7822 narrower than T1 or T2, i.e. the cast (T2)X isn't an extension. */
7823 if (INTEGRAL_TYPE_P (type)
7824 && TREE_CODE (op0) == BIT_NOT_EXPR
7825 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7826 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7827 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7828 {
7829 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7830 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7831 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7832 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7833 fold_convert_loc (loc, type, tem));
7834 }
7835
7836 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7837 type of X and Y (integer types only). */
7838 if (INTEGRAL_TYPE_P (type)
7839 && TREE_CODE (op0) == MULT_EXPR
7840 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7841 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7842 {
7843 /* Be careful not to introduce new overflows. */
7844 tree mult_type;
7845 if (TYPE_OVERFLOW_WRAPS (type))
7846 mult_type = type;
7847 else
7848 mult_type = unsigned_type_for (type);
7849
7850 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7851 {
7852 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7853 fold_convert_loc (loc, mult_type,
7854 TREE_OPERAND (op0, 0)),
7855 fold_convert_loc (loc, mult_type,
7856 TREE_OPERAND (op0, 1)));
7857 return fold_convert_loc (loc, type, tem);
7858 }
7859 }
7860
7861 return NULL_TREE;
7862
7863 case VIEW_CONVERT_EXPR:
7864 if (TREE_CODE (op0) == MEM_REF)
7865 {
7866 tem = fold_build2_loc (loc, MEM_REF, type,
7867 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7868 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7869 return tem;
7870 }
7871
7872 return NULL_TREE;
7873
7874 case NEGATE_EXPR:
7875 tem = fold_negate_expr (loc, arg0);
7876 if (tem)
7877 return fold_convert_loc (loc, type, tem);
7878 return NULL_TREE;
7879
7880 case ABS_EXPR:
7881 /* Convert fabs((double)float) into (double)fabsf(float). */
7882 if (TREE_CODE (arg0) == NOP_EXPR
7883 && TREE_CODE (type) == REAL_TYPE)
7884 {
7885 tree targ0 = strip_float_extensions (arg0);
7886 if (targ0 != arg0)
7887 return fold_convert_loc (loc, type,
7888 fold_build1_loc (loc, ABS_EXPR,
7889 TREE_TYPE (targ0),
7890 targ0));
7891 }
7892 return NULL_TREE;
7893
7894 case BIT_NOT_EXPR:
7895 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7896 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7897 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7898 fold_convert_loc (loc, type,
7899 TREE_OPERAND (arg0, 0)))))
7900 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7901 fold_convert_loc (loc, type,
7902 TREE_OPERAND (arg0, 1)));
7903 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7904 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7905 fold_convert_loc (loc, type,
7906 TREE_OPERAND (arg0, 1)))))
7907 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7908 fold_convert_loc (loc, type,
7909 TREE_OPERAND (arg0, 0)), tem);
7910
7911 return NULL_TREE;
7912
7913 case TRUTH_NOT_EXPR:
7914 /* Note that the operand of this must be an int
7915 and its values must be 0 or 1.
7916 ("true" is a fixed value perhaps depending on the language,
7917 but we don't handle values other than 1 correctly yet.) */
7918 tem = fold_truth_not_expr (loc, arg0);
7919 if (!tem)
7920 return NULL_TREE;
7921 return fold_convert_loc (loc, type, tem);
7922
7923 case INDIRECT_REF:
7924 /* Fold *&X to X if X is an lvalue. */
7925 if (TREE_CODE (op0) == ADDR_EXPR)
7926 {
7927 tree op00 = TREE_OPERAND (op0, 0);
7928 if ((TREE_CODE (op00) == VAR_DECL
7929 || TREE_CODE (op00) == PARM_DECL
7930 || TREE_CODE (op00) == RESULT_DECL)
7931 && !TREE_READONLY (op00))
7932 return op00;
7933 }
7934 return NULL_TREE;
7935
7936 default:
7937 return NULL_TREE;
7938 } /* switch (code) */
7939 }
7940
7941
7942 /* If the operation was a conversion do _not_ mark a resulting constant
7943 with TREE_OVERFLOW if the original constant was not. These conversions
7944 have implementation defined behavior and retaining the TREE_OVERFLOW
7945 flag here would confuse later passes such as VRP. */
7946 tree
7947 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7948 tree type, tree op0)
7949 {
7950 tree res = fold_unary_loc (loc, code, type, op0);
7951 if (res
7952 && TREE_CODE (res) == INTEGER_CST
7953 && TREE_CODE (op0) == INTEGER_CST
7954 && CONVERT_EXPR_CODE_P (code))
7955 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7956
7957 return res;
7958 }
7959
7960 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7961 operands OP0 and OP1. LOC is the location of the resulting expression.
7962 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
7963 Return the folded expression if folding is successful. Otherwise,
7964 return NULL_TREE. */
7965 static tree
7966 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7967 tree arg0, tree arg1, tree op0, tree op1)
7968 {
7969 tree tem;
7970
7971 /* We only do these simplifications if we are optimizing. */
7972 if (!optimize)
7973 return NULL_TREE;
7974
7975 /* Check for things like (A || B) && (A || C). We can convert this
7976 to A || (B && C). Note that either operator can be any of the four
7977 truth and/or operations and the transformation will still be
7978 valid. Also note that we only care about order for the
7979 ANDIF and ORIF operators. If B contains side effects, this
7980 might change the truth-value of A. */
7981 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7982 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7983 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7984 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7985 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7986 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7987 {
7988 tree a00 = TREE_OPERAND (arg0, 0);
7989 tree a01 = TREE_OPERAND (arg0, 1);
7990 tree a10 = TREE_OPERAND (arg1, 0);
7991 tree a11 = TREE_OPERAND (arg1, 1);
7992 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7993 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7994 && (code == TRUTH_AND_EXPR
7995 || code == TRUTH_OR_EXPR));
7996
7997 if (operand_equal_p (a00, a10, 0))
7998 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7999 fold_build2_loc (loc, code, type, a01, a11));
8000 else if (commutative && operand_equal_p (a00, a11, 0))
8001 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8002 fold_build2_loc (loc, code, type, a01, a10));
8003 else if (commutative && operand_equal_p (a01, a10, 0))
8004 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8005 fold_build2_loc (loc, code, type, a00, a11));
8006
8007 /* This case is tricky because we must either have commutative
8008 operators or else A10 must not have side-effects. */
8009
8010 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8011 && operand_equal_p (a01, a11, 0))
8012 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8013 fold_build2_loc (loc, code, type, a00, a10),
8014 a01);
8015 }
8016
8017 /* See if we can build a range comparison. */
8018 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8019 return tem;
8020
8021 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8022 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8023 {
8024 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8025 if (tem)
8026 return fold_build2_loc (loc, code, type, tem, arg1);
8027 }
8028
8029 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8030 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8031 {
8032 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8033 if (tem)
8034 return fold_build2_loc (loc, code, type, arg0, tem);
8035 }
8036
8037 /* Check for the possibility of merging component references. If our
8038 lhs is another similar operation, try to merge its rhs with our
8039 rhs. Then try to merge our lhs and rhs. */
8040 if (TREE_CODE (arg0) == code
8041 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8042 TREE_OPERAND (arg0, 1), arg1)))
8043 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8044
8045 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8046 return tem;
8047
8048 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8049 && (code == TRUTH_AND_EXPR
8050 || code == TRUTH_ANDIF_EXPR
8051 || code == TRUTH_OR_EXPR
8052 || code == TRUTH_ORIF_EXPR))
8053 {
8054 enum tree_code ncode, icode;
8055
8056 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8057 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8058 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8059
8060 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8061 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8062 We don't want to pack more than two leaves into a non-IF AND/OR
8063 expression.
8064 If the tree code of the left-hand operand isn't an AND/OR-IF code
8065 and is not equal to IF-CODE, then we don't want to add the
8066 right-hand operand. If the inner right-hand side of the
8067 left-hand operand has side-effects, or isn't simple, then we
8068 can't add to it, as otherwise we might destroy the if-sequence. */
8069 if (TREE_CODE (arg0) == icode
8070 && simple_operand_p_2 (arg1)
8071 /* Needed for sequence points to handle trapping operations
8072 and side-effects. */
8073 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8074 {
8075 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8076 arg1);
8077 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8078 tem);
8079 }
8080 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8081 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8082 else if (TREE_CODE (arg1) == icode
8083 && simple_operand_p_2 (arg0)
8084 /* Needed for sequence points to handle trapping operations
8085 and side-effects. */
8086 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8087 {
8088 tem = fold_build2_loc (loc, ncode, type,
8089 arg0, TREE_OPERAND (arg1, 0));
8090 return fold_build2_loc (loc, icode, type, tem,
8091 TREE_OPERAND (arg1, 1));
8092 }
8093 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8094 into (A OR B).
8095 For sequence point consistency, we need to check for trapping,
8096 and side-effects. */
8097 else if (code == icode && simple_operand_p_2 (arg0)
8098 && simple_operand_p_2 (arg1))
8099 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8100 }
8101
8102 return NULL_TREE;
8103 }
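/* Illustrative effect (assuming a target where
LOGICAL_OP_NON_SHORT_CIRCUIT is nonzero): for simple, non-trapping
operands, "a != 0 && b != 0" can be evaluated as a single
non-short-circuiting TRUTH_AND_EXPR, removing a branch. */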
8104
8105 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8106 by changing CODE to reduce the magnitude of constants involved in
8107 ARG0 of the comparison.
8108 Returns a canonicalized comparison tree if a simplification was
8109 possible, otherwise returns NULL_TREE.
8110 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8111 valid if signed overflow is undefined. */
8112
8113 static tree
8114 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8115 tree arg0, tree arg1,
8116 bool *strict_overflow_p)
8117 {
8118 enum tree_code code0 = TREE_CODE (arg0);
8119 tree t, cst0 = NULL_TREE;
8120 int sgn0;
8121
8122 /* Match A +- CST code arg1. We can change this only if overflow
8123 is undefined. */
8124 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8125 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8126 /* In principle pointers also have undefined overflow behavior,
8127 but that causes problems elsewhere. */
8128 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8129 && (code0 == MINUS_EXPR
8130 || code0 == PLUS_EXPR)
8131 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8132 return NULL_TREE;
8133
8134 /* Identify the constant in arg0 and its sign. */
8135 cst0 = TREE_OPERAND (arg0, 1);
8136 sgn0 = tree_int_cst_sgn (cst0);
8137
8138 /* Overflowed constants and zero will cause problems. */
8139 if (integer_zerop (cst0)
8140 || TREE_OVERFLOW (cst0))
8141 return NULL_TREE;
8142
8143 /* See if we can reduce the magnitude of the constant in
8144 arg0 by changing the comparison code. */
8145 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8146 if (code == LT_EXPR
8147 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8148 code = LE_EXPR;
8149 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8150 else if (code == GT_EXPR
8151 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8152 code = GE_EXPR;
8153 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8154 else if (code == LE_EXPR
8155 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8156 code = LT_EXPR;
8157 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8158 else if (code == GE_EXPR
8159 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8160 code = GT_EXPR;
8161 else
8162 return NULL_TREE;
8163 *strict_overflow_p = true;
8164
8165 /* Now build the constant reduced in magnitude. But not if that
8166 would produce one outside of its type's range. */
8167 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8168 && ((sgn0 == 1
8169 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8170 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8171 || (sgn0 == -1
8172 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8173 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8174 return NULL_TREE;
8175
8176 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8177 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8178 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8179 t = fold_convert (TREE_TYPE (arg1), t);
8180
8181 return fold_build2_loc (loc, code, type, t, arg1);
8182 }
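/* Worked example (illustrative, signed x with undefined overflow):
"x - 5 < y" is canonicalized to "x - 4 <= y"; LT_EXPR becomes
LE_EXPR and the magnitude of the constant drops from 5 to 4. */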
8183
8184 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8185 overflow further. Try to decrease the magnitude of constants involved
8186 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8187 and put sole constants at the second argument position.
8188 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8189
8190 static tree
8191 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8192 tree arg0, tree arg1)
8193 {
8194 tree t;
8195 bool strict_overflow_p;
8196 const char * const warnmsg = G_("assuming signed overflow does not occur "
8197 "when reducing constant in comparison");
8198
8199 /* Try canonicalization by simplifying arg0. */
8200 strict_overflow_p = false;
8201 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8202 &strict_overflow_p);
8203 if (t)
8204 {
8205 if (strict_overflow_p)
8206 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8207 return t;
8208 }
8209
8210 /* Try canonicalization by simplifying arg1 using the swapped
8211 comparison. */
8212 code = swap_tree_comparison (code);
8213 strict_overflow_p = false;
8214 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8215 &strict_overflow_p);
8216 if (t && strict_overflow_p)
8217 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8218 return t;
8219 }
8220
8221 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8222 space. This is used to avoid issuing overflow warnings for
8223 expressions like &p->x which cannot wrap. */
8224
8225 static bool
8226 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8227 {
8228 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8229 return true;
8230
8231 if (bitpos < 0)
8232 return true;
8233
8234 wide_int wi_offset;
8235 int precision = TYPE_PRECISION (TREE_TYPE (base));
8236 if (offset == NULL_TREE)
8237 wi_offset = wi::zero (precision);
8238 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8239 return true;
8240 else
8241 wi_offset = offset;
8242
8243 bool overflow;
8244 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8245 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8246 if (overflow)
8247 return true;
8248
8249 if (!wi::fits_uhwi_p (total))
8250 return true;
8251
8252 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8253 if (size <= 0)
8254 return true;
8255
8256 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8257 array. */
8258 if (TREE_CODE (base) == ADDR_EXPR)
8259 {
8260 HOST_WIDE_INT base_size;
8261
8262 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8263 if (base_size > 0 && size < base_size)
8264 size = base_size;
8265 }
8266
8267 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8268 }
8269
8270 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8271 kind INTEGER_CST. This makes sure to properly sign-extend the
8272 constant. */
8273
8274 static HOST_WIDE_INT
8275 size_low_cst (const_tree t)
8276 {
8277 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8278 int prec = TYPE_PRECISION (TREE_TYPE (t));
8279 if (prec < HOST_BITS_PER_WIDE_INT)
8280 return sext_hwi (w, prec);
8281 return w;
8282 }
8283
8284 /* Subroutine of fold_binary. This routine performs all of the
8285 transformations that are common to the equality/inequality
8286 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8287 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8288 fold_binary should call fold_binary. Fold a comparison with
8289 tree code CODE and type TYPE with operands OP0 and OP1. Return
8290 the folded comparison or NULL_TREE. */
8291
8292 static tree
8293 fold_comparison (location_t loc, enum tree_code code, tree type,
8294 tree op0, tree op1)
8295 {
8296 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8297 tree arg0, arg1, tem;
8298
8299 arg0 = op0;
8300 arg1 = op1;
8301
8302 STRIP_SIGN_NOPS (arg0);
8303 STRIP_SIGN_NOPS (arg1);
8304
8305 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8306 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8307 && (equality_code
8308 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8309 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8310 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8311 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8312 && TREE_CODE (arg1) == INTEGER_CST
8313 && !TREE_OVERFLOW (arg1))
8314 {
8315 const enum tree_code
8316 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8317 tree const1 = TREE_OPERAND (arg0, 1);
8318 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8319 tree variable = TREE_OPERAND (arg0, 0);
8320 tree new_const = int_const_binop (reverse_op, const2, const1);
8321
8322 /* If the constant operation overflowed this can be
8323 simplified as a comparison against INT_MAX/INT_MIN. */
8324 if (TREE_OVERFLOW (new_const)
8325 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8326 {
8327 int const1_sgn = tree_int_cst_sgn (const1);
8328 enum tree_code code2 = code;
8329
8330 /* Get the sign of the constant on the lhs if the
8331 operation were VARIABLE + CONST1. */
8332 if (TREE_CODE (arg0) == MINUS_EXPR)
8333 const1_sgn = -const1_sgn;
8334
8335 /* The sign of the constant determines if we overflowed
8336 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8337 Canonicalize to the INT_MIN overflow by swapping the comparison
8338 if necessary. */
8339 if (const1_sgn == -1)
8340 code2 = swap_tree_comparison (code);
8341
8342 /* We now can look at the canonicalized case
8343 VARIABLE + 1 CODE2 INT_MIN
8344 and decide on the result. */
8345 switch (code2)
8346 {
8347 case EQ_EXPR:
8348 case LT_EXPR:
8349 case LE_EXPR:
8350 return
8351 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8352
8353 case NE_EXPR:
8354 case GE_EXPR:
8355 case GT_EXPR:
8356 return
8357 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8358
8359 default:
8360 gcc_unreachable ();
8361 }
8362 }
8363 else
8364 {
8365 if (!equality_code)
8366 fold_overflow_warning ("assuming signed overflow does not occur "
8367 "when changing X +- C1 cmp C2 to "
8368 "X cmp C2 -+ C1",
8369 WARN_STRICT_OVERFLOW_COMPARISON);
8370 return fold_build2_loc (loc, code, type, variable, new_const);
8371 }
8372 }
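/* Worked examples (illustrative, signed int with undefined overflow):
"x + 5 < 10" becomes "x < 5". If the new constant overflows, the
result is decided outright: "x + 1 < INT_MIN" folds to false, since
x + 1 can never be below INT_MIN. */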
8373
8374 /* For comparisons of pointers we can decompose it to a compile time
8375 comparison of the base objects and the offsets into the object.
8376 This requires at least one operand being an ADDR_EXPR or a
8377 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8378 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8379 && (TREE_CODE (arg0) == ADDR_EXPR
8380 || TREE_CODE (arg1) == ADDR_EXPR
8381 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8382 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8383 {
8384 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8385 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8386 machine_mode mode;
8387 int volatilep, reversep, unsignedp;
8388 bool indirect_base0 = false, indirect_base1 = false;
8389
8390 /* Get base and offset for the access. Strip ADDR_EXPR for
8391 get_inner_reference, but put it back by stripping INDIRECT_REF
8392 off the base object if possible. indirect_baseN will be true
8393 if baseN is not an address but refers to the object itself. */
8394 base0 = arg0;
8395 if (TREE_CODE (arg0) == ADDR_EXPR)
8396 {
8397 base0
8398 = get_inner_reference (TREE_OPERAND (arg0, 0),
8399 &bitsize, &bitpos0, &offset0, &mode,
8400 &unsignedp, &reversep, &volatilep, false);
8401 if (TREE_CODE (base0) == INDIRECT_REF)
8402 base0 = TREE_OPERAND (base0, 0);
8403 else
8404 indirect_base0 = true;
8405 }
8406 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8407 {
8408 base0 = TREE_OPERAND (arg0, 0);
8409 STRIP_SIGN_NOPS (base0);
8410 if (TREE_CODE (base0) == ADDR_EXPR)
8411 {
8412 base0 = TREE_OPERAND (base0, 0);
8413 indirect_base0 = true;
8414 }
8415 offset0 = TREE_OPERAND (arg0, 1);
8416 if (tree_fits_shwi_p (offset0))
8417 {
8418 HOST_WIDE_INT off = size_low_cst (offset0);
8419 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8420 * BITS_PER_UNIT)
8421 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8422 {
8423 bitpos0 = off * BITS_PER_UNIT;
8424 offset0 = NULL_TREE;
8425 }
8426 }
8427 }
8428
8429 base1 = arg1;
8430 if (TREE_CODE (arg1) == ADDR_EXPR)
8431 {
8432 base1
8433 = get_inner_reference (TREE_OPERAND (arg1, 0),
8434 &bitsize, &bitpos1, &offset1, &mode,
8435 &unsignedp, &reversep, &volatilep, false);
8436 if (TREE_CODE (base1) == INDIRECT_REF)
8437 base1 = TREE_OPERAND (base1, 0);
8438 else
8439 indirect_base1 = true;
8440 }
8441 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8442 {
8443 base1 = TREE_OPERAND (arg1, 0);
8444 STRIP_SIGN_NOPS (base1);
8445 if (TREE_CODE (base1) == ADDR_EXPR)
8446 {
8447 base1 = TREE_OPERAND (base1, 0);
8448 indirect_base1 = true;
8449 }
8450 offset1 = TREE_OPERAND (arg1, 1);
8451 if (tree_fits_shwi_p (offset1))
8452 {
8453 HOST_WIDE_INT off = size_low_cst (offset1);
8454 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8455 * BITS_PER_UNIT)
8456 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8457 {
8458 bitpos1 = off * BITS_PER_UNIT;
8459 offset1 = NULL_TREE;
8460 }
8461 }
8462 }
8463
8464 /* If we have equivalent bases we might be able to simplify. */
8465 if (indirect_base0 == indirect_base1
8466 && operand_equal_p (base0, base1,
8467 indirect_base0 ? OEP_ADDRESS_OF : 0))
8468 {
8469 /* We can fold this expression to a constant if the non-constant
8470 offset parts are equal. */
8471 if ((offset0 == offset1
8472 || (offset0 && offset1
8473 && operand_equal_p (offset0, offset1, 0)))
8474 && (code == EQ_EXPR
8475 || code == NE_EXPR
8476 || (indirect_base0 && DECL_P (base0))
8477 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8478
8479 {
8480 if (!equality_code
8481 && bitpos0 != bitpos1
8482 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8483 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8484 fold_overflow_warning (("assuming pointer wraparound does not "
8485 "occur when comparing P +- C1 with "
8486 "P +- C2"),
8487 WARN_STRICT_OVERFLOW_CONDITIONAL);
8488
8489 switch (code)
8490 {
8491 case EQ_EXPR:
8492 return constant_boolean_node (bitpos0 == bitpos1, type);
8493 case NE_EXPR:
8494 return constant_boolean_node (bitpos0 != bitpos1, type);
8495 case LT_EXPR:
8496 return constant_boolean_node (bitpos0 < bitpos1, type);
8497 case LE_EXPR:
8498 return constant_boolean_node (bitpos0 <= bitpos1, type);
8499 case GE_EXPR:
8500 return constant_boolean_node (bitpos0 >= bitpos1, type);
8501 case GT_EXPR:
8502 return constant_boolean_node (bitpos0 > bitpos1, type);
8503 default:;
8504 }
8505 }
8506 /* We can simplify the comparison to a comparison of the variable
8507 offset parts if the constant offset parts are equal.
8508 Be careful to use signed sizetype here because otherwise we
8509 mess with array offsets in the wrong way. This is possible
8510 because pointer arithmetic is restricted to remain within an
8511 object and overflow on pointer differences is undefined as of
8512 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8513 else if (bitpos0 == bitpos1
8514 && (equality_code
8515 || (indirect_base0 && DECL_P (base0))
8516 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8517 {
8518 /* By converting to signed sizetype we cover middle-end pointer
8519 arithmetic which operates on unsigned pointer types of size
8520 type size and ARRAY_REF offsets which are properly sign or
8521 zero extended from their type in case it is narrower than
8522 sizetype. */
8523 if (offset0 == NULL_TREE)
8524 offset0 = build_int_cst (ssizetype, 0);
8525 else
8526 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8527 if (offset1 == NULL_TREE)
8528 offset1 = build_int_cst (ssizetype, 0);
8529 else
8530 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8531
8532 if (!equality_code
8533 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8534 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8535 fold_overflow_warning (("assuming pointer wraparound does not "
8536 "occur when comparing P +- C1 with "
8537 "P +- C2"),
8538 WARN_STRICT_OVERFLOW_COMPARISON);
8539
8540 return fold_build2_loc (loc, code, type, offset0, offset1);
8541 }
8542 }
8543 /* For equal offsets we can simplify to a comparison of the
8544 base addresses. */
8545 else if (bitpos0 == bitpos1
8546 && (indirect_base0
8547 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8548 && (indirect_base1
8549 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8550 && ((offset0 == offset1)
8551 || (offset0 && offset1
8552 && operand_equal_p (offset0, offset1, 0))))
8553 {
8554 if (indirect_base0)
8555 base0 = build_fold_addr_expr_loc (loc, base0);
8556 if (indirect_base1)
8557 base1 = build_fold_addr_expr_loc (loc, base1);
8558 return fold_build2_loc (loc, code, type, base0, base1);
8559 }
8560 }
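/* Illustrative folds from the decomposition above: for a declared
array a, "&a[1] < &a[3]" compares the constant bit positions and
folds to true, while "&a[1] == &a[2]" folds to false. */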
8561
8562 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8563 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8564 the resulting offset is smaller in absolute value than the
8565 original one and has the same sign. */
8566 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8567 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8568 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8569 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8570 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8571 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8572 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8573 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8574 {
8575 tree const1 = TREE_OPERAND (arg0, 1);
8576 tree const2 = TREE_OPERAND (arg1, 1);
8577 tree variable1 = TREE_OPERAND (arg0, 0);
8578 tree variable2 = TREE_OPERAND (arg1, 0);
8579 tree cst;
8580 const char * const warnmsg = G_("assuming signed overflow does not "
8581 "occur when combining constants around "
8582 "a comparison");
8583
8584 /* Put the constant on the side where it doesn't overflow and is
8585 of lower absolute value than before, with the same sign. */
8586 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8587 ? MINUS_EXPR : PLUS_EXPR,
8588 const2, const1);
8589 if (!TREE_OVERFLOW (cst)
8590 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8591 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8592 {
8593 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8594 return fold_build2_loc (loc, code, type,
8595 variable1,
8596 fold_build2_loc (loc, TREE_CODE (arg1),
8597 TREE_TYPE (arg1),
8598 variable2, cst));
8599 }
8600
8601 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8602 ? MINUS_EXPR : PLUS_EXPR,
8603 const1, const2);
8604 if (!TREE_OVERFLOW (cst)
8605 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8606 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8607 {
8608 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8609 return fold_build2_loc (loc, code, type,
8610 fold_build2_loc (loc, TREE_CODE (arg0),
8611 TREE_TYPE (arg0),
8612 variable1, cst),
8613 variable2);
8614 }
8615 }
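/* Worked example (illustrative, signed x and y): "x + 3 < y + 5"
becomes "x < y + 2"; the combined constant 2 is smaller in absolute
value than 5 and has the same sign. */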
8616
8617 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8618 if (tem)
8619 return tem;
8620
8621 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8622 constant, we can simplify it. */
8623 if (TREE_CODE (arg1) == INTEGER_CST
8624 && (TREE_CODE (arg0) == MIN_EXPR
8625 || TREE_CODE (arg0) == MAX_EXPR)
8626 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8627 {
8628 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8629 if (tem)
8630 return tem;
8631 }
8632
8633 /* If we are comparing an expression that just has comparisons
8634 of two integer values, arithmetic expressions of those comparisons,
8635 and constants, we can simplify it. There are only three cases
8636 to check: the two values can either be equal, the first can be
8637 greater, or the second can be greater. Fold the expression for
8638 those three values. Since each value must be 0 or 1, we have
8639 eight possibilities, each of which corresponds to the constant 0
8640 or 1 or one of the six possible comparisons.
8641
8642 This handles common cases like (a > b) == 0 but also handles
8643 expressions like ((x > y) - (y > x)) > 0, which supposedly
8644 occur in macroized code. */
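  /* Worked example (illustrative): for ((x > y) - (y > x)) > 0 the
     three substitutions yield high_result = 1, equal_result = 0 and
     low_result = 0, i.e. mask value 4 below, so the whole expression
     folds to x > y.  */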
8645
8646 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8647 {
8648 tree cval1 = 0, cval2 = 0;
8649 int save_p = 0;
8650
8651 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8652 /* Don't handle degenerate cases here; they should already
8653 have been handled anyway. */
8654 && cval1 != 0 && cval2 != 0
8655 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8656 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8657 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8658 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8659 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8660 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8661 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8662 {
8663 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8664 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8665
8666 /* We can't just pass T to eval_subst in case cval1 or cval2
8667 was the same as ARG1. */
8668
8669 tree high_result
8670 = fold_build2_loc (loc, code, type,
8671 eval_subst (loc, arg0, cval1, maxval,
8672 cval2, minval),
8673 arg1);
8674 tree equal_result
8675 = fold_build2_loc (loc, code, type,
8676 eval_subst (loc, arg0, cval1, maxval,
8677 cval2, maxval),
8678 arg1);
8679 tree low_result
8680 = fold_build2_loc (loc, code, type,
8681 eval_subst (loc, arg0, cval1, minval,
8682 cval2, maxval),
8683 arg1);
8684
8685 /* All three of these results should be 0 or 1. Confirm they are.
8686 Then use those values to select the proper code to use. */
8687
8688 if (TREE_CODE (high_result) == INTEGER_CST
8689 && TREE_CODE (equal_result) == INTEGER_CST
8690 && TREE_CODE (low_result) == INTEGER_CST)
8691 {
8692 /* Make a 3-bit mask with the high-order bit being the
8693 		 value for '>', the next for '=', and the low for '<'.  */
8694 switch ((integer_onep (high_result) * 4)
8695 + (integer_onep (equal_result) * 2)
8696 + integer_onep (low_result))
8697 {
8698 case 0:
8699 /* Always false. */
8700 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8701 case 1:
8702 code = LT_EXPR;
8703 break;
8704 case 2:
8705 code = EQ_EXPR;
8706 break;
8707 case 3:
8708 code = LE_EXPR;
8709 break;
8710 case 4:
8711 code = GT_EXPR;
8712 break;
8713 case 5:
8714 code = NE_EXPR;
8715 break;
8716 case 6:
8717 code = GE_EXPR;
8718 break;
8719 case 7:
8720 /* Always true. */
8721 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8722 }
8723
8724 if (save_p)
8725 {
8726 tem = save_expr (build2 (code, type, cval1, cval2));
8727 SET_EXPR_LOCATION (tem, loc);
8728 return tem;
8729 }
8730 return fold_build2_loc (loc, code, type, cval1, cval2);
8731 }
8732 }
8733 }
8734
8735 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8736 into a single range test. */
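  /* Illustrative example: with truncating division, x / 3 == 2 holds
     exactly for x in [6, 8], so it can be folded into the equivalent
     of the range test 6 <= x && x <= 8.  */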
8737 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8738 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8739 && TREE_CODE (arg1) == INTEGER_CST
8740 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8741 && !integer_zerop (TREE_OPERAND (arg0, 1))
8742 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8743 && !TREE_OVERFLOW (arg1))
8744 {
8745 tem = fold_div_compare (loc, code, type, arg0, arg1);
8746 if (tem != NULL_TREE)
8747 return tem;
8748 }
8749
8750 return NULL_TREE;
8751 }
8752
8753
8754 /* Subroutine of fold_binary. Optimize complex multiplications of the
8755 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8756 argument EXPR represents the expression "z" of type TYPE. */
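/* Illustrative example: for z = 3.0 + 4.0i, z * conj(z) folds to
   25.0 + 0.0i, since 3*3 + 4*4 == 25 and the imaginary part of
   z * conj(z) is identically zero.  */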
8757
8758 static tree
8759 fold_mult_zconjz (location_t loc, tree type, tree expr)
8760 {
8761 tree itype = TREE_TYPE (type);
8762 tree rpart, ipart, tem;
8763
8764 if (TREE_CODE (expr) == COMPLEX_EXPR)
8765 {
8766 rpart = TREE_OPERAND (expr, 0);
8767 ipart = TREE_OPERAND (expr, 1);
8768 }
8769 else if (TREE_CODE (expr) == COMPLEX_CST)
8770 {
8771 rpart = TREE_REALPART (expr);
8772 ipart = TREE_IMAGPART (expr);
8773 }
8774 else
8775 {
8776 expr = save_expr (expr);
8777 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8778 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8779 }
8780
8781 rpart = save_expr (rpart);
8782 ipart = save_expr (ipart);
8783 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8784 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8785 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8786 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8787 build_zero_cst (itype));
8788 }
8789
8790
8791 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8792 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8793
8794 static bool
8795 vec_cst_ctor_to_array (tree arg, tree *elts)
8796 {
8797 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8798
8799 if (TREE_CODE (arg) == VECTOR_CST)
8800 {
8801 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8802 elts[i] = VECTOR_CST_ELT (arg, i);
8803 }
8804 else if (TREE_CODE (arg) == CONSTRUCTOR)
8805 {
8806 constructor_elt *elt;
8807
8808 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8809 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8810 return false;
8811 else
8812 elts[i] = elt->value;
8813 }
8814 else
8815 return false;
8816 for (; i < nelts; i++)
8817 elts[i]
8818 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8819 return true;
8820 }
8821
8822 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8823 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8824 NULL_TREE otherwise. */
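/* Illustrative example: with nelts == 4, ARG0 = {a,b,c,d},
   ARG1 = {e,f,g,h} and SEL = {0,5,2,7}, the result is {a,f,c,h};
   selector values below nelts pick from ARG0, larger ones from
   ARG1.  */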
8825
8826 static tree
8827 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8828 {
8829 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8830 tree *elts;
8831 bool need_ctor = false;
8832
8833 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8834 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8835 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8836 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8837 return NULL_TREE;
8838
8839 elts = XALLOCAVEC (tree, nelts * 3);
8840 if (!vec_cst_ctor_to_array (arg0, elts)
8841 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8842 return NULL_TREE;
8843
8844 for (i = 0; i < nelts; i++)
8845 {
8846 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8847 need_ctor = true;
8848 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8849 }
8850
8851 if (need_ctor)
8852 {
8853 vec<constructor_elt, va_gc> *v;
8854 vec_alloc (v, nelts);
8855 for (i = 0; i < nelts; i++)
8856 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8857 return build_constructor (type, v);
8858 }
8859 else
8860 return build_vector (type, &elts[2 * nelts]);
8861 }
8862
8863 /* Try to fold a pointer difference of type TYPE between two address
8864    expressions of array references AREF0 and AREF1 using location LOC.
8865    Return a simplified expression for the difference or NULL_TREE.  */
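/* Illustrative example: for int a[10], &a[i] - &a[j] is folded here to
   the byte difference (i - j) * sizeof (int) expressed in TYPE; the
   enclosing exact division by the element size (introduced by the C
   pointer subtraction) then reduces this to i - j.  */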
8866
8867 static tree
8868 fold_addr_of_array_ref_difference (location_t loc, tree type,
8869 tree aref0, tree aref1)
8870 {
8871 tree base0 = TREE_OPERAND (aref0, 0);
8872 tree base1 = TREE_OPERAND (aref1, 0);
8873 tree base_offset = build_int_cst (type, 0);
8874
8875 /* If the bases are array references as well, recurse. If the bases
8876 are pointer indirections compute the difference of the pointers.
8877 If the bases are equal, we are set. */
8878 if ((TREE_CODE (base0) == ARRAY_REF
8879 && TREE_CODE (base1) == ARRAY_REF
8880 && (base_offset
8881 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8882 || (INDIRECT_REF_P (base0)
8883 && INDIRECT_REF_P (base1)
8884 && (base_offset
8885 = fold_binary_loc (loc, MINUS_EXPR, type,
8886 fold_convert (type, TREE_OPERAND (base0, 0)),
8887 fold_convert (type,
8888 TREE_OPERAND (base1, 0)))))
8889 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8890 {
8891 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8892 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8893 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8894 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8895 return fold_build2_loc (loc, PLUS_EXPR, type,
8896 base_offset,
8897 fold_build2_loc (loc, MULT_EXPR, type,
8898 diff, esz));
8899 }
8900 return NULL_TREE;
8901 }
8902
8903 /* If the real or vector real constant CST of type TYPE has an exact
8904 inverse, return it, else return NULL. */
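/* Illustrative example: for CST == 4.0 this returns 0.25, which is
   exactly representable; for CST == 3.0 it returns NULL, because 1/3
   has no exact binary floating-point representation.  */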
8905
8906 tree
8907 exact_inverse (tree type, tree cst)
8908 {
8909 REAL_VALUE_TYPE r;
8910 tree unit_type, *elts;
8911 machine_mode mode;
8912 unsigned vec_nelts, i;
8913
8914 switch (TREE_CODE (cst))
8915 {
8916 case REAL_CST:
8917 r = TREE_REAL_CST (cst);
8918
8919 if (exact_real_inverse (TYPE_MODE (type), &r))
8920 return build_real (type, r);
8921
8922 return NULL_TREE;
8923
8924 case VECTOR_CST:
8925 vec_nelts = VECTOR_CST_NELTS (cst);
8926 elts = XALLOCAVEC (tree, vec_nelts);
8927 unit_type = TREE_TYPE (type);
8928 mode = TYPE_MODE (unit_type);
8929
8930 for (i = 0; i < vec_nelts; i++)
8931 {
8932 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8933 if (!exact_real_inverse (mode, &r))
8934 return NULL_TREE;
8935 elts[i] = build_real (unit_type, r);
8936 }
8937
8938 return build_vector (type, elts);
8939
8940 default:
8941 return NULL_TREE;
8942 }
8943 }
8944
8945 /* Mask out the tz least significant bits of X of type TYPE where
8946 tz is the number of trailing zeroes in Y. */
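/* Illustrative example: Y == 24 (binary 11000) has three trailing
   zeroes, so X == 23 (binary 10111) is masked down to 16 (binary
   10000).  */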
8947 static wide_int
8948 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8949 {
8950 int tz = wi::ctz (y);
8951 if (tz > 0)
8952 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8953 return x;
8954 }
8955
8956 /* Return true when T is an address and is known to be nonzero.
8957 For floating point we further ensure that T is not denormal.
8958    Similar logic is present in nonzero_address in rtlanal.c.
8959
8960 If the return value is based on the assumption that signed overflow
8961 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8962 change *STRICT_OVERFLOW_P. */
8963
8964 static bool
8965 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8966 {
8967 tree type = TREE_TYPE (t);
8968 enum tree_code code;
8969
8970 /* Doing something useful for floating point would need more work. */
8971 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8972 return false;
8973
8974 code = TREE_CODE (t);
8975 switch (TREE_CODE_CLASS (code))
8976 {
8977 case tcc_unary:
8978 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8979 strict_overflow_p);
8980 case tcc_binary:
8981 case tcc_comparison:
8982 return tree_binary_nonzero_warnv_p (code, type,
8983 TREE_OPERAND (t, 0),
8984 TREE_OPERAND (t, 1),
8985 strict_overflow_p);
8986 case tcc_constant:
8987 case tcc_declaration:
8988 case tcc_reference:
8989 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8990
8991 default:
8992 break;
8993 }
8994
8995 switch (code)
8996 {
8997 case TRUTH_NOT_EXPR:
8998 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8999 strict_overflow_p);
9000
9001 case TRUTH_AND_EXPR:
9002 case TRUTH_OR_EXPR:
9003 case TRUTH_XOR_EXPR:
9004 return tree_binary_nonzero_warnv_p (code, type,
9005 TREE_OPERAND (t, 0),
9006 TREE_OPERAND (t, 1),
9007 strict_overflow_p);
9008
9009 case COND_EXPR:
9010 case CONSTRUCTOR:
9011 case OBJ_TYPE_REF:
9012 case ASSERT_EXPR:
9013 case ADDR_EXPR:
9014 case WITH_SIZE_EXPR:
9015 case SSA_NAME:
9016 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9017
9018 case COMPOUND_EXPR:
9019 case MODIFY_EXPR:
9020 case BIND_EXPR:
9021 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9022 strict_overflow_p);
9023
9024 case SAVE_EXPR:
9025 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9026 strict_overflow_p);
9027
9028 case CALL_EXPR:
9029 {
9030 tree fndecl = get_callee_fndecl (t);
9031 if (!fndecl) return false;
9032 if (flag_delete_null_pointer_checks && !flag_check_new
9033 && DECL_IS_OPERATOR_NEW (fndecl)
9034 && !TREE_NOTHROW (fndecl))
9035 return true;
9036 if (flag_delete_null_pointer_checks
9037 && lookup_attribute ("returns_nonnull",
9038 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9039 return true;
9040 return alloca_call_p (t);
9041 }
9042
9043 default:
9044 break;
9045 }
9046 return false;
9047 }
9048
9049 /* Return true when T is an address and is known to be nonzero.
9050 Handle warnings about undefined signed overflow. */
9051
9052 static bool
9053 tree_expr_nonzero_p (tree t)
9054 {
9055 bool ret, strict_overflow_p;
9056
9057 strict_overflow_p = false;
9058 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9059 if (strict_overflow_p)
9060 fold_overflow_warning (("assuming signed overflow does not occur when "
9061 "determining that expression is always "
9062 "non-zero"),
9063 WARN_STRICT_OVERFLOW_MISC);
9064 return ret;
9065 }
9066
9067 /* Fold a binary expression of code CODE and type TYPE with operands
9068 OP0 and OP1. LOC is the location of the resulting expression.
9069 Return the folded expression if folding is successful. Otherwise,
9070 return NULL_TREE. */
9071
9072 tree
9073 fold_binary_loc (location_t loc,
9074 enum tree_code code, tree type, tree op0, tree op1)
9075 {
9076 enum tree_code_class kind = TREE_CODE_CLASS (code);
9077 tree arg0, arg1, tem;
9078 tree t1 = NULL_TREE;
9079 bool strict_overflow_p;
9080 unsigned int prec;
9081
9082 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9083 && TREE_CODE_LENGTH (code) == 2
9084 && op0 != NULL_TREE
9085 && op1 != NULL_TREE);
9086
9087 arg0 = op0;
9088 arg1 = op1;
9089
9090 /* Strip any conversions that don't change the mode. This is
9091 safe for every expression, except for a comparison expression
9092 because its signedness is derived from its operands. So, in
9093 the latter case, only strip conversions that don't change the
9094 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9095 preserved.
9096
9097 Note that this is done as an internal manipulation within the
9098 constant folder, in order to find the simplest representation
9099 of the arguments so that their form can be studied. In any
9100      case, the appropriate type conversions should be put back in
9101 the tree that will get out of the constant folder. */
9102
9103 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9104 {
9105 STRIP_SIGN_NOPS (arg0);
9106 STRIP_SIGN_NOPS (arg1);
9107 }
9108 else
9109 {
9110 STRIP_NOPS (arg0);
9111 STRIP_NOPS (arg1);
9112 }
9113
9114 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9115 constant but we can't do arithmetic on them. */
9116 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9117 {
9118 tem = const_binop (code, type, arg0, arg1);
9119 if (tem != NULL_TREE)
9120 {
9121 if (TREE_TYPE (tem) != type)
9122 tem = fold_convert_loc (loc, type, tem);
9123 return tem;
9124 }
9125 }
9126
9127 /* If this is a commutative operation, and ARG0 is a constant, move it
9128 to ARG1 to reduce the number of tests below. */
9129 if (commutative_tree_code (code)
9130 && tree_swap_operands_p (arg0, arg1, true))
9131 return fold_build2_loc (loc, code, type, op1, op0);
9132
9133 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9134 to ARG1 to reduce the number of tests below. */
9135 if (kind == tcc_comparison
9136 && tree_swap_operands_p (arg0, arg1, true))
9137 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9138
9139 tem = generic_simplify (loc, code, type, op0, op1);
9140 if (tem)
9141 return tem;
9142
9143 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9144
9145 First check for cases where an arithmetic operation is applied to a
9146 compound, conditional, or comparison operation. Push the arithmetic
9147 operation inside the compound or conditional to see if any folding
9148 can then be done. Convert comparison to conditional for this purpose.
9149      This also optimizes non-constant cases that used to be done in
9150 expand_expr.
9151
9152      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9153      where one of the operands is a comparison and the other is a comparison, a
9154 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9155 code below would make the expression more complex. Change it to a
9156 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9157 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9158
9159 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9160 || code == EQ_EXPR || code == NE_EXPR)
9161 && TREE_CODE (type) != VECTOR_TYPE
9162 && ((truth_value_p (TREE_CODE (arg0))
9163 && (truth_value_p (TREE_CODE (arg1))
9164 || (TREE_CODE (arg1) == BIT_AND_EXPR
9165 && integer_onep (TREE_OPERAND (arg1, 1)))))
9166 || (truth_value_p (TREE_CODE (arg1))
9167 && (truth_value_p (TREE_CODE (arg0))
9168 || (TREE_CODE (arg0) == BIT_AND_EXPR
9169 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9170 {
9171 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9172 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9173 : TRUTH_XOR_EXPR,
9174 boolean_type_node,
9175 fold_convert_loc (loc, boolean_type_node, arg0),
9176 fold_convert_loc (loc, boolean_type_node, arg1));
9177
9178 if (code == EQ_EXPR)
9179 tem = invert_truthvalue_loc (loc, tem);
9180
9181 return fold_convert_loc (loc, type, tem);
9182 }
9183
9184 if (TREE_CODE_CLASS (code) == tcc_binary
9185 || TREE_CODE_CLASS (code) == tcc_comparison)
9186 {
9187 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9188 {
9189 tem = fold_build2_loc (loc, code, type,
9190 fold_convert_loc (loc, TREE_TYPE (op0),
9191 TREE_OPERAND (arg0, 1)), op1);
9192 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9193 tem);
9194 }
9195 if (TREE_CODE (arg1) == COMPOUND_EXPR
9196 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9197 {
9198 tem = fold_build2_loc (loc, code, type, op0,
9199 fold_convert_loc (loc, TREE_TYPE (op1),
9200 TREE_OPERAND (arg1, 1)));
9201 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9202 tem);
9203 }
9204
9205 if (TREE_CODE (arg0) == COND_EXPR
9206 || TREE_CODE (arg0) == VEC_COND_EXPR
9207 || COMPARISON_CLASS_P (arg0))
9208 {
9209 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9210 arg0, arg1,
9211 /*cond_first_p=*/1);
9212 if (tem != NULL_TREE)
9213 return tem;
9214 }
9215
9216 if (TREE_CODE (arg1) == COND_EXPR
9217 || TREE_CODE (arg1) == VEC_COND_EXPR
9218 || COMPARISON_CLASS_P (arg1))
9219 {
9220 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9221 arg1, arg0,
9222 /*cond_first_p=*/0);
9223 if (tem != NULL_TREE)
9224 return tem;
9225 }
9226 }
9227
9228 switch (code)
9229 {
9230 case MEM_REF:
9231 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9232 if (TREE_CODE (arg0) == ADDR_EXPR
9233 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9234 {
9235 tree iref = TREE_OPERAND (arg0, 0);
9236 return fold_build2 (MEM_REF, type,
9237 TREE_OPERAND (iref, 0),
9238 int_const_binop (PLUS_EXPR, arg1,
9239 TREE_OPERAND (iref, 1)));
9240 }
9241
9242 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9243 if (TREE_CODE (arg0) == ADDR_EXPR
9244 && handled_component_p (TREE_OPERAND (arg0, 0)))
9245 {
9246 tree base;
9247 HOST_WIDE_INT coffset;
9248 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9249 &coffset);
9250 if (!base)
9251 return NULL_TREE;
9252 return fold_build2 (MEM_REF, type,
9253 build_fold_addr_expr (base),
9254 int_const_binop (PLUS_EXPR, arg1,
9255 size_int (coffset)));
9256 }
9257
9258 return NULL_TREE;
9259
9260 case POINTER_PLUS_EXPR:
9261 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9262 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9263 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9264 return fold_convert_loc (loc, type,
9265 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9266 fold_convert_loc (loc, sizetype,
9267 arg1),
9268 fold_convert_loc (loc, sizetype,
9269 arg0)));
9270
9271 return NULL_TREE;
9272
9273 case PLUS_EXPR:
9274 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9275 {
9276 /* X + (X / CST) * -CST is X % CST. */
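	  /* Illustrative example: x + (x / 16) * -16 folds to x % 16;
	     the constants -16 and 16 sum to zero, and with truncating
	     division x - (x / 16) * 16 is exactly the remainder.  */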
9277 if (TREE_CODE (arg1) == MULT_EXPR
9278 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9279 && operand_equal_p (arg0,
9280 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9281 {
9282 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9283 tree cst1 = TREE_OPERAND (arg1, 1);
9284 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9285 cst1, cst0);
9286 if (sum && integer_zerop (sum))
9287 return fold_convert_loc (loc, type,
9288 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9289 TREE_TYPE (arg0), arg0,
9290 cst0));
9291 }
9292 }
9293
9294 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9295 one. Make sure the type is not saturating and has the signedness of
9296 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9297 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9298 if ((TREE_CODE (arg0) == MULT_EXPR
9299 || TREE_CODE (arg1) == MULT_EXPR)
9300 && !TYPE_SATURATING (type)
9301 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9302 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9303 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9304 {
9305 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9306 if (tem)
9307 return tem;
9308 }
9309
9310 if (! FLOAT_TYPE_P (type))
9311 {
9312 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9313 (plus (plus (mult) (mult)) (foo)) so that we can
9314 take advantage of the factoring cases below. */
9315 if (ANY_INTEGRAL_TYPE_P (type)
9316 && TYPE_OVERFLOW_WRAPS (type)
9317 && (((TREE_CODE (arg0) == PLUS_EXPR
9318 || TREE_CODE (arg0) == MINUS_EXPR)
9319 && TREE_CODE (arg1) == MULT_EXPR)
9320 || ((TREE_CODE (arg1) == PLUS_EXPR
9321 || TREE_CODE (arg1) == MINUS_EXPR)
9322 && TREE_CODE (arg0) == MULT_EXPR)))
9323 {
9324 tree parg0, parg1, parg, marg;
9325 enum tree_code pcode;
9326
9327 if (TREE_CODE (arg1) == MULT_EXPR)
9328 parg = arg0, marg = arg1;
9329 else
9330 parg = arg1, marg = arg0;
9331 pcode = TREE_CODE (parg);
9332 parg0 = TREE_OPERAND (parg, 0);
9333 parg1 = TREE_OPERAND (parg, 1);
9334 STRIP_NOPS (parg0);
9335 STRIP_NOPS (parg1);
9336
9337 if (TREE_CODE (parg0) == MULT_EXPR
9338 && TREE_CODE (parg1) != MULT_EXPR)
9339 return fold_build2_loc (loc, pcode, type,
9340 fold_build2_loc (loc, PLUS_EXPR, type,
9341 fold_convert_loc (loc, type,
9342 parg0),
9343 fold_convert_loc (loc, type,
9344 marg)),
9345 fold_convert_loc (loc, type, parg1));
9346 if (TREE_CODE (parg0) != MULT_EXPR
9347 && TREE_CODE (parg1) == MULT_EXPR)
9348 return
9349 fold_build2_loc (loc, PLUS_EXPR, type,
9350 fold_convert_loc (loc, type, parg0),
9351 fold_build2_loc (loc, pcode, type,
9352 fold_convert_loc (loc, type, marg),
9353 fold_convert_loc (loc, type,
9354 parg1)));
9355 }
9356 }
9357 else
9358 {
9359 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9360 to __complex__ ( x, y ). This is not the same for SNaNs or
9361 if signed zeros are involved. */
9362 if (!HONOR_SNANS (element_mode (arg0))
9363 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9364 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9365 {
9366 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9367 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9368 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9369 bool arg0rz = false, arg0iz = false;
9370 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9371 || (arg0i && (arg0iz = real_zerop (arg0i))))
9372 {
9373 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9374 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9375 if (arg0rz && arg1i && real_zerop (arg1i))
9376 {
9377 tree rp = arg1r ? arg1r
9378 : build1 (REALPART_EXPR, rtype, arg1);
9379 tree ip = arg0i ? arg0i
9380 : build1 (IMAGPART_EXPR, rtype, arg0);
9381 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9382 }
9383 else if (arg0iz && arg1r && real_zerop (arg1r))
9384 {
9385 tree rp = arg0r ? arg0r
9386 : build1 (REALPART_EXPR, rtype, arg0);
9387 tree ip = arg1i ? arg1i
9388 : build1 (IMAGPART_EXPR, rtype, arg1);
9389 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9390 }
9391 }
9392 }
9393
9394 if (flag_unsafe_math_optimizations
9395 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9396 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9397 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9398 return tem;
9399
9400 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9401 We associate floats only if the user has specified
9402 -fassociative-math. */
9403 if (flag_associative_math
9404 && TREE_CODE (arg1) == PLUS_EXPR
9405 && TREE_CODE (arg0) != MULT_EXPR)
9406 {
9407 tree tree10 = TREE_OPERAND (arg1, 0);
9408 tree tree11 = TREE_OPERAND (arg1, 1);
9409 if (TREE_CODE (tree11) == MULT_EXPR
9410 && TREE_CODE (tree10) == MULT_EXPR)
9411 {
9412 tree tree0;
9413 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9414 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9415 }
9416 }
9417 	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9418 We associate floats only if the user has specified
9419 -fassociative-math. */
9420 if (flag_associative_math
9421 && TREE_CODE (arg0) == PLUS_EXPR
9422 && TREE_CODE (arg1) != MULT_EXPR)
9423 {
9424 tree tree00 = TREE_OPERAND (arg0, 0);
9425 tree tree01 = TREE_OPERAND (arg0, 1);
9426 if (TREE_CODE (tree01) == MULT_EXPR
9427 && TREE_CODE (tree00) == MULT_EXPR)
9428 {
9429 tree tree0;
9430 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9431 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9432 }
9433 }
9434 }
9435
9436 bit_rotate:
9437 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9438 is a rotate of A by C1 bits. */
9439 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9440 is a rotate of A by B bits. */
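  /* Illustrative example: for a 32-bit unsigned x, (x << 3) + (x >> 29)
     matches the first pattern with C1 + C2 == 32 and folds to a left
     rotate of x by 3 bits.  */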
9441 {
9442 enum tree_code code0, code1;
9443 tree rtype;
9444 code0 = TREE_CODE (arg0);
9445 code1 = TREE_CODE (arg1);
9446 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9447 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9448 && operand_equal_p (TREE_OPERAND (arg0, 0),
9449 TREE_OPERAND (arg1, 0), 0)
9450 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9451 TYPE_UNSIGNED (rtype))
9452 /* Only create rotates in complete modes. Other cases are not
9453 expanded properly. */
9454 && (element_precision (rtype)
9455 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9456 {
9457 tree tree01, tree11;
9458 enum tree_code code01, code11;
9459
9460 tree01 = TREE_OPERAND (arg0, 1);
9461 tree11 = TREE_OPERAND (arg1, 1);
9462 STRIP_NOPS (tree01);
9463 STRIP_NOPS (tree11);
9464 code01 = TREE_CODE (tree01);
9465 code11 = TREE_CODE (tree11);
9466 if (code01 == INTEGER_CST
9467 && code11 == INTEGER_CST
9468 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9469 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9470 {
9471 tem = build2_loc (loc, LROTATE_EXPR,
9472 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9473 TREE_OPERAND (arg0, 0),
9474 code0 == LSHIFT_EXPR
9475 ? TREE_OPERAND (arg0, 1)
9476 : TREE_OPERAND (arg1, 1));
9477 return fold_convert_loc (loc, type, tem);
9478 }
9479 else if (code11 == MINUS_EXPR)
9480 {
9481 tree tree110, tree111;
9482 tree110 = TREE_OPERAND (tree11, 0);
9483 tree111 = TREE_OPERAND (tree11, 1);
9484 STRIP_NOPS (tree110);
9485 STRIP_NOPS (tree111);
9486 if (TREE_CODE (tree110) == INTEGER_CST
9487 && 0 == compare_tree_int (tree110,
9488 element_precision
9489 (TREE_TYPE (TREE_OPERAND
9490 (arg0, 0))))
9491 && operand_equal_p (tree01, tree111, 0))
9492 return
9493 fold_convert_loc (loc, type,
9494 build2 ((code0 == LSHIFT_EXPR
9495 ? LROTATE_EXPR
9496 : RROTATE_EXPR),
9497 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9498 TREE_OPERAND (arg0, 0),
9499 TREE_OPERAND (arg0, 1)));
9500 }
9501 else if (code01 == MINUS_EXPR)
9502 {
9503 tree tree010, tree011;
9504 tree010 = TREE_OPERAND (tree01, 0);
9505 tree011 = TREE_OPERAND (tree01, 1);
9506 STRIP_NOPS (tree010);
9507 STRIP_NOPS (tree011);
9508 if (TREE_CODE (tree010) == INTEGER_CST
9509 && 0 == compare_tree_int (tree010,
9510 element_precision
9511 (TREE_TYPE (TREE_OPERAND
9512 (arg0, 0))))
9513 && operand_equal_p (tree11, tree011, 0))
9514 return fold_convert_loc
9515 (loc, type,
9516 build2 ((code0 != LSHIFT_EXPR
9517 ? LROTATE_EXPR
9518 : RROTATE_EXPR),
9519 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9520 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9521 }
9522 }
9523 }
9524
9525 associate:
9526   /* In most languages, we can't associate operations on floats through
9527 parentheses. Rather than remember where the parentheses were, we
9528 don't associate floats at all, unless the user has specified
9529 -fassociative-math.
9530 And, we need to make sure type is not saturating. */
9531
9532 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9533 && !TYPE_SATURATING (type))
9534 {
9535 tree var0, con0, lit0, minus_lit0;
9536 tree var1, con1, lit1, minus_lit1;
9537 tree atype = type;
9538 bool ok = true;
9539
9540 /* Split both trees into variables, constants, and literals. Then
9541 associate each group together, the constants with literals,
9542 then the result with variables. This increases the chances of
9543 literals being recombined later and of generating relocatable
9544 expressions for the sum of a constant and literal. */
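      /* Illustrative example: for unsigned x and y, (x + 1) + (y + 2)
	 splits into variable parts x and y and literals 1 and 2,
	 which recombine as (x + y) + 3.  */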
9545 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9546 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9547 code == MINUS_EXPR);
9548
9549 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9550 if (code == MINUS_EXPR)
9551 code = PLUS_EXPR;
9552
9553 /* With undefined overflow prefer doing association in a type
9554 which wraps on overflow, if that is one of the operand types. */
9555 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9556 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9557 {
9558 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9559 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9560 atype = TREE_TYPE (arg0);
9561 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9562 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9563 atype = TREE_TYPE (arg1);
9564 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9565 }
9566
9567 /* With undefined overflow we can only associate constants with one
9568 variable, and constants whose association doesn't overflow. */
9569 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9570 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9571 {
9572 if (var0 && var1)
9573 {
9574 tree tmp0 = var0;
9575 tree tmp1 = var1;
9576 bool one_neg = false;
9577
9578 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9579 {
9580 tmp0 = TREE_OPERAND (tmp0, 0);
9581 one_neg = !one_neg;
9582 }
9583 if (CONVERT_EXPR_P (tmp0)
9584 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9585 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9586 <= TYPE_PRECISION (atype)))
9587 tmp0 = TREE_OPERAND (tmp0, 0);
9588 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9589 {
9590 tmp1 = TREE_OPERAND (tmp1, 0);
9591 one_neg = !one_neg;
9592 }
9593 if (CONVERT_EXPR_P (tmp1)
9594 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9595 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9596 <= TYPE_PRECISION (atype)))
9597 tmp1 = TREE_OPERAND (tmp1, 0);
9598 /* The only case we can still associate with two variables
9599 is if they cancel out. */
9600 if (!one_neg
9601 || !operand_equal_p (tmp0, tmp1, 0))
9602 ok = false;
9603 }
9604 }
9605
9606 /* Only do something if we found more than two objects. Otherwise,
9607 nothing has changed and we risk infinite recursion. */
9608 if (ok
9609 && (2 < ((var0 != 0) + (var1 != 0)
9610 + (con0 != 0) + (con1 != 0)
9611 + (lit0 != 0) + (lit1 != 0)
9612 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9613 {
9614 bool any_overflows = false;
9615 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9616 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9617 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9618 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9619 var0 = associate_trees (loc, var0, var1, code, atype);
9620 con0 = associate_trees (loc, con0, con1, code, atype);
9621 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9622 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9623 code, atype);
9624
9625 /* Preserve the MINUS_EXPR if the negative part of the literal is
9626 greater than the positive part. Otherwise, the multiplicative
9627 	     folding code (i.e. extract_muldiv) may be fooled in case
9628 unsigned constants are subtracted, like in the following
9629 example: ((X*2 + 4) - 8U)/2. */
9630 if (minus_lit0 && lit0)
9631 {
9632 if (TREE_CODE (lit0) == INTEGER_CST
9633 && TREE_CODE (minus_lit0) == INTEGER_CST
9634 && tree_int_cst_lt (lit0, minus_lit0))
9635 {
9636 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9637 MINUS_EXPR, atype);
9638 lit0 = 0;
9639 }
9640 else
9641 {
9642 lit0 = associate_trees (loc, lit0, minus_lit0,
9643 MINUS_EXPR, atype);
9644 minus_lit0 = 0;
9645 }
9646 }
9647
9648 /* Don't introduce overflows through reassociation. */
9649 if (!any_overflows
9650 && ((lit0 && TREE_OVERFLOW_P (lit0))
9651 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9652 return NULL_TREE;
9653
9654 if (minus_lit0)
9655 {
9656 if (con0 == 0)
9657 return
9658 fold_convert_loc (loc, type,
9659 associate_trees (loc, var0, minus_lit0,
9660 MINUS_EXPR, atype));
9661 else
9662 {
9663 con0 = associate_trees (loc, con0, minus_lit0,
9664 MINUS_EXPR, atype);
9665 return
9666 fold_convert_loc (loc, type,
9667 associate_trees (loc, var0, con0,
9668 PLUS_EXPR, atype));
9669 }
9670 }
9671
9672 con0 = associate_trees (loc, con0, lit0, code, atype);
9673 return
9674 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9675 code, atype));
9676 }
9677 }
9678
9679 return NULL_TREE;
9680
9681 case MINUS_EXPR:
9682 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9683 if (TREE_CODE (arg0) == NEGATE_EXPR
9684 && negate_expr_p (op1)
9685 && reorder_operands_p (arg0, arg1))
9686 return fold_build2_loc (loc, MINUS_EXPR, type,
9687 negate_expr (op1),
9688 fold_convert_loc (loc, type,
9689 TREE_OPERAND (arg0, 0)));
9690
9691 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9692 __complex__ ( x, -y ). This is not the same for SNaNs or if
9693 signed zeros are involved. */
9694 if (!HONOR_SNANS (element_mode (arg0))
9695 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9696 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9697 {
9698 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9699 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9700 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9701 bool arg0rz = false, arg0iz = false;
9702 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9703 || (arg0i && (arg0iz = real_zerop (arg0i))))
9704 {
9705 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9706 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9707 if (arg0rz && arg1i && real_zerop (arg1i))
9708 {
9709 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9710 arg1r ? arg1r
9711 : build1 (REALPART_EXPR, rtype, arg1));
9712 tree ip = arg0i ? arg0i
9713 : build1 (IMAGPART_EXPR, rtype, arg0);
9714 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9715 }
9716 else if (arg0iz && arg1r && real_zerop (arg1r))
9717 {
9718 tree rp = arg0r ? arg0r
9719 : build1 (REALPART_EXPR, rtype, arg0);
9720 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9721 arg1i ? arg1i
9722 : build1 (IMAGPART_EXPR, rtype, arg1));
9723 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9724 }
9725 }
9726 }
9727
9728 /* A - B -> A + (-B) if B is easily negatable. */
9729 if (negate_expr_p (op1)
9730 && ! TYPE_OVERFLOW_SANITIZED (type)
9731 && ((FLOAT_TYPE_P (type)
9732 /* Avoid this transformation if B is a positive REAL_CST. */
9733 && (TREE_CODE (op1) != REAL_CST
9734 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9735 || INTEGRAL_TYPE_P (type)))
9736 return fold_build2_loc (loc, PLUS_EXPR, type,
9737 fold_convert_loc (loc, type, arg0),
9738 negate_expr (op1));
9739
9740 /* Fold &a[i] - &a[j] to i-j. */
9741 if (TREE_CODE (arg0) == ADDR_EXPR
9742 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9743 && TREE_CODE (arg1) == ADDR_EXPR
9744 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9745 {
9746 tree tem = fold_addr_of_array_ref_difference (loc, type,
9747 TREE_OPERAND (arg0, 0),
9748 TREE_OPERAND (arg1, 0));
9749 if (tem)
9750 return tem;
9751 }
9752
9753 if (FLOAT_TYPE_P (type)
9754 && flag_unsafe_math_optimizations
9755 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9756 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9757 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9758 return tem;
9759
9760 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9761 one. Make sure the type is not saturating and has the signedness of
9762 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9763 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9764 if ((TREE_CODE (arg0) == MULT_EXPR
9765 || TREE_CODE (arg1) == MULT_EXPR)
9766 && !TYPE_SATURATING (type)
9767 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9768 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9769 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9770 {
9771 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9772 if (tem)
9773 return tem;
9774 }
9775
9776 goto associate;
9777
9778 case MULT_EXPR:
9779 if (! FLOAT_TYPE_P (type))
9780 {
9781 /* Transform x * -C into -x * C if x is easily negatable. */
9782 if (TREE_CODE (op1) == INTEGER_CST
9783 && tree_int_cst_sgn (op1) == -1
9784 && negate_expr_p (op0)
9785 && (tem = negate_expr (op1)) != op1
9786 && ! TREE_OVERFLOW (tem))
9787 return fold_build2_loc (loc, MULT_EXPR, type,
9788 fold_convert_loc (loc, type,
9789 negate_expr (op0)), tem);
9790
9791 /* (A + A) * C -> A * 2 * C */
9792 if (TREE_CODE (arg0) == PLUS_EXPR
9793 && TREE_CODE (arg1) == INTEGER_CST
9794 && operand_equal_p (TREE_OPERAND (arg0, 0),
9795 TREE_OPERAND (arg0, 1), 0))
9796 return fold_build2_loc (loc, MULT_EXPR, type,
9797 omit_one_operand_loc (loc, type,
9798 TREE_OPERAND (arg0, 0),
9799 TREE_OPERAND (arg0, 1)),
9800 fold_build2_loc (loc, MULT_EXPR, type,
9801 build_int_cst (type, 2) , arg1));
9802
9803 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9804 sign-changing only. */
9805 if (TREE_CODE (arg1) == INTEGER_CST
9806 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9807 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9808 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9809
9810 strict_overflow_p = false;
9811 if (TREE_CODE (arg1) == INTEGER_CST
9812 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9813 &strict_overflow_p)))
9814 {
9815 if (strict_overflow_p)
9816 fold_overflow_warning (("assuming signed overflow does not "
9817 "occur when simplifying "
9818 "multiplication"),
9819 WARN_STRICT_OVERFLOW_MISC);
9820 return fold_convert_loc (loc, type, tem);
9821 }
9822
9823 /* Optimize z * conj(z) for integer complex numbers. */
9824 if (TREE_CODE (arg0) == CONJ_EXPR
9825 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9826 return fold_mult_zconjz (loc, type, arg1);
9827 if (TREE_CODE (arg1) == CONJ_EXPR
9828 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9829 return fold_mult_zconjz (loc, type, arg0);
9830 }
9831 else
9832 {
9833 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9834 This is not the same for NaNs or if signed zeros are
9835 involved. */
9836 if (!HONOR_NANS (arg0)
9837 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9838 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9839 && TREE_CODE (arg1) == COMPLEX_CST
9840 && real_zerop (TREE_REALPART (arg1)))
9841 {
9842 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9843 if (real_onep (TREE_IMAGPART (arg1)))
9844 return
9845 fold_build2_loc (loc, COMPLEX_EXPR, type,
9846 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9847 rtype, arg0)),
9848 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9849 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9850 return
9851 fold_build2_loc (loc, COMPLEX_EXPR, type,
9852 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9853 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9854 rtype, arg0)));
9855 }
9856
9857 /* Optimize z * conj(z) for floating point complex numbers.
9858 Guarded by flag_unsafe_math_optimizations as non-finite
9859 imaginary components don't produce scalar results. */
9860 if (flag_unsafe_math_optimizations
9861 && TREE_CODE (arg0) == CONJ_EXPR
9862 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9863 return fold_mult_zconjz (loc, type, arg1);
9864 if (flag_unsafe_math_optimizations
9865 && TREE_CODE (arg1) == CONJ_EXPR
9866 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9867 return fold_mult_zconjz (loc, type, arg0);
9868
9869 if (flag_unsafe_math_optimizations)
9870 {
9872 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9873 if (!in_gimple_form
9874 && optimize
9875 && operand_equal_p (arg0, arg1, 0))
9876 {
9877 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9878
9879 if (powfn)
9880 {
9881 tree arg = build_real (type, dconst2);
9882 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9883 }
9884 }
9885 }
9886 }
9887 goto associate;
9888
9889 case BIT_IOR_EXPR:
9890 /* Canonicalize (X & C1) | C2. */
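      /* Illustrative examples: (x & 0x0f) | 0x1f folds to 0x1f, and
	 for an 8-bit x, (x & 0xf0) | 0x0f becomes x | 0x0f because
	 C1 | C2 covers every bit of the mode.  */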
9891 if (TREE_CODE (arg0) == BIT_AND_EXPR
9892 && TREE_CODE (arg1) == INTEGER_CST
9893 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9894 {
9895 int width = TYPE_PRECISION (type), w;
9896 wide_int c1 = TREE_OPERAND (arg0, 1);
9897 wide_int c2 = arg1;
9898
9899 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9900 if ((c1 & c2) == c1)
9901 return omit_one_operand_loc (loc, type, arg1,
9902 TREE_OPERAND (arg0, 0));
9903
9904 wide_int msk = wi::mask (width, false,
9905 TYPE_PRECISION (TREE_TYPE (arg1)));
9906
9907 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9908 if (msk.and_not (c1 | c2) == 0)
9909 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9910 TREE_OPERAND (arg0, 0), arg1);
9911
9912 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9913 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9914 mode which allows further optimizations. */
9915 c1 &= msk;
9916 c2 &= msk;
9917 wide_int c3 = c1.and_not (c2);
9918 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9919 {
9920 wide_int mask = wi::mask (w, false,
9921 TYPE_PRECISION (type));
9922 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9923 {
9924 c3 = mask;
9925 break;
9926 }
9927 }
9928
9929 if (c3 != c1)
9930 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9931 fold_build2_loc (loc, BIT_AND_EXPR, type,
9932 TREE_OPERAND (arg0, 0),
9933 wide_int_to_tree (type,
9934 c3)),
9935 arg1);
9936 }
9937
9938 /* See if this can be simplified into a rotate first. If that
9939 is unsuccessful continue in the association code. */
9940 goto bit_rotate;
9941
9942 case BIT_XOR_EXPR:
9943 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9944 if (TREE_CODE (arg0) == BIT_AND_EXPR
9945 && INTEGRAL_TYPE_P (type)
9946 && integer_onep (TREE_OPERAND (arg0, 1))
9947 && integer_onep (arg1))
9948 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9949 build_zero_cst (TREE_TYPE (arg0)));
9950
9951 /* See if this can be simplified into a rotate first. If that
9952 is unsuccessful continue in the association code. */
9953 goto bit_rotate;
9954
9955 case BIT_AND_EXPR:
9956 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9957 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9958 && INTEGRAL_TYPE_P (type)
9959 && integer_onep (TREE_OPERAND (arg0, 1))
9960 && integer_onep (arg1))
9961 {
9962 tree tem2;
9963 tem = TREE_OPERAND (arg0, 0);
9964 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9965 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9966 tem, tem2);
9967 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9968 build_zero_cst (TREE_TYPE (tem)));
9969 }
9970 /* Fold ~X & 1 as (X & 1) == 0. */
9971 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9972 && INTEGRAL_TYPE_P (type)
9973 && integer_onep (arg1))
9974 {
9975 tree tem2;
9976 tem = TREE_OPERAND (arg0, 0);
9977 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9978 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9979 tem, tem2);
9980 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9981 build_zero_cst (TREE_TYPE (tem)));
9982 }
9983 /* Fold !X & 1 as X == 0. */
9984 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9985 && integer_onep (arg1))
9986 {
9987 tem = TREE_OPERAND (arg0, 0);
9988 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9989 build_zero_cst (TREE_TYPE (tem)));
9990 }
9991
9992 /* Fold (X ^ Y) & Y as ~X & Y. */
9993 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9994 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9995 {
9996 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9997 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9998 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
9999 fold_convert_loc (loc, type, arg1));
10000 }
10001 /* Fold (X ^ Y) & X as ~Y & X. */
10002 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10003 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10004 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10005 {
10006 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10007 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10008 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10009 fold_convert_loc (loc, type, arg1));
10010 }
10011 /* Fold X & (X ^ Y) as X & ~Y. */
10012 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10013 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10014 {
10015 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10016 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10017 fold_convert_loc (loc, type, arg0),
10018 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10019 }
10020 /* Fold X & (Y ^ X) as ~Y & X. */
10021 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10022 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10023 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10024 {
10025 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10026 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10027 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10028 fold_convert_loc (loc, type, arg0));
10029 }
10030
10031 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10032 multiple of 1 << CST. */
10033 if (TREE_CODE (arg1) == INTEGER_CST)
10034 {
10035 wide_int cst1 = arg1;
10036 wide_int ncst1 = -cst1;
10037 if ((cst1 & ncst1) == ncst1
10038 && multiple_of_p (type, arg0,
10039 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10040 return fold_convert_loc (loc, type, arg0);
10041 }
10042
10043 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10044 bits from CST2. */
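      /* Illustrative examples: (x * 8) & 5 folds to 0, since x * 8 has
	 three trailing zero bits and every set bit of 5 lies below bit
	 3, while (x * 4) & 14 becomes (x * 4) & 12 by dropping the two
	 low mask bits.  */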
10045 if (TREE_CODE (arg1) == INTEGER_CST
10046 && TREE_CODE (arg0) == MULT_EXPR
10047 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10048 {
10049 wide_int warg1 = arg1;
10050 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10051
10052 if (masked == 0)
10053 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10054 arg0, arg1);
10055 else if (masked != warg1)
10056 {
10057 /* Avoid the transform if arg1 is a mask of some
10058 mode which allows further optimizations. */
10059 int pop = wi::popcount (warg1);
10060 if (!(pop >= BITS_PER_UNIT
10061 && exact_log2 (pop) != -1
10062 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10063 return fold_build2_loc (loc, code, type, op0,
10064 wide_int_to_tree (type, masked));
10065 }
10066 }
10067
10068 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10069 ((A & N) + B) & M -> (A + B) & M
10070 Similarly if (N & M) == 0,
10071 ((A | N) + B) & M -> (A + B) & M
10072 and for - instead of + (or unary - instead of +)
10073 and/or ^ instead of |.
10074 If B is constant and (B & M) == 0, fold into A & M. */
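      /* Illustrative examples: with M == 0xff, ((a & 0x1ff) + b) & 0xff
	 folds to (a + b) & 0xff, and ((a | 0x100) + 0x200) & 0xff
	 folds to a & 0xff.  */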
10075 if (TREE_CODE (arg1) == INTEGER_CST)
10076 {
10077 wide_int cst1 = arg1;
10078 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10079 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10080 && (TREE_CODE (arg0) == PLUS_EXPR
10081 || TREE_CODE (arg0) == MINUS_EXPR
10082 || TREE_CODE (arg0) == NEGATE_EXPR)
10083 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10084 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10085 {
10086 tree pmop[2];
10087 int which = 0;
10088 wide_int cst0;
10089
10090 /* Now we know that arg0 is (C + D) or (C - D) or
10091 -C and arg1 (M) is == (1LL << cst) - 1.
10092 Store C into PMOP[0] and D into PMOP[1]. */
10093 pmop[0] = TREE_OPERAND (arg0, 0);
10094 pmop[1] = NULL;
10095 if (TREE_CODE (arg0) != NEGATE_EXPR)
10096 {
10097 pmop[1] = TREE_OPERAND (arg0, 1);
10098 which = 1;
10099 }
10100
10101 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10102 which = -1;
10103
10104 for (; which >= 0; which--)
10105 switch (TREE_CODE (pmop[which]))
10106 {
10107 case BIT_AND_EXPR:
10108 case BIT_IOR_EXPR:
10109 case BIT_XOR_EXPR:
10110 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10111 != INTEGER_CST)
10112 break;
10113 cst0 = TREE_OPERAND (pmop[which], 1);
10114 cst0 &= cst1;
10115 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10116 {
10117 if (cst0 != cst1)
10118 break;
10119 }
10120 else if (cst0 != 0)
10121 break;
10122 /* If C or D is of the form (A & N) where
10123 (N & M) == M, or of the form (A | N) or
10124 (A ^ N) where (N & M) == 0, replace it with A. */
10125 pmop[which] = TREE_OPERAND (pmop[which], 0);
10126 break;
10127 case INTEGER_CST:
10128 		  /* If C or D is an N where (N & M) == 0, it can be
10129 omitted (assumed 0). */
10130 if ((TREE_CODE (arg0) == PLUS_EXPR
10131 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10132 && (cst1 & pmop[which]) == 0)
10133 pmop[which] = NULL;
10134 break;
10135 default:
10136 break;
10137 }
10138
10139 /* Only build anything new if we optimized one or both arguments
10140 above. */
10141 if (pmop[0] != TREE_OPERAND (arg0, 0)
10142 || (TREE_CODE (arg0) != NEGATE_EXPR
10143 && pmop[1] != TREE_OPERAND (arg0, 1)))
10144 {
10145 tree utype = TREE_TYPE (arg0);
10146 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10147 {
10148 /* Perform the operations in a type that has defined
10149 overflow behavior. */
10150 utype = unsigned_type_for (TREE_TYPE (arg0));
10151 if (pmop[0] != NULL)
10152 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10153 if (pmop[1] != NULL)
10154 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10155 }
10156
10157 if (TREE_CODE (arg0) == NEGATE_EXPR)
10158 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10159 else if (TREE_CODE (arg0) == PLUS_EXPR)
10160 {
10161 if (pmop[0] != NULL && pmop[1] != NULL)
10162 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10163 pmop[0], pmop[1]);
10164 else if (pmop[0] != NULL)
10165 tem = pmop[0];
10166 else if (pmop[1] != NULL)
10167 tem = pmop[1];
10168 else
10169 return build_int_cst (type, 0);
10170 }
10171 else if (pmop[0] == NULL)
10172 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10173 else
10174 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10175 pmop[0], pmop[1]);
10176 /* TEM is now the new binary +, - or unary - replacement. */
10177 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10178 fold_convert_loc (loc, utype, arg1));
10179 return fold_convert_loc (loc, type, tem);
10180 }
10181 }
10182 }
10183
10184 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10185 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10186 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10187 {
10188 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10189
10190 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10191 if (mask == -1)
10192 return
10193 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10194 }
10195
10196 goto associate;
10197
10198 case RDIV_EXPR:
10199 /* Don't touch a floating-point divide by zero unless the mode
10200 of the constant can represent infinity. */
10201 if (TREE_CODE (arg1) == REAL_CST
10202 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10203 && real_zerop (arg1))
10204 return NULL_TREE;
10205
10206 /* (-A) / (-B) -> A / B */
10207 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10208 return fold_build2_loc (loc, RDIV_EXPR, type,
10209 TREE_OPERAND (arg0, 0),
10210 negate_expr (arg1));
10211 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10212 return fold_build2_loc (loc, RDIV_EXPR, type,
10213 negate_expr (arg0),
10214 TREE_OPERAND (arg1, 0));
10215 return NULL_TREE;
10216
10217 case TRUNC_DIV_EXPR:
10218 /* Fall through */
10219
10220 case FLOOR_DIV_EXPR:
10221 /* Simplify A / (B << N) where A and B are positive and B is
10222 a power of 2, to A >> (N + log2(B)). */
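      /* Illustrative example: for unsigned a, a / (4 << n) becomes
	 a >> (n + 2), since log2 (4) == 2.  */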
10223 strict_overflow_p = false;
10224 if (TREE_CODE (arg1) == LSHIFT_EXPR
10225 && (TYPE_UNSIGNED (type)
10226 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10227 {
10228 tree sval = TREE_OPERAND (arg1, 0);
10229 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10230 {
10231 tree sh_cnt = TREE_OPERAND (arg1, 1);
10232 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10233 wi::exact_log2 (sval));
10234
10235 if (strict_overflow_p)
10236 fold_overflow_warning (("assuming signed overflow does not "
10237 "occur when simplifying A / (B << N)"),
10238 WARN_STRICT_OVERFLOW_MISC);
10239
10240 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10241 sh_cnt, pow2);
10242 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10243 fold_convert_loc (loc, type, arg0), sh_cnt);
10244 }
10245 }
10246
10247 /* Fall through */
10248
10249 case ROUND_DIV_EXPR:
10250 case CEIL_DIV_EXPR:
10251 case EXACT_DIV_EXPR:
10252 if (integer_zerop (arg1))
10253 return NULL_TREE;
10254
10255 /* Convert -A / -B to A / B when the type is signed and overflow is
10256 undefined. */
10257 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10258 && TREE_CODE (arg0) == NEGATE_EXPR
10259 && negate_expr_p (op1))
10260 {
10261 if (INTEGRAL_TYPE_P (type))
10262 fold_overflow_warning (("assuming signed overflow does not occur "
10263 "when distributing negation across "
10264 "division"),
10265 WARN_STRICT_OVERFLOW_MISC);
10266 return fold_build2_loc (loc, code, type,
10267 fold_convert_loc (loc, type,
10268 TREE_OPERAND (arg0, 0)),
10269 negate_expr (op1));
10270 }
10271 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10272 && TREE_CODE (arg1) == NEGATE_EXPR
10273 && negate_expr_p (op0))
10274 {
10275 if (INTEGRAL_TYPE_P (type))
10276 fold_overflow_warning (("assuming signed overflow does not occur "
10277 "when distributing negation across "
10278 "division"),
10279 WARN_STRICT_OVERFLOW_MISC);
10280 return fold_build2_loc (loc, code, type,
10281 negate_expr (op0),
10282 fold_convert_loc (loc, type,
10283 TREE_OPERAND (arg1, 0)));
10284 }
10285
10286 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10287 operation, EXACT_DIV_EXPR.
10288
10289 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10290        At one time others generated faster code; it's not clear whether they
10291        still do after the last round of changes to the DIV code in expmed.c.  */
10292 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10293 && multiple_of_p (type, arg0, arg1))
10294 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10295 fold_convert (type, arg0),
10296 fold_convert (type, arg1));
10297
10298 strict_overflow_p = false;
10299 if (TREE_CODE (arg1) == INTEGER_CST
10300 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10301 &strict_overflow_p)))
10302 {
10303 if (strict_overflow_p)
10304 fold_overflow_warning (("assuming signed overflow does not occur "
10305 "when simplifying division"),
10306 WARN_STRICT_OVERFLOW_MISC);
10307 return fold_convert_loc (loc, type, tem);
10308 }
10309
10310 return NULL_TREE;
10311
10312 case CEIL_MOD_EXPR:
10313 case FLOOR_MOD_EXPR:
10314 case ROUND_MOD_EXPR:
10315 case TRUNC_MOD_EXPR:
10316 strict_overflow_p = false;
10317 if (TREE_CODE (arg1) == INTEGER_CST
10318 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10319 &strict_overflow_p)))
10320 {
10321 if (strict_overflow_p)
10322 fold_overflow_warning (("assuming signed overflow does not occur "
10323 "when simplifying modulus"),
10324 WARN_STRICT_OVERFLOW_MISC);
10325 return fold_convert_loc (loc, type, tem);
10326 }
10327
10328 return NULL_TREE;
10329
10330 case LROTATE_EXPR:
10331 case RROTATE_EXPR:
10332 case RSHIFT_EXPR:
10333 case LSHIFT_EXPR:
10334       /* Since a negative shift count is not well-defined,
10335 don't try to compute it in the compiler. */
10336 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10337 return NULL_TREE;
10338
10339 prec = element_precision (type);
10340
10341 /* If we have a rotate of a bit operation with the rotate count and
10342 the second operand of the bit operation both constant,
10343 permute the two operations. */
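 /* For illustration (assuming a 32-bit type): (x & 0xff00) rrotate 8
    becomes (x rrotate 8) & 0xff.  */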
10344 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10345 && (TREE_CODE (arg0) == BIT_AND_EXPR
10346 || TREE_CODE (arg0) == BIT_IOR_EXPR
10347 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10348 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10349 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10350 fold_build2_loc (loc, code, type,
10351 TREE_OPERAND (arg0, 0), arg1),
10352 fold_build2_loc (loc, code, type,
10353 TREE_OPERAND (arg0, 1), arg1));
10354
10355 /* Two consecutive rotates adding up to some integer
10356 multiple of the precision of the type can be ignored. */
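 /* For illustration (assuming a 32-bit type): (x rrotate 12) rrotate 20
    folds to x, since 12 + 20 == 32.  */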
10357 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10358 && TREE_CODE (arg0) == RROTATE_EXPR
10359 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10360 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10361 prec) == 0)
10362 return TREE_OPERAND (arg0, 0);
10363
10364 return NULL_TREE;
10365
10366 case MIN_EXPR:
10367 case MAX_EXPR:
10368 goto associate;
10369
10370 case TRUTH_ANDIF_EXPR:
10371 /* Note that the operands of this must be ints
10372 and their values must be 0 or 1.
10373 ("true" is a fixed value perhaps depending on the language.) */
10374 /* If first arg is constant zero, return it. */
10375 if (integer_zerop (arg0))
10376 return fold_convert_loc (loc, type, arg0);
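 /* Fall through */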
10377 case TRUTH_AND_EXPR:
10378 /* If either arg is constant true, drop it. */
10379 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10380 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10381 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10382 /* Preserve sequence points. */
10383 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10384 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10385 /* If second arg is constant zero, result is zero, but first arg
10386 must be evaluated. */
10387 if (integer_zerop (arg1))
10388 return omit_one_operand_loc (loc, type, arg1, arg0);
10389 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10390 case will be handled here. */
10391 if (integer_zerop (arg0))
10392 return omit_one_operand_loc (loc, type, arg0, arg1);
10393
10394 /* !X && X is always false. */
10395 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10396 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10397 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10398 /* X && !X is always false. */
10399 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10400 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10401 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10402
10403 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10404 means A >= Y && A != MAX, but in this case we know that
10405 A < X <= MAX. */
10406
10407 if (!TREE_SIDE_EFFECTS (arg0)
10408 && !TREE_SIDE_EFFECTS (arg1))
10409 {
10410 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10411 if (tem && !operand_equal_p (tem, arg0, 0))
10412 return fold_build2_loc (loc, code, type, tem, arg1);
10413
10414 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10415 if (tem && !operand_equal_p (tem, arg1, 0))
10416 return fold_build2_loc (loc, code, type, arg0, tem);
10417 }
10418
10419 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10420 != NULL_TREE)
10421 return tem;
10422
10423 return NULL_TREE;
10424
10425 case TRUTH_ORIF_EXPR:
10426 /* Note that the operands of this must be ints
10427 and their values must be 0 or true.
10428 ("true" is a fixed value perhaps depending on the language.) */
10429 /* If first arg is constant true, return it. */
10430 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10431 return fold_convert_loc (loc, type, arg0);
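 /* Fall through */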
10432 case TRUTH_OR_EXPR:
10433 /* If either arg is constant zero, drop it. */
10434 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10435 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10436 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10437 /* Preserve sequence points. */
10438 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10439 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10440 /* If second arg is constant true, result is true, but we must
10441 evaluate first arg. */
10442 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10443 return omit_one_operand_loc (loc, type, arg1, arg0);
10444 /* Likewise for first arg, but note this only occurs here for
10445 TRUTH_OR_EXPR. */
10446 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10447 return omit_one_operand_loc (loc, type, arg0, arg1);
10448
10449 /* !X || X is always true. */
10450 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10451 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10452 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10453 /* X || !X is always true. */
10454 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10455 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10456 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10457
10458 /* (X && !Y) || (!X && Y) is X ^ Y */
10459 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10460 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10461 {
10462 tree a0, a1, l0, l1, n0, n1;
10463
10464 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10465 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10466
10467 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10468 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10469
10470 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10471 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10472
10473 if ((operand_equal_p (n0, a0, 0)
10474 && operand_equal_p (n1, a1, 0))
10475 || (operand_equal_p (n0, a1, 0)
10476 && operand_equal_p (n1, a0, 0)))
10477 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10478 }
10479
10480 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10481 != NULL_TREE)
10482 return tem;
10483
10484 return NULL_TREE;
10485
10486 case TRUTH_XOR_EXPR:
10487 /* If the second arg is constant zero, drop it. */
10488 if (integer_zerop (arg1))
10489 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10490 /* If the second arg is constant true, this is a logical inversion. */
10491 if (integer_onep (arg1))
10492 {
10493 tem = invert_truthvalue_loc (loc, arg0);
10494 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10495 }
10496 /* Identical arguments cancel to zero. */
10497 if (operand_equal_p (arg0, arg1, 0))
10498 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10499
10500 /* !X ^ X is always true. */
10501 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10502 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10503 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10504
10505 /* X ^ !X is always true. */
10506 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10507 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10508 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10509
10510 return NULL_TREE;
10511
10512 case EQ_EXPR:
10513 case NE_EXPR:
10514 STRIP_NOPS (arg0);
10515 STRIP_NOPS (arg1);
10516
10517 tem = fold_comparison (loc, code, type, op0, op1);
10518 if (tem != NULL_TREE)
10519 return tem;
10520
10521 /* bool_var != 1 becomes !bool_var. */
10522 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10523 && code == NE_EXPR)
10524 return fold_convert_loc (loc, type,
10525 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10526 TREE_TYPE (arg0), arg0));
10527
10528 /* bool_var == 0 becomes !bool_var. */
10529 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10530 && code == EQ_EXPR)
10531 return fold_convert_loc (loc, type,
10532 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10533 TREE_TYPE (arg0), arg0));
10534
10535 /* !exp != 0 becomes !exp */
10536 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10537 && code == NE_EXPR)
10538 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10539
10540 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
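 /* E.g. x + y == x becomes y == 0; x is still evaluated first when it
    has side effects.  */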
10541 if ((TREE_CODE (arg0) == PLUS_EXPR
10542 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10543 || TREE_CODE (arg0) == MINUS_EXPR)
10544 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10545 0)),
10546 arg1, 0)
10547 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10548 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10549 {
10550 tree val = TREE_OPERAND (arg0, 1);
10551 return omit_two_operands_loc (loc, type,
10552 fold_build2_loc (loc, code, type,
10553 val,
10554 build_int_cst (TREE_TYPE (val),
10555 0)),
10556 TREE_OPERAND (arg0, 0), arg1);
10557 }
10558
10559 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
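 /* E.g. (7 - x) == x folds to false: 2*x is always even modulo 2**prec,
    so it can never equal an odd constant.  */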
10560 if (TREE_CODE (arg0) == MINUS_EXPR
10561 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10562 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10563 1)),
10564 arg1, 0)
10565 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10566 {
10567 return omit_two_operands_loc (loc, type,
10568 code == NE_EXPR
10569 ? boolean_true_node : boolean_false_node,
10570 TREE_OPERAND (arg0, 1), arg1);
10571 }
10572
10573 /* If this is an EQ or NE comparison with zero and ARG0 is
10574 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10575 two operations, but the latter can be done in one less insn
10576 on machines that have only two-operand insns or on which a
10577 constant cannot be the first operand. */
10578 if (TREE_CODE (arg0) == BIT_AND_EXPR
10579 && integer_zerop (arg1))
10580 {
10581 tree arg00 = TREE_OPERAND (arg0, 0);
10582 tree arg01 = TREE_OPERAND (arg0, 1);
10583 if (TREE_CODE (arg00) == LSHIFT_EXPR
10584 && integer_onep (TREE_OPERAND (arg00, 0)))
10585 {
10586 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10587 arg01, TREE_OPERAND (arg00, 1));
10588 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10589 build_int_cst (TREE_TYPE (arg0), 1));
10590 return fold_build2_loc (loc, code, type,
10591 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10592 arg1);
10593 }
10594 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10595 && integer_onep (TREE_OPERAND (arg01, 0)))
10596 {
10597 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10598 arg00, TREE_OPERAND (arg01, 1));
10599 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10600 build_int_cst (TREE_TYPE (arg0), 1));
10601 return fold_build2_loc (loc, code, type,
10602 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10603 arg1);
10604 }
10605 }
10606
10607 /* If this is an NE or EQ comparison of zero against the result of a
10608 signed MOD operation whose second operand is a power of 2, make
10609 the MOD operation unsigned since it is simpler and equivalent. */
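 /* E.g. for signed x, x % 8 == 0 becomes (unsigned) x % 8 == 0; only the
    low bits matter, so the sign of x does not.  */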
10610 if (integer_zerop (arg1)
10611 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10612 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10613 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10614 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10615 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10616 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10617 {
10618 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10619 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10620 fold_convert_loc (loc, newtype,
10621 TREE_OPERAND (arg0, 0)),
10622 fold_convert_loc (loc, newtype,
10623 TREE_OPERAND (arg0, 1)));
10624
10625 return fold_build2_loc (loc, code, type, newmod,
10626 fold_convert_loc (loc, newtype, arg1));
10627 }
10628
10629 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10630 C1 is a valid shift constant, and C2 is a power of two, i.e.
10631 a single bit. */
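 /* For illustration (assuming 32-bit x): ((x >> 3) & 4) != 0 becomes
    (x & 32) != 0, while ((x >> 31) & 2) != 0 becomes x < 0 for signed x
    and (x, false) for unsigned x.  */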
10632 if (TREE_CODE (arg0) == BIT_AND_EXPR
10633 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10634 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10635 == INTEGER_CST
10636 && integer_pow2p (TREE_OPERAND (arg0, 1))
10637 && integer_zerop (arg1))
10638 {
10639 tree itype = TREE_TYPE (arg0);
10640 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10641 prec = TYPE_PRECISION (itype);
10642
10643 /* Check for a valid shift count. */
10644 if (wi::ltu_p (arg001, prec))
10645 {
10646 tree arg01 = TREE_OPERAND (arg0, 1);
10647 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10648 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10649 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10650 can be rewritten as (X & (C2 << C1)) != 0. */
10651 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10652 {
10653 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10654 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10655 return fold_build2_loc (loc, code, type, tem,
10656 fold_convert_loc (loc, itype, arg1));
10657 }
10658 /* Otherwise, for signed (arithmetic) shifts,
10659 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10660 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10661 else if (!TYPE_UNSIGNED (itype))
10662 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10663 arg000, build_int_cst (itype, 0));
10664 /* Otherwise, for unsigned (logical) shifts,
10665 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10666 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10667 else
10668 return omit_one_operand_loc (loc, type,
10669 code == EQ_EXPR ? integer_one_node
10670 : integer_zero_node,
10671 arg000);
10672 }
10673 }
10674
10675 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10676 Similarly for NE_EXPR. */
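 /* E.g. (x & 4) == 3 folds to 0 and (x & 4) != 3 folds to 1, since
    3 & ~4 is nonzero.  */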
10677 if (TREE_CODE (arg0) == BIT_AND_EXPR
10678 && TREE_CODE (arg1) == INTEGER_CST
10679 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10680 {
10681 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10682 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10683 TREE_OPERAND (arg0, 1));
10684 tree dandnotc
10685 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10686 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10687 notc);
10688 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10689 if (integer_nonzerop (dandnotc))
10690 return omit_one_operand_loc (loc, type, rslt, arg0);
10691 }
10692
10693 /* If this is a comparison of a field, we may be able to simplify it. */
10694 if ((TREE_CODE (arg0) == COMPONENT_REF
10695 || TREE_CODE (arg0) == BIT_FIELD_REF)
10696 /* Handle the constant case even without -O
10697 to make sure the warnings are given. */
10698 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10699 {
10700 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10701 if (t1)
10702 return t1;
10703 }
10704
10705 /* Optimize comparisons of strlen vs zero to a compare of the
10706 first character of the string vs zero. To wit,
10707 strlen(ptr) == 0 => *ptr == 0
10708 strlen(ptr) != 0 => *ptr != 0
10709 Other cases should reduce to one of these two (or a constant)
10710 due to the return value of strlen being unsigned. */
10711 if (TREE_CODE (arg0) == CALL_EXPR
10712 && integer_zerop (arg1))
10713 {
10714 tree fndecl = get_callee_fndecl (arg0);
10715
10716 if (fndecl
10717 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10718 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10719 && call_expr_nargs (arg0) == 1
10720 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10721 {
10722 tree iref = build_fold_indirect_ref_loc (loc,
10723 CALL_EXPR_ARG (arg0, 0));
10724 return fold_build2_loc (loc, code, type, iref,
10725 build_int_cst (TREE_TYPE (iref), 0));
10726 }
10727 }
10728
10729 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10730 of X. Similarly fold (X >> C) == 0 into X >= 0. */
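 /* E.g. for 32-bit x: (x >> 31) != 0 becomes x < 0, converting x to the
    corresponding signed type first if necessary.  */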
10731 if (TREE_CODE (arg0) == RSHIFT_EXPR
10732 && integer_zerop (arg1)
10733 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10734 {
10735 tree arg00 = TREE_OPERAND (arg0, 0);
10736 tree arg01 = TREE_OPERAND (arg0, 1);
10737 tree itype = TREE_TYPE (arg00);
10738 if (wi::eq_p (arg01, element_precision (itype) - 1))
10739 {
10740 if (TYPE_UNSIGNED (itype))
10741 {
10742 itype = signed_type_for (itype);
10743 arg00 = fold_convert_loc (loc, itype, arg00);
10744 }
10745 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10746 type, arg00, build_zero_cst (itype));
10747 }
10748 }
10749
10750 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10751 (X & C) == 0 when C is a single bit. */
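 /* E.g. (~x & 8) == 0 becomes (x & 8) != 0.  */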
10752 if (TREE_CODE (arg0) == BIT_AND_EXPR
10753 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10754 && integer_zerop (arg1)
10755 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10756 {
10757 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10758 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10759 TREE_OPERAND (arg0, 1));
10760 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10761 type, tem,
10762 fold_convert_loc (loc, TREE_TYPE (arg0),
10763 arg1));
10764 }
10765
10766 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10767 constant C is a power of two, i.e. a single bit. */
10768 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10769 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10770 && integer_zerop (arg1)
10771 && integer_pow2p (TREE_OPERAND (arg0, 1))
10772 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10773 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10774 {
10775 tree arg00 = TREE_OPERAND (arg0, 0);
10776 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10777 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10778 }
10779
10780 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10781 when C is a power of two, i.e. a single bit. */
10782 if (TREE_CODE (arg0) == BIT_AND_EXPR
10783 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10784 && integer_zerop (arg1)
10785 && integer_pow2p (TREE_OPERAND (arg0, 1))
10786 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10787 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10788 {
10789 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10790 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10791 arg000, TREE_OPERAND (arg0, 1));
10792 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10793 tem, build_int_cst (TREE_TYPE (tem), 0));
10794 }
10795
10796 if (integer_zerop (arg1)
10797 && tree_expr_nonzero_p (arg0))
10798 {
10799 tree res = constant_boolean_node (code == NE_EXPR, type);
10800 return omit_one_operand_loc (loc, type, res, arg0);
10801 }
10802
10803 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
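 /* E.g. (x & c) == (y & c) becomes ((x ^ y) & c) == 0.  */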
10804 if (TREE_CODE (arg0) == BIT_AND_EXPR
10805 && TREE_CODE (arg1) == BIT_AND_EXPR)
10806 {
10807 tree arg00 = TREE_OPERAND (arg0, 0);
10808 tree arg01 = TREE_OPERAND (arg0, 1);
10809 tree arg10 = TREE_OPERAND (arg1, 0);
10810 tree arg11 = TREE_OPERAND (arg1, 1);
10811 tree itype = TREE_TYPE (arg0);
10812
10813 if (operand_equal_p (arg01, arg11, 0))
10814 return fold_build2_loc (loc, code, type,
10815 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10816 fold_build2_loc (loc,
10817 BIT_XOR_EXPR, itype,
10818 arg00, arg10),
10819 arg01),
10820 build_zero_cst (itype));
10821
10822 if (operand_equal_p (arg01, arg10, 0))
10823 return fold_build2_loc (loc, code, type,
10824 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10825 fold_build2_loc (loc,
10826 BIT_XOR_EXPR, itype,
10827 arg00, arg11),
10828 arg01),
10829 build_zero_cst (itype));
10830
10831 if (operand_equal_p (arg00, arg11, 0))
10832 return fold_build2_loc (loc, code, type,
10833 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10834 fold_build2_loc (loc,
10835 BIT_XOR_EXPR, itype,
10836 arg01, arg10),
10837 arg00),
10838 build_zero_cst (itype));
10839
10840 if (operand_equal_p (arg00, arg10, 0))
10841 return fold_build2_loc (loc, code, type,
10842 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10843 fold_build2_loc (loc,
10844 BIT_XOR_EXPR, itype,
10845 arg01, arg11),
10846 arg00),
10847 build_zero_cst (itype));
10848 }
10849
10850 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10851 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10852 {
10853 tree arg00 = TREE_OPERAND (arg0, 0);
10854 tree arg01 = TREE_OPERAND (arg0, 1);
10855 tree arg10 = TREE_OPERAND (arg1, 0);
10856 tree arg11 = TREE_OPERAND (arg1, 1);
10857 tree itype = TREE_TYPE (arg0);
10858
10859 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10860 operand_equal_p guarantees no side-effects so we don't need
10861 to use omit_one_operand on Z. */
10862 if (operand_equal_p (arg01, arg11, 0))
10863 return fold_build2_loc (loc, code, type, arg00,
10864 fold_convert_loc (loc, TREE_TYPE (arg00),
10865 arg10));
10866 if (operand_equal_p (arg01, arg10, 0))
10867 return fold_build2_loc (loc, code, type, arg00,
10868 fold_convert_loc (loc, TREE_TYPE (arg00),
10869 arg11));
10870 if (operand_equal_p (arg00, arg11, 0))
10871 return fold_build2_loc (loc, code, type, arg01,
10872 fold_convert_loc (loc, TREE_TYPE (arg01),
10873 arg10));
10874 if (operand_equal_p (arg00, arg10, 0))
10875 return fold_build2_loc (loc, code, type, arg01,
10876 fold_convert_loc (loc, TREE_TYPE (arg01),
10877 arg11));
10878
10879 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
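 /* E.g. (x ^ 5) == (y ^ 3) becomes (x ^ 6) == y.  */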
10880 if (TREE_CODE (arg01) == INTEGER_CST
10881 && TREE_CODE (arg11) == INTEGER_CST)
10882 {
10883 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10884 fold_convert_loc (loc, itype, arg11));
10885 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10886 return fold_build2_loc (loc, code, type, tem,
10887 fold_convert_loc (loc, itype, arg10));
10888 }
10889 }
10890
10891 /* Attempt to simplify equality/inequality comparisons of complex
10892 values. Only lower the comparison if the result is known or
10893 can be simplified to a single scalar comparison. */
10894 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10895 || TREE_CODE (arg0) == COMPLEX_CST)
10896 && (TREE_CODE (arg1) == COMPLEX_EXPR
10897 || TREE_CODE (arg1) == COMPLEX_CST))
10898 {
10899 tree real0, imag0, real1, imag1;
10900 tree rcond, icond;
10901
10902 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10903 {
10904 real0 = TREE_OPERAND (arg0, 0);
10905 imag0 = TREE_OPERAND (arg0, 1);
10906 }
10907 else
10908 {
10909 real0 = TREE_REALPART (arg0);
10910 imag0 = TREE_IMAGPART (arg0);
10911 }
10912
10913 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10914 {
10915 real1 = TREE_OPERAND (arg1, 0);
10916 imag1 = TREE_OPERAND (arg1, 1);
10917 }
10918 else
10919 {
10920 real1 = TREE_REALPART (arg1);
10921 imag1 = TREE_IMAGPART (arg1);
10922 }
10923
10924 rcond = fold_binary_loc (loc, code, type, real0, real1);
10925 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10926 {
10927 if (integer_zerop (rcond))
10928 {
10929 if (code == EQ_EXPR)
10930 return omit_two_operands_loc (loc, type, boolean_false_node,
10931 imag0, imag1);
10932 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10933 }
10934 else
10935 {
10936 if (code == NE_EXPR)
10937 return omit_two_operands_loc (loc, type, boolean_true_node,
10938 imag0, imag1);
10939 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10940 }
10941 }
10942
10943 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10944 if (icond && TREE_CODE (icond) == INTEGER_CST)
10945 {
10946 if (integer_zerop (icond))
10947 {
10948 if (code == EQ_EXPR)
10949 return omit_two_operands_loc (loc, type, boolean_false_node,
10950 real0, real1);
10951 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10952 }
10953 else
10954 {
10955 if (code == NE_EXPR)
10956 return omit_two_operands_loc (loc, type, boolean_true_node,
10957 real0, real1);
10958 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10959 }
10960 }
10961 }
10962
10963 return NULL_TREE;
10964
10965 case LT_EXPR:
10966 case GT_EXPR:
10967 case LE_EXPR:
10968 case GE_EXPR:
10969 tem = fold_comparison (loc, code, type, op0, op1);
10970 if (tem != NULL_TREE)
10971 return tem;
10972
10973 /* Transform comparisons of the form X +- C CMP X. */
10974 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10975 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10976 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10977 && !HONOR_SNANS (arg0))
10978 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10979 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10980 {
10981 tree arg01 = TREE_OPERAND (arg0, 1);
10982 enum tree_code code0 = TREE_CODE (arg0);
10983 int is_positive;
10984
10985 if (TREE_CODE (arg01) == REAL_CST)
10986 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10987 else
10988 is_positive = tree_int_cst_sgn (arg01);
10989
10990 /* (X - c) > X becomes false. */
10991 if (code == GT_EXPR
10992 && ((code0 == MINUS_EXPR && is_positive >= 0)
10993 || (code0 == PLUS_EXPR && is_positive <= 0)))
10994 {
10995 if (TREE_CODE (arg01) == INTEGER_CST
10996 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10997 fold_overflow_warning (("assuming signed overflow does not "
10998 "occur when assuming that (X - c) > X "
10999 "is always false"),
11000 WARN_STRICT_OVERFLOW_ALL);
11001 return constant_boolean_node (0, type);
11002 }
11003
11004 /* Likewise (X + c) < X becomes false. */
11005 if (code == LT_EXPR
11006 && ((code0 == PLUS_EXPR && is_positive >= 0)
11007 || (code0 == MINUS_EXPR && is_positive <= 0)))
11008 {
11009 if (TREE_CODE (arg01) == INTEGER_CST
11010 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11011 fold_overflow_warning (("assuming signed overflow does not "
11012 "occur when assuming that "
11013 "(X + c) < X is always false"),
11014 WARN_STRICT_OVERFLOW_ALL);
11015 return constant_boolean_node (0, type);
11016 }
11017
11018 /* Convert (X - c) <= X to true. */
11019 if (!HONOR_NANS (arg1)
11020 && code == LE_EXPR
11021 && ((code0 == MINUS_EXPR && is_positive >= 0)
11022 || (code0 == PLUS_EXPR && is_positive <= 0)))
11023 {
11024 if (TREE_CODE (arg01) == INTEGER_CST
11025 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11026 fold_overflow_warning (("assuming signed overflow does not "
11027 "occur when assuming that "
11028 "(X - c) <= X is always true"),
11029 WARN_STRICT_OVERFLOW_ALL);
11030 return constant_boolean_node (1, type);
11031 }
11032
11033 /* Convert (X + c) >= X to true. */
11034 if (!HONOR_NANS (arg1)
11035 && code == GE_EXPR
11036 && ((code0 == PLUS_EXPR && is_positive >= 0)
11037 || (code0 == MINUS_EXPR && is_positive <= 0)))
11038 {
11039 if (TREE_CODE (arg01) == INTEGER_CST
11040 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11041 fold_overflow_warning (("assuming signed overflow does not "
11042 "occur when assuming that "
11043 "(X + c) >= X is always true"),
11044 WARN_STRICT_OVERFLOW_ALL);
11045 return constant_boolean_node (1, type);
11046 }
11047
11048 if (TREE_CODE (arg01) == INTEGER_CST)
11049 {
11050 /* Convert X + c > X and X - c < X to true for integers. */
11051 if (code == GT_EXPR
11052 && ((code0 == PLUS_EXPR && is_positive > 0)
11053 || (code0 == MINUS_EXPR && is_positive < 0)))
11054 {
11055 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11056 fold_overflow_warning (("assuming signed overflow does "
11057 "not occur when assuming that "
11058 "(X + c) > X is always true"),
11059 WARN_STRICT_OVERFLOW_ALL);
11060 return constant_boolean_node (1, type);
11061 }
11062
11063 if (code == LT_EXPR
11064 && ((code0 == MINUS_EXPR && is_positive > 0)
11065 || (code0 == PLUS_EXPR && is_positive < 0)))
11066 {
11067 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11068 fold_overflow_warning (("assuming signed overflow does "
11069 "not occur when assuming that "
11070 "(X - c) < X is always true"),
11071 WARN_STRICT_OVERFLOW_ALL);
11072 return constant_boolean_node (1, type);
11073 }
11074
11075 /* Convert X + c <= X and X - c >= X to false for integers. */
11076 if (code == LE_EXPR
11077 && ((code0 == PLUS_EXPR && is_positive > 0)
11078 || (code0 == MINUS_EXPR && is_positive < 0)))
11079 {
11080 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11081 fold_overflow_warning (("assuming signed overflow does "
11082 "not occur when assuming that "
11083 "(X + c) <= X is always false"),
11084 WARN_STRICT_OVERFLOW_ALL);
11085 return constant_boolean_node (0, type);
11086 }
11087
11088 if (code == GE_EXPR
11089 && ((code0 == MINUS_EXPR && is_positive > 0)
11090 || (code0 == PLUS_EXPR && is_positive < 0)))
11091 {
11092 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11093 fold_overflow_warning (("assuming signed overflow does "
11094 "not occur when assuming that "
11095 "(X - c) >= X is always false"),
11096 WARN_STRICT_OVERFLOW_ALL);
11097 return constant_boolean_node (0, type);
11098 }
11099 }
11100 }
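 /* To summarize (assuming signed x with undefined overflow): x + 1 > x
    and x - 1 < x fold to true, while x + 1 <= x and x - 1 >= x fold
    to false.  */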
11101
11102 /* If we are comparing an ABS_EXPR with a constant, we can
11103 convert all the cases into explicit comparisons, but they may
11104 well not be faster than doing the ABS and one comparison.
11105 But ABS (X) <= C is a range comparison, which becomes a subtraction
11106 and a comparison, and is probably faster. */
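 /* E.g. abs (x) <= 5 becomes x >= -5 && x <= 5.  */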
11107 if (code == LE_EXPR
11108 && TREE_CODE (arg1) == INTEGER_CST
11109 && TREE_CODE (arg0) == ABS_EXPR
11110 && ! TREE_SIDE_EFFECTS (arg0)
11111 && (0 != (tem = negate_expr (arg1)))
11112 && TREE_CODE (tem) == INTEGER_CST
11113 && !TREE_OVERFLOW (tem))
11114 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11115 build2 (GE_EXPR, type,
11116 TREE_OPERAND (arg0, 0), tem),
11117 build2 (LE_EXPR, type,
11118 TREE_OPERAND (arg0, 0), arg1));
11119
11120 /* Convert ABS_EXPR<x> >= 0 to true. */
11121 strict_overflow_p = false;
11122 if (code == GE_EXPR
11123 && (integer_zerop (arg1)
11124 || (! HONOR_NANS (arg0)
11125 && real_zerop (arg1)))
11126 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11127 {
11128 if (strict_overflow_p)
11129 fold_overflow_warning (("assuming signed overflow does not occur "
11130 "when simplifying comparison of "
11131 "absolute value and zero"),
11132 WARN_STRICT_OVERFLOW_CONDITIONAL);
11133 return omit_one_operand_loc (loc, type,
11134 constant_boolean_node (true, type),
11135 arg0);
11136 }
11137
11138 /* Convert ABS_EXPR<x> < 0 to false. */
11139 strict_overflow_p = false;
11140 if (code == LT_EXPR
11141 && (integer_zerop (arg1) || real_zerop (arg1))
11142 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11143 {
11144 if (strict_overflow_p)
11145 fold_overflow_warning (("assuming signed overflow does not occur "
11146 "when simplifying comparison of "
11147 "absolute value and zero"),
11148 WARN_STRICT_OVERFLOW_CONDITIONAL);
11149 return omit_one_operand_loc (loc, type,
11150 constant_boolean_node (false, type),
11151 arg0);
11152 }
11153
11154 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11155 and similarly for >= into !=. */
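 /* E.g. for unsigned x: x < (1 << y) becomes (x >> y) == 0 and
    x >= (1 << y) becomes (x >> y) != 0.  */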
11156 if ((code == LT_EXPR || code == GE_EXPR)
11157 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11158 && TREE_CODE (arg1) == LSHIFT_EXPR
11159 && integer_onep (TREE_OPERAND (arg1, 0)))
11160 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11161 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11162 TREE_OPERAND (arg1, 1)),
11163 build_zero_cst (TREE_TYPE (arg0)));
11164
11165 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11166 otherwise Y might be >= # of bits in X's type and thus e.g.
11167 (unsigned char) (1 << Y) for Y == 15 might be 0.
11168 If the cast is widening, then 1 << Y should have unsigned type,
11169 otherwise if Y is number of bits in the signed shift type minus 1,
11170 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11171 Y == 31 might be 0xffffffff80000000. */
11172 if ((code == LT_EXPR || code == GE_EXPR)
11173 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11174 && CONVERT_EXPR_P (arg1)
11175 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11176 && (element_precision (TREE_TYPE (arg1))
11177 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11178 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11179 || (element_precision (TREE_TYPE (arg1))
11180 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11181 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11182 {
11183 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11184 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11185 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11186 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11187 build_zero_cst (TREE_TYPE (arg0)));
11188 }
11189
11190 return NULL_TREE;
11191
11192 case UNORDERED_EXPR:
11193 case ORDERED_EXPR:
11194 case UNLT_EXPR:
11195 case UNLE_EXPR:
11196 case UNGT_EXPR:
11197 case UNGE_EXPR:
11198 case UNEQ_EXPR:
11199 case LTGT_EXPR:
11200 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11201 {
11202 tree targ0 = strip_float_extensions (arg0);
11203 tree targ1 = strip_float_extensions (arg1);
11204 tree newtype = TREE_TYPE (targ0);
11205
11206 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11207 newtype = TREE_TYPE (targ1);
11208
11209 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11210 return fold_build2_loc (loc, code, type,
11211 fold_convert_loc (loc, newtype, targ0),
11212 fold_convert_loc (loc, newtype, targ1));
11213 }
11214
11215 return NULL_TREE;
11216
11217 case COMPOUND_EXPR:
11218 /* When pedantic, a compound expression can be neither an lvalue
11219 nor an integer constant expression. */
11220 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11221 return NULL_TREE;
11222 /* Don't let (0, 0) be a null pointer constant. */
11223 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11224 : fold_convert_loc (loc, type, arg1);
11225 return pedantic_non_lvalue_loc (loc, tem);
11226
11227 case ASSERT_EXPR:
11228 /* An ASSERT_EXPR should never be passed to fold_binary. */
11229 gcc_unreachable ();
11230
11231 default:
11232 return NULL_TREE;
11233 } /* switch (code) */
11234 }
11235
11236 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11237 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11238 of GOTO_EXPR. */
11239
11240 static tree
11241 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11242 {
11243 switch (TREE_CODE (*tp))
11244 {
11245 case LABEL_EXPR:
11246 return *tp;
11247
11248 case GOTO_EXPR:
11249 *walk_subtrees = 0;
11250
11251 /* ... fall through ... */
11252
11253 default:
11254 return NULL_TREE;
11255 }
11256 }
11257
11258 /* Return whether the sub-tree ST contains a label which is accessible from
11259 outside the sub-tree. */
11260
11261 static bool
11262 contains_label_p (tree st)
11263 {
11264 return
11265 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11266 }
11267
11268 /* Fold a ternary expression of code CODE and type TYPE with operands
11269 OP0, OP1, and OP2. Return the folded expression if folding is
11270 successful. Otherwise, return NULL_TREE. */
11271
11272 tree
11273 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11274 tree op0, tree op1, tree op2)
11275 {
11276 tree tem;
11277 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11278 enum tree_code_class kind = TREE_CODE_CLASS (code);
11279
11280 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11281 && TREE_CODE_LENGTH (code) == 3);
11282
11283 /* If this is a commutative operation, and OP0 is a constant, move it
11284 to OP1 to reduce the number of tests below. */
11285 if (commutative_ternary_tree_code (code)
11286 && tree_swap_operands_p (op0, op1, true))
11287 return fold_build3_loc (loc, code, type, op1, op0, op2);
11288
11289 tem = generic_simplify (loc, code, type, op0, op1, op2);
11290 if (tem)
11291 return tem;
11292
11293 /* Strip any conversions that don't change the mode. This is safe
11294 for every expression, except for a comparison expression because
11295 its signedness is derived from its operands. So, in the latter
11296 case, only strip conversions that don't change the signedness.
11297
11298 Note that this is done as an internal manipulation within the
11299 constant folder, in order to find the simplest representation of
11300 the arguments so that their form can be studied. In any case,
11301 the appropriate type conversions should be put back in the tree
11302 that will get out of the constant folder. */
11303 if (op0)
11304 {
11305 arg0 = op0;
11306 STRIP_NOPS (arg0);
11307 }
11308
11309 if (op1)
11310 {
11311 arg1 = op1;
11312 STRIP_NOPS (arg1);
11313 }
11314
11315 if (op2)
11316 {
11317 arg2 = op2;
11318 STRIP_NOPS (arg2);
11319 }
11320
11321 switch (code)
11322 {
11323 case COMPONENT_REF:
11324 if (TREE_CODE (arg0) == CONSTRUCTOR
11325 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11326 {
11327 unsigned HOST_WIDE_INT idx;
11328 tree field, value;
11329 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11330 if (field == arg1)
11331 return value;
11332 }
11333 return NULL_TREE;
11334
11335 case COND_EXPR:
11336 case VEC_COND_EXPR:
11337 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11338 so all simple results must be passed through pedantic_non_lvalue. */
11339 if (TREE_CODE (arg0) == INTEGER_CST)
11340 {
11341 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11342 tem = integer_zerop (arg0) ? op2 : op1;
11343 /* Only optimize constant conditions when the selected branch
11344 has the same type as the COND_EXPR. This avoids optimizing
11345 away "c ? x : throw", where the throw has a void type.
11346 Also avoid throwing away an operand that contains a label. */
11347 if ((!TREE_SIDE_EFFECTS (unused_op)
11348 || !contains_label_p (unused_op))
11349 && (! VOID_TYPE_P (TREE_TYPE (tem))
11350 || VOID_TYPE_P (type)))
11351 return pedantic_non_lvalue_loc (loc, tem);
11352 return NULL_TREE;
11353 }
11354 else if (TREE_CODE (arg0) == VECTOR_CST)
11355 {
11356 if ((TREE_CODE (arg1) == VECTOR_CST
11357 || TREE_CODE (arg1) == CONSTRUCTOR)
11358 && (TREE_CODE (arg2) == VECTOR_CST
11359 || TREE_CODE (arg2) == CONSTRUCTOR))
11360 {
11361 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11362 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11363 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11364 for (i = 0; i < nelts; i++)
11365 {
11366 tree val = VECTOR_CST_ELT (arg0, i);
11367 if (integer_all_onesp (val))
11368 sel[i] = i;
11369 else if (integer_zerop (val))
11370 sel[i] = nelts + i;
11371 else /* Currently unreachable. */
11372 return NULL_TREE;
11373 }
11374 tree t = fold_vec_perm (type, arg1, arg2, sel);
11375 if (t != NULL_TREE)
11376 return t;
11377 }
11378 }
11379
11380 /* If we have A op B ? A : C, we may be able to convert this to a
11381 simpler expression, depending on the operation and the values
11382 of B and C. Signed zeros prevent all of these transformations,
11383 for reasons given above each one.
11384
11385 Also try swapping the arguments and inverting the conditional. */
11386 if (COMPARISON_CLASS_P (arg0)
11387 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11388 arg1, TREE_OPERAND (arg0, 1))
11389 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11390 {
11391 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11392 if (tem)
11393 return tem;
11394 }
11395
11396 if (COMPARISON_CLASS_P (arg0)
11397 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11398 op2,
11399 TREE_OPERAND (arg0, 1))
11400 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11401 {
11402 location_t loc0 = expr_location_or (arg0, loc);
11403 tem = fold_invert_truthvalue (loc0, arg0);
11404 if (tem && COMPARISON_CLASS_P (tem))
11405 {
11406 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11407 if (tem)
11408 return tem;
11409 }
11410 }
11411
11412 /* If the second operand is simpler than the third, swap them
11413 since that produces better jump optimization results. */
11414 if (truth_value_p (TREE_CODE (arg0))
11415 && tree_swap_operands_p (op1, op2, false))
11416 {
11417 location_t loc0 = expr_location_or (arg0, loc);
11418 /* See if this can be inverted. If it can't, possibly because
11419 it was a floating-point inequality comparison, don't do
11420 anything. */
11421 tem = fold_invert_truthvalue (loc0, arg0);
11422 if (tem)
11423 return fold_build3_loc (loc, code, type, tem, op2, op1);
11424 }
11425
11426 /* Convert A ? 1 : 0 to simply A. */
11427 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11428 : (integer_onep (op1)
11429 && !VECTOR_TYPE_P (type)))
11430 && integer_zerop (op2)
11431 /* If we try to convert OP0 to our type, the
11432 call to fold will try to move the conversion inside
11433 a COND, which will recurse. In that case, the COND_EXPR
11434 is probably the best choice, so leave it alone. */
11435 && type == TREE_TYPE (arg0))
11436 return pedantic_non_lvalue_loc (loc, arg0);
11437
11438 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11439 over COND_EXPR in cases such as floating point comparisons. */
11440 if (integer_zerop (op1)
11441 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11442 : (integer_onep (op2)
11443 && !VECTOR_TYPE_P (type)))
11444 && truth_value_p (TREE_CODE (arg0)))
11445 return pedantic_non_lvalue_loc (loc,
11446 fold_convert_loc (loc, type,
11447 invert_truthvalue_loc (loc,
11448 arg0)));
11449
11450 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11451 if (TREE_CODE (arg0) == LT_EXPR
11452 && integer_zerop (TREE_OPERAND (arg0, 1))
11453 && integer_zerop (op2)
11454 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11455 {
11456 /* sign_bit_p looks through both zero and sign extensions,
11457 but for this optimization only sign extensions are
11458 usable. */
11459 tree tem2 = TREE_OPERAND (arg0, 0);
11460 while (tem != tem2)
11461 {
11462 if (TREE_CODE (tem2) != NOP_EXPR
11463 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11464 {
11465 tem = NULL_TREE;
11466 break;
11467 }
11468 tem2 = TREE_OPERAND (tem2, 0);
11469 }
11470 /* sign_bit_p only checks ARG1 bits within A's precision.
11471 If <sign bit of A> has wider type than A, bits outside
11472 of A's precision in <sign bit of A> need to be checked.
11473 If they are all 0, this optimization needs to be done
11474 in unsigned A's type; if they are all 1, in signed A's type;
11475 otherwise this can't be done. */
11476 if (tem
11477 && TYPE_PRECISION (TREE_TYPE (tem))
11478 < TYPE_PRECISION (TREE_TYPE (arg1))
11479 && TYPE_PRECISION (TREE_TYPE (tem))
11480 < TYPE_PRECISION (type))
11481 {
11482 int inner_width, outer_width;
11483 tree tem_type;
11484
11485 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11486 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11487 if (outer_width > TYPE_PRECISION (type))
11488 outer_width = TYPE_PRECISION (type);
11489
11490 wide_int mask = wi::shifted_mask
11491 (inner_width, outer_width - inner_width, false,
11492 TYPE_PRECISION (TREE_TYPE (arg1)));
11493
11494 wide_int common = mask & arg1;
11495 if (common == mask)
11496 {
11497 tem_type = signed_type_for (TREE_TYPE (tem));
11498 tem = fold_convert_loc (loc, tem_type, tem);
11499 }
11500 else if (common == 0)
11501 {
11502 tem_type = unsigned_type_for (TREE_TYPE (tem));
11503 tem = fold_convert_loc (loc, tem_type, tem);
11504 }
11505 else
11506 tem = NULL;
11507 }
11508
11509 if (tem)
11510 return
11511 fold_convert_loc (loc, type,
11512 fold_build2_loc (loc, BIT_AND_EXPR,
11513 TREE_TYPE (tem), tem,
11514 fold_convert_loc (loc,
11515 TREE_TYPE (tem),
11516 arg1)));
11517 }
11518
11519 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11520 already handled above. */
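 /* E.g. ((x >> 4) & 1) ? 16 : 0 becomes x & 16.  */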
11521 if (TREE_CODE (arg0) == BIT_AND_EXPR
11522 && integer_onep (TREE_OPERAND (arg0, 1))
11523 && integer_zerop (op2)
11524 && integer_pow2p (arg1))
11525 {
11526 tree tem = TREE_OPERAND (arg0, 0);
11527 STRIP_NOPS (tem);
11528 if (TREE_CODE (tem) == RSHIFT_EXPR
11529 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11530 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11531 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11532 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11533 TREE_OPERAND (tem, 0), arg1);
11534 }
11535
11536 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11537 is probably obsolete because the first operand should be a
11538 truth value (that's why we have the two cases above), but let's
11539 leave it in until we can confirm this for all front-ends. */
11540 if (integer_zerop (op2)
11541 && TREE_CODE (arg0) == NE_EXPR
11542 && integer_zerop (TREE_OPERAND (arg0, 1))
11543 && integer_pow2p (arg1)
11544 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11545 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11546 arg1, OEP_ONLY_CONST))
11547 return pedantic_non_lvalue_loc (loc,
11548 fold_convert_loc (loc, type,
11549 TREE_OPERAND (arg0, 0)));
11550
11551 /* Disable the transformations below for vectors, since
11552 fold_binary_op_with_conditional_arg may undo them immediately,
11553 yielding an infinite loop. */
11554 if (code == VEC_COND_EXPR)
11555 return NULL_TREE;
11556
11557 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11558 if (integer_zerop (op2)
11559 && truth_value_p (TREE_CODE (arg0))
11560 && truth_value_p (TREE_CODE (arg1))
11561 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11562 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11563 : TRUTH_ANDIF_EXPR,
11564 type, fold_convert_loc (loc, type, arg0), arg1);
11565
11566 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11567 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11568 && truth_value_p (TREE_CODE (arg0))
11569 && truth_value_p (TREE_CODE (arg1))
11570 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11571 {
11572 location_t loc0 = expr_location_or (arg0, loc);
11573 /* Only perform transformation if ARG0 is easily inverted. */
11574 tem = fold_invert_truthvalue (loc0, arg0);
11575 if (tem)
11576 return fold_build2_loc (loc, code == VEC_COND_EXPR
11577 ? BIT_IOR_EXPR
11578 : TRUTH_ORIF_EXPR,
11579 type, fold_convert_loc (loc, type, tem),
11580 arg1);
11581 }
11582
11583 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11584 if (integer_zerop (arg1)
11585 && truth_value_p (TREE_CODE (arg0))
11586 && truth_value_p (TREE_CODE (op2))
11587 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11588 {
11589 location_t loc0 = expr_location_or (arg0, loc);
11590 /* Only perform transformation if ARG0 is easily inverted. */
11591 tem = fold_invert_truthvalue (loc0, arg0);
11592 if (tem)
11593 return fold_build2_loc (loc, code == VEC_COND_EXPR
11594 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11595 type, fold_convert_loc (loc, type, tem),
11596 op2);
11597 }
11598
11599 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11600 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11601 && truth_value_p (TREE_CODE (arg0))
11602 && truth_value_p (TREE_CODE (op2))
11603 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11604 return fold_build2_loc (loc, code == VEC_COND_EXPR
11605 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11606 type, fold_convert_loc (loc, type, arg0), op2);
11607
11608 return NULL_TREE;
11609
11610 case CALL_EXPR:
11611 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11612 of fold_ternary on them. */
11613 gcc_unreachable ();
11614
11615 case BIT_FIELD_REF:
11616 if ((TREE_CODE (arg0) == VECTOR_CST
11617 || (TREE_CODE (arg0) == CONSTRUCTOR
11618 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11619 && (type == TREE_TYPE (TREE_TYPE (arg0))
11620 || (TREE_CODE (type) == VECTOR_TYPE
11621 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11622 {
11623 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11624 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11625 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11626 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11627
11628 if (n != 0
11629 && (idx % width) == 0
11630 && (n % width) == 0
11631 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11632 {
11633 idx = idx / width;
11634 n = n / width;
11635
11636 if (TREE_CODE (arg0) == VECTOR_CST)
11637 {
11638 if (n == 1)
11639 return VECTOR_CST_ELT (arg0, idx);
11640
11641 tree *vals = XALLOCAVEC (tree, n);
11642 for (unsigned i = 0; i < n; ++i)
11643 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11644 return build_vector (type, vals);
11645 }
11646
11647 /* Constructor elements can be subvectors. */
11648 unsigned HOST_WIDE_INT k = 1;
11649 if (CONSTRUCTOR_NELTS (arg0) != 0)
11650 {
11651 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11652 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11653 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11654 }
11655
11656 /* We keep an exact subset of the constructor elements. */
11657 if ((idx % k) == 0 && (n % k) == 0)
11658 {
11659 if (CONSTRUCTOR_NELTS (arg0) == 0)
11660 return build_constructor (type, NULL);
11661 idx /= k;
11662 n /= k;
11663 if (n == 1)
11664 {
11665 if (idx < CONSTRUCTOR_NELTS (arg0))
11666 return CONSTRUCTOR_ELT (arg0, idx)->value;
11667 return build_zero_cst (type);
11668 }
11669
11670 vec<constructor_elt, va_gc> *vals;
11671 vec_alloc (vals, n);
11672 for (unsigned i = 0;
11673 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11674 ++i)
11675 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11676 CONSTRUCTOR_ELT
11677 (arg0, idx + i)->value);
11678 return build_constructor (type, vals);
11679 }
11680 /* The bitfield references a single constructor element. */
11681 else if (idx + n <= (idx / k + 1) * k)
11682 {
11683 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11684 return build_zero_cst (type);
11685 else if (n == k)
11686 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11687 else
11688 return fold_build3_loc (loc, code, type,
11689 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11690 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11691 }
11692 }
11693 }
11694
11695 /* A bit-field-ref that references the full argument can be stripped. */
11696 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11697 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11698 && integer_zerop (op2))
11699 return fold_convert_loc (loc, type, arg0);
11700
11701 /* On constants we can use native encode/interpret to constant
11702 fold (nearly) all BIT_FIELD_REFs. */
11703 if (CONSTANT_CLASS_P (arg0)
11704 && can_native_interpret_type_p (type)
11705 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11706 /* This limitation should not be necessary; we just need to
11707 round this up to the mode size. */
11708 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11709 /* Need bit-shifting of the buffer to relax the following. */
11710 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11711 {
11712 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11713 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11714 unsigned HOST_WIDE_INT clen;
11715 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11716 /* ??? We cannot tell native_encode_expr to start at an
11717 arbitrary byte, so limit ourselves to a reasonable amount
11718 of work. */
11719 if (clen <= 4096)
11720 {
11721 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11722 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11723 if (len > 0
11724 && len * BITS_PER_UNIT >= bitpos + bitsize)
11725 {
11726 tree v = native_interpret_expr (type,
11727 b + bitpos / BITS_PER_UNIT,
11728 bitsize / BITS_PER_UNIT);
11729 if (v)
11730 return v;
11731 }
11732 }
11733 }
11734
11735 return NULL_TREE;
11736
11737 case FMA_EXPR:
11738 /* For integers we can decompose the FMA if possible. */
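 /* E.g. FMA_EXPR <4, 5, c> becomes 20 + c, and FMA_EXPR <a, b, 0>
    becomes a * b.  */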
11739 if (TREE_CODE (arg0) == INTEGER_CST
11740 && TREE_CODE (arg1) == INTEGER_CST)
11741 return fold_build2_loc (loc, PLUS_EXPR, type,
11742 const_binop (MULT_EXPR, arg0, arg1), arg2);
11743 if (integer_zerop (arg2))
11744 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11745
11746 return fold_fma (loc, type, arg0, arg1, arg2);
11747
11748 case VEC_PERM_EXPR:
11749 if (TREE_CODE (arg2) == VECTOR_CST)
11750 {
11751 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11752 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11753 unsigned char *sel2 = sel + nelts;
11754 bool need_mask_canon = false;
11755 bool need_mask_canon2 = false;
11756 bool all_in_vec0 = true;
11757 bool all_in_vec1 = true;
11758 bool maybe_identity = true;
11759 bool single_arg = (op0 == op1);
11760 bool changed = false;
11761
11762 mask2 = 2 * nelts - 1;
11763 mask = single_arg ? (nelts - 1) : mask2;
11764 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11765 for (i = 0; i < nelts; i++)
11766 {
11767 tree val = VECTOR_CST_ELT (arg2, i);
11768 if (TREE_CODE (val) != INTEGER_CST)
11769 return NULL_TREE;
11770
11771 /* Make sure that the perm value is in an acceptable
11772 range. */
11773 wide_int t = val;
11774 need_mask_canon |= wi::gtu_p (t, mask);
11775 need_mask_canon2 |= wi::gtu_p (t, mask2);
11776 sel[i] = t.to_uhwi () & mask;
11777 sel2[i] = t.to_uhwi () & mask2;
11778
11779 if (sel[i] < nelts)
11780 all_in_vec1 = false;
11781 else
11782 all_in_vec0 = false;
11783
11784 if ((sel[i] & (nelts - 1)) != i)
11785 maybe_identity = false;
11786 }
11787
11788 if (maybe_identity)
11789 {
11790 if (all_in_vec0)
11791 return op0;
11792 if (all_in_vec1)
11793 return op1;
11794 }
11795
11796 if (all_in_vec0)
11797 op1 = op0;
11798 else if (all_in_vec1)
11799 {
11800 op0 = op1;
11801 for (i = 0; i < nelts; i++)
11802 sel[i] -= nelts;
11803 need_mask_canon = true;
11804 }
11805
11806 if ((TREE_CODE (op0) == VECTOR_CST
11807 || TREE_CODE (op0) == CONSTRUCTOR)
11808 && (TREE_CODE (op1) == VECTOR_CST
11809 || TREE_CODE (op1) == CONSTRUCTOR))
11810 {
11811 tree t = fold_vec_perm (type, op0, op1, sel);
11812 if (t != NULL_TREE)
11813 return t;
11814 }
11815
11816 if (op0 == op1 && !single_arg)
11817 changed = true;
11818
11819 /* Some targets are deficient and fail to expand a single
11820 argument permutation while still allowing an equivalent
11821 2-argument version. */
11822 if (need_mask_canon && arg2 == op2
11823 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11824 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11825 {
11826 need_mask_canon = need_mask_canon2;
11827 sel = sel2;
11828 }
11829
11830 if (need_mask_canon && arg2 == op2)
11831 {
11832 tree *tsel = XALLOCAVEC (tree, nelts);
11833 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11834 for (i = 0; i < nelts; i++)
11835 tsel[i] = build_int_cst (eltype, sel[i]);
11836 op2 = build_vector (TREE_TYPE (arg2), tsel);
11837 changed = true;
11838 }
11839
11840 if (changed)
11841 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11842 }
11843 return NULL_TREE;
11844
11845 default:
11846 return NULL_TREE;
11847 } /* switch (code) */
11848 }
11849
11850 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11851 of an array (or vector). */
11852
11853 tree
11854 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11855 {
11856 tree index_type = NULL_TREE;
11857 offset_int low_bound = 0;
11858
11859 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11860 {
11861 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11862 if (domain_type && TYPE_MIN_VALUE (domain_type))
11863 {
11864 /* Static constructors for variably sized objects make no sense. */
11865 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11866 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11867 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11868 }
11869 }
11870
11871 if (index_type)
11872 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11873 TYPE_SIGN (index_type));
11874
11875 offset_int index = low_bound - 1;
11876 if (index_type)
11877 index = wi::ext (index, TYPE_PRECISION (index_type),
11878 TYPE_SIGN (index_type));
11879
11880 offset_int max_index;
11881 unsigned HOST_WIDE_INT cnt;
11882 tree cfield, cval;
11883
11884 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11885 {
11886 /* An array constructor may set the index explicitly, specify a range,
11887 or leave the index NULL, meaning that it is the next index after the
11888 previous one. */
11889 if (cfield)
11890 {
11891 if (TREE_CODE (cfield) == INTEGER_CST)
11892 max_index = index = wi::to_offset (cfield);
11893 else
11894 {
11895 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11896 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11897 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11898 }
11899 }
11900 else
11901 {
11902 index += 1;
11903 if (index_type)
11904 index = wi::ext (index, TYPE_PRECISION (index_type),
11905 TYPE_SIGN (index_type));
11906 max_index = index;
11907 }
11908
11909 /* Do we have a match? */
11910 if (wi::cmpu (access_index, index) >= 0
11911 && wi::cmpu (access_index, max_index) <= 0)
11912 return cval;
11913 }
11914 return NULL_TREE;
11915 }
11916
11917 /* Perform constant folding and related simplification of EXPR.
11918 The related simplifications include x*1 => x, x*0 => 0, etc.,
11919 and application of the associative law.
11920 NOP_EXPR conversions may be removed freely (as long as we
11921 are careful not to change the type of the overall expression).
11922 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11923 but we can constant-fold them if they have constant operands. */
11924
11925 #ifdef ENABLE_FOLD_CHECKING
11926 # define fold(x) fold_1 (x)
11927 static tree fold_1 (tree);
11928 static
11929 #endif
11930 tree
11931 fold (tree expr)
11932 {
11933 const tree t = expr;
11934 enum tree_code code = TREE_CODE (t);
11935 enum tree_code_class kind = TREE_CODE_CLASS (code);
11936 tree tem;
11937 location_t loc = EXPR_LOCATION (expr);
11938
11939 /* Return right away if a constant. */
11940 if (kind == tcc_constant)
11941 return t;
11942
11943 /* CALL_EXPR-like objects with variable numbers of operands are
11944 treated specially. */
11945 if (kind == tcc_vl_exp)
11946 {
11947 if (code == CALL_EXPR)
11948 {
11949 tem = fold_call_expr (loc, expr, false);
11950 return tem ? tem : expr;
11951 }
11952 return expr;
11953 }
11954
11955 if (IS_EXPR_CODE_CLASS (kind))
11956 {
11957 tree type = TREE_TYPE (t);
11958 tree op0, op1, op2;
11959
11960 switch (TREE_CODE_LENGTH (code))
11961 {
11962 case 1:
11963 op0 = TREE_OPERAND (t, 0);
11964 tem = fold_unary_loc (loc, code, type, op0);
11965 return tem ? tem : expr;
11966 case 2:
11967 op0 = TREE_OPERAND (t, 0);
11968 op1 = TREE_OPERAND (t, 1);
11969 tem = fold_binary_loc (loc, code, type, op0, op1);
11970 return tem ? tem : expr;
11971 case 3:
11972 op0 = TREE_OPERAND (t, 0);
11973 op1 = TREE_OPERAND (t, 1);
11974 op2 = TREE_OPERAND (t, 2);
11975 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11976 return tem ? tem : expr;
11977 default:
11978 break;
11979 }
11980 }
11981
11982 switch (code)
11983 {
11984 case ARRAY_REF:
11985 {
11986 tree op0 = TREE_OPERAND (t, 0);
11987 tree op1 = TREE_OPERAND (t, 1);
11988
11989 if (TREE_CODE (op1) == INTEGER_CST
11990 && TREE_CODE (op0) == CONSTRUCTOR
11991 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11992 {
11993 tree val = get_array_ctor_element_at_index (op0,
11994 wi::to_offset (op1));
11995 if (val)
11996 return val;
11997 }
11998
11999 return t;
12000 }
12001
12002 /* Return a VECTOR_CST if possible. */
12003 case CONSTRUCTOR:
12004 {
12005 tree type = TREE_TYPE (t);
12006 if (TREE_CODE (type) != VECTOR_TYPE)
12007 return t;
12008
12009 unsigned i;
12010 tree val;
12011 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12012 if (! CONSTANT_CLASS_P (val))
12013 return t;
12014
12015 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12016 }
12017
12018 case CONST_DECL:
12019 return fold (DECL_INITIAL (t));
12020
12021 default:
12022 return t;
12023 } /* switch (code) */
12024 }
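/* A usage sketch of fold: the GENERIC tree for 2 + 3 collapses to a
   single INTEGER_CST.

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = build2 (PLUS_EXPR, integer_type_node, two, three);
     tree folded = fold (sum);

   FOLDED is then the INTEGER_CST 5: PLUS_EXPR is a tcc_binary code of
   length 2, so the dispatch above hands it to fold_binary_loc.  */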
12025
12026 #ifdef ENABLE_FOLD_CHECKING
12027 #undef fold
12028
12029 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12030 hash_table<nofree_ptr_hash<const tree_node> > *);
12031 static void fold_check_failed (const_tree, const_tree);
12032 void print_fold_checksum (const_tree);
12033
12034 /* When configured with --enable-checking=fold, compute a digest of EXPR
12035 before and after the actual fold call to verify that fold did not
12036 accidentally change the original expression. */
12037
12038 tree
12039 fold (tree expr)
12040 {
12041 tree ret;
12042 struct md5_ctx ctx;
12043 unsigned char checksum_before[16], checksum_after[16];
12044 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12045
12046 md5_init_ctx (&ctx);
12047 fold_checksum_tree (expr, &ctx, &ht);
12048 md5_finish_ctx (&ctx, checksum_before);
12049 ht.empty ();
12050
12051 ret = fold_1 (expr);
12052
12053 md5_init_ctx (&ctx);
12054 fold_checksum_tree (expr, &ctx, &ht);
12055 md5_finish_ctx (&ctx, checksum_after);
12056
12057 if (memcmp (checksum_before, checksum_after, 16))
12058 fold_check_failed (expr, ret);
12059
12060 return ret;
12061 }
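/* In effect the checking wrapper above is (pseudo-code sketch):

     before = md5 (deep walk of EXPR);
     ret = fold_1 (EXPR);
     after = md5 (deep walk of EXPR);
     if (before != after)
       internal_error (...);

   fold_1 may build and return new nodes, but it must never mutate the
   tree it was handed.  */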
12062
12063 void
12064 print_fold_checksum (const_tree expr)
12065 {
12066 struct md5_ctx ctx;
12067 unsigned char checksum[16], cnt;
12068 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12069
12070 md5_init_ctx (&ctx);
12071 fold_checksum_tree (expr, &ctx, &ht);
12072 md5_finish_ctx (&ctx, checksum);
12073 for (cnt = 0; cnt < 16; ++cnt)
12074 fprintf (stderr, "%02x", checksum[cnt]);
12075 putc ('\n', stderr);
12076 }
12077
12078 static void
12079 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12080 {
12081 internal_error ("fold check: original tree changed by fold");
12082 }
12083
12084 static void
12085 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12086 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12087 {
12088 const tree_node **slot;
12089 enum tree_code code;
12090 union tree_node buf;
12091 int i, len;
12092
12093 recursive_label:
12094 if (expr == NULL)
12095 return;
12096 slot = ht->find_slot (expr, INSERT);
12097 if (*slot != NULL)
12098 return;
12099 *slot = expr;
12100 code = TREE_CODE (expr);
12101 if (TREE_CODE_CLASS (code) == tcc_declaration
12102 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12103 {
12104 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12105 memcpy ((char *) &buf, expr, tree_size (expr));
12106 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12107 buf.decl_with_vis.symtab_node = NULL;
12108 expr = (tree) &buf;
12109 }
12110 else if (TREE_CODE_CLASS (code) == tcc_type
12111 && (TYPE_POINTER_TO (expr)
12112 || TYPE_REFERENCE_TO (expr)
12113 || TYPE_CACHED_VALUES_P (expr)
12114 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12115 || TYPE_NEXT_VARIANT (expr)))
12116 {
12117 /* Allow these fields to be modified. */
12118 tree tmp;
12119 memcpy ((char *) &buf, expr, tree_size (expr));
12120 expr = tmp = (tree) &buf;
12121 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12122 TYPE_POINTER_TO (tmp) = NULL;
12123 TYPE_REFERENCE_TO (tmp) = NULL;
12124 TYPE_NEXT_VARIANT (tmp) = NULL;
12125 if (TYPE_CACHED_VALUES_P (tmp))
12126 {
12127 TYPE_CACHED_VALUES_P (tmp) = 0;
12128 TYPE_CACHED_VALUES (tmp) = NULL;
12129 }
12130 }
12131 md5_process_bytes (expr, tree_size (expr), ctx);
12132 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12133 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12134 if (TREE_CODE_CLASS (code) != tcc_type
12135 && TREE_CODE_CLASS (code) != tcc_declaration
12136 && code != TREE_LIST
12137 && code != SSA_NAME
12138 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12139 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12140 switch (TREE_CODE_CLASS (code))
12141 {
12142 case tcc_constant:
12143 switch (code)
12144 {
12145 case STRING_CST:
12146 md5_process_bytes (TREE_STRING_POINTER (expr),
12147 TREE_STRING_LENGTH (expr), ctx);
12148 break;
12149 case COMPLEX_CST:
12150 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12151 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12152 break;
12153 case VECTOR_CST:
12154 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12155 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12156 break;
12157 default:
12158 break;
12159 }
12160 break;
12161 case tcc_exceptional:
12162 switch (code)
12163 {
12164 case TREE_LIST:
12165 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12166 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12167 expr = TREE_CHAIN (expr);
12168 goto recursive_label;
12169 break;
12170 case TREE_VEC:
12171 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12172 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12173 break;
12174 default:
12175 break;
12176 }
12177 break;
12178 case tcc_expression:
12179 case tcc_reference:
12180 case tcc_comparison:
12181 case tcc_unary:
12182 case tcc_binary:
12183 case tcc_statement:
12184 case tcc_vl_exp:
12185 len = TREE_OPERAND_LENGTH (expr);
12186 for (i = 0; i < len; ++i)
12187 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12188 break;
12189 case tcc_declaration:
12190 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12191 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12192 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12193 {
12194 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12195 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12196 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12197 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12198 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12199 }
12200
12201 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12202 {
12203 if (TREE_CODE (expr) == FUNCTION_DECL)
12204 {
12205 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12206 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12207 }
12208 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12209 }
12210 break;
12211 case tcc_type:
12212 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12213 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12214 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12215 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12216 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12217 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12218 if (INTEGRAL_TYPE_P (expr)
12219 || SCALAR_FLOAT_TYPE_P (expr))
12220 {
12221 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12222 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12223 }
12224 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12225 if (TREE_CODE (expr) == RECORD_TYPE
12226 || TREE_CODE (expr) == UNION_TYPE
12227 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12228 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12229 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12230 break;
12231 default:
12232 break;
12233 }
12234 }
12235
12236 /* Helper function for printing the checksum of a tree T.  When
12237 debugging with gdb, you can "define mynext" to be "next" followed
12238 by "call debug_fold_checksum (op0)", then just trace down until the
12239 outputs differ. */
12240
12241 DEBUG_FUNCTION void
12242 debug_fold_checksum (const_tree t)
12243 {
12244 int i;
12245 unsigned char checksum[16];
12246 struct md5_ctx ctx;
12247 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12248
12249 md5_init_ctx (&ctx);
12250 fold_checksum_tree (t, &ctx, &ht);
12251 md5_finish_ctx (&ctx, checksum);
12252 ht.empty ();
12253
12254 for (i = 0; i < 16; i++)
12255 fprintf (stderr, "%d ", checksum[i]);
12256
12257 fprintf (stderr, "\n");
12258 }
12259
12260 #endif
12261
12262 /* Fold a unary tree expression with code CODE of type TYPE with an
12263 operand OP0. LOC is the location of the resulting expression.
12264 Return a folded expression if successful. Otherwise, return a tree
12265 expression with code CODE of type TYPE with an operand OP0. */
12266
12267 tree
12268 fold_build1_stat_loc (location_t loc,
12269 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12270 {
12271 tree tem;
12272 #ifdef ENABLE_FOLD_CHECKING
12273 unsigned char checksum_before[16], checksum_after[16];
12274 struct md5_ctx ctx;
12275 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12276
12277 md5_init_ctx (&ctx);
12278 fold_checksum_tree (op0, &ctx, &ht);
12279 md5_finish_ctx (&ctx, checksum_before);
12280 ht.empty ();
12281 #endif
12282
12283 tem = fold_unary_loc (loc, code, type, op0);
12284 if (!tem)
12285 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12286
12287 #ifdef ENABLE_FOLD_CHECKING
12288 md5_init_ctx (&ctx);
12289 fold_checksum_tree (op0, &ctx, &ht);
12290 md5_finish_ctx (&ctx, checksum_after);
12291
12292 if (memcmp (checksum_before, checksum_after, 16))
12293 fold_check_failed (op0, tem);
12294 #endif
12295 return tem;
12296 }
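/* Usage sketch, via the fold_build1_loc macro spelling of the function
   above: a negation that folds immediately when the operand is constant.

     tree x = build_int_cst (integer_type_node, 7);
     tree neg = fold_build1_loc (UNKNOWN_LOCATION, NEGATE_EXPR,
                                 integer_type_node, x);

   NEG is the INTEGER_CST -7 rather than a NEGATE_EXPR node, and with
   checking enabled the checksums verify that X itself was left
   untouched.  */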
12297
12298 /* Fold a binary tree expression with code CODE of type TYPE with
12299 operands OP0 and OP1. LOC is the location of the resulting
12300 expression. Return a folded expression if successful. Otherwise,
12301 return a tree expression with code CODE of type TYPE with operands
12302 OP0 and OP1. */
12303
12304 tree
12305 fold_build2_stat_loc (location_t loc,
12306 enum tree_code code, tree type, tree op0, tree op1
12307 MEM_STAT_DECL)
12308 {
12309 tree tem;
12310 #ifdef ENABLE_FOLD_CHECKING
12311 unsigned char checksum_before_op0[16],
12312 checksum_before_op1[16],
12313 checksum_after_op0[16],
12314 checksum_after_op1[16];
12315 struct md5_ctx ctx;
12316 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12317
12318 md5_init_ctx (&ctx);
12319 fold_checksum_tree (op0, &ctx, &ht);
12320 md5_finish_ctx (&ctx, checksum_before_op0);
12321 ht.empty ();
12322
12323 md5_init_ctx (&ctx);
12324 fold_checksum_tree (op1, &ctx, &ht);
12325 md5_finish_ctx (&ctx, checksum_before_op1);
12326 ht.empty ();
12327 #endif
12328
12329 tem = fold_binary_loc (loc, code, type, op0, op1);
12330 if (!tem)
12331 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12332
12333 #ifdef ENABLE_FOLD_CHECKING
12334 md5_init_ctx (&ctx);
12335 fold_checksum_tree (op0, &ctx, &ht);
12336 md5_finish_ctx (&ctx, checksum_after_op0);
12337 ht.empty ();
12338
12339 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12340 fold_check_failed (op0, tem);
12341
12342 md5_init_ctx (&ctx);
12343 fold_checksum_tree (op1, &ctx, &ht);
12344 md5_finish_ctx (&ctx, checksum_after_op1);
12345
12346 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12347 fold_check_failed (op1, tem);
12348 #endif
12349 return tem;
12350 }
12351
12352 /* Fold a ternary tree expression with code CODE of type TYPE with
12353 operands OP0, OP1, and OP2. Return a folded expression if
12354 successful. Otherwise, return a tree expression with code CODE of
12355 type TYPE with operands OP0, OP1, and OP2. */
12356
12357 tree
12358 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12359 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12360 {
12361 tree tem;
12362 #ifdef ENABLE_FOLD_CHECKING
12363 unsigned char checksum_before_op0[16],
12364 checksum_before_op1[16],
12365 checksum_before_op2[16],
12366 checksum_after_op0[16],
12367 checksum_after_op1[16],
12368 checksum_after_op2[16];
12369 struct md5_ctx ctx;
12370 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12371
12372 md5_init_ctx (&ctx);
12373 fold_checksum_tree (op0, &ctx, &ht);
12374 md5_finish_ctx (&ctx, checksum_before_op0);
12375 ht.empty ();
12376
12377 md5_init_ctx (&ctx);
12378 fold_checksum_tree (op1, &ctx, &ht);
12379 md5_finish_ctx (&ctx, checksum_before_op1);
12380 ht.empty ();
12381
12382 md5_init_ctx (&ctx);
12383 fold_checksum_tree (op2, &ctx, &ht);
12384 md5_finish_ctx (&ctx, checksum_before_op2);
12385 ht.empty ();
12386 #endif
12387
12388 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12389 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12390 if (!tem)
12391 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12392
12393 #ifdef ENABLE_FOLD_CHECKING
12394 md5_init_ctx (&ctx);
12395 fold_checksum_tree (op0, &ctx, &ht);
12396 md5_finish_ctx (&ctx, checksum_after_op0);
12397 ht.empty ();
12398
12399 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12400 fold_check_failed (op0, tem);
12401
12402 md5_init_ctx (&ctx);
12403 fold_checksum_tree (op1, &ctx, &ht);
12404 md5_finish_ctx (&ctx, checksum_after_op1);
12405 ht.empty ();
12406
12407 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12408 fold_check_failed (op1, tem);
12409
12410 md5_init_ctx (&ctx);
12411 fold_checksum_tree (op2, &ctx, &ht);
12412 md5_finish_ctx (&ctx, checksum_after_op2);
12413
12414 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12415 fold_check_failed (op2, tem);
12416 #endif
12417 return tem;
12418 }
12419
12420 /* Fold a CALL_EXPR expression of type TYPE with function FN and NARGS
12421 arguments in ARGARRAY, and a null static chain.
12422 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12423 of type TYPE from the given operands as constructed by build_call_array. */
12424
12425 tree
12426 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12427 int nargs, tree *argarray)
12428 {
12429 tree tem;
12430 #ifdef ENABLE_FOLD_CHECKING
12431 unsigned char checksum_before_fn[16],
12432 checksum_before_arglist[16],
12433 checksum_after_fn[16],
12434 checksum_after_arglist[16];
12435 struct md5_ctx ctx;
12436 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12437 int i;
12438
12439 md5_init_ctx (&ctx);
12440 fold_checksum_tree (fn, &ctx, &ht);
12441 md5_finish_ctx (&ctx, checksum_before_fn);
12442 ht.empty ();
12443
12444 md5_init_ctx (&ctx);
12445 for (i = 0; i < nargs; i++)
12446 fold_checksum_tree (argarray[i], &ctx, &ht);
12447 md5_finish_ctx (&ctx, checksum_before_arglist);
12448 ht.empty ();
12449 #endif
12450
12451 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12452 if (!tem)
12453 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12454
12455 #ifdef ENABLE_FOLD_CHECKING
12456 md5_init_ctx (&ctx);
12457 fold_checksum_tree (fn, &ctx, &ht);
12458 md5_finish_ctx (&ctx, checksum_after_fn);
12459 ht.empty ();
12460
12461 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12462 fold_check_failed (fn, tem);
12463
12464 md5_init_ctx (&ctx);
12465 for (i = 0; i < nargs; i++)
12466 fold_checksum_tree (argarray[i], &ctx, &ht);
12467 md5_finish_ctx (&ctx, checksum_after_arglist);
12468
12469 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12470 fold_check_failed (NULL_TREE, tem);
12471 #endif
12472 return tem;
12473 }
12474
12475 /* Perform constant folding and related simplification of initializer
12476 expression EXPR. These behave identically to "fold_buildN" but ignore
12477 potential run-time traps and exceptions that fold must preserve. */
12478
12479 #define START_FOLD_INIT \
12480 int saved_signaling_nans = flag_signaling_nans;\
12481 int saved_trapping_math = flag_trapping_math;\
12482 int saved_rounding_math = flag_rounding_math;\
12483 int saved_trapv = flag_trapv;\
12484 int saved_folding_initializer = folding_initializer;\
12485 flag_signaling_nans = 0;\
12486 flag_trapping_math = 0;\
12487 flag_rounding_math = 0;\
12488 flag_trapv = 0;\
12489 folding_initializer = 1;
12490
12491 #define END_FOLD_INIT \
12492 flag_signaling_nans = saved_signaling_nans;\
12493 flag_trapping_math = saved_trapping_math;\
12494 flag_rounding_math = saved_rounding_math;\
12495 flag_trapv = saved_trapv;\
12496 folding_initializer = saved_folding_initializer;
12497
12498 tree
12499 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12500 tree type, tree op)
12501 {
12502 tree result;
12503 START_FOLD_INIT;
12504
12505 result = fold_build1_loc (loc, code, type, op);
12506
12507 END_FOLD_INIT;
12508 return result;
12509 }
12510
12511 tree
12512 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12513 tree type, tree op0, tree op1)
12514 {
12515 tree result;
12516 START_FOLD_INIT;
12517
12518 result = fold_build2_loc (loc, code, type, op0, op1);
12519
12520 END_FOLD_INIT;
12521 return result;
12522 }
12523
12524 tree
12525 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12526 int nargs, tree *argarray)
12527 {
12528 tree result;
12529 START_FOLD_INIT;
12530
12531 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12532
12533 END_FOLD_INIT;
12534 return result;
12535 }
12536
12537 #undef START_FOLD_INIT
12538 #undef END_FOLD_INIT
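/* A sketch of why the *_initializer variants exist: a static
   initializer such as

     static double d = 1.0 / 3.0;

   must be folded at compile time even though the division is inexact,
   whereas with -frounding-math or -ftrapping-math fold would normally
   leave such an operation for run time.  The START_FOLD_INIT /
   END_FOLD_INIT pair above temporarily clears those flags around the
   ordinary fold_buildN call.  */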
12539
12540 /* Determine whether the first argument is a multiple of the second argument.
12541 Return 0 if it is not, or if we cannot easily determine that it is.
12542
12543 An example of the sort of thing we care about (at this point; this routine
12544 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12545 fold cases do now) is discovering that
12546
12547 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12548
12549 is a multiple of
12550
12551 SAVE_EXPR (J * 8)
12552
12553 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12554
12555 This code also handles discovering that
12556
12557 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12558
12559 is a multiple of 8 so we don't have to worry about dealing with a
12560 possible remainder.
12561
12562 Note that we *look* inside a SAVE_EXPR only to determine how it was
12563 calculated; it is not safe for fold to do much of anything else with the
12564 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12565 at run time. For example, the latter example above *cannot* be implemented
12566 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12567 evaluation time of the original SAVE_EXPR is not necessarily the same at
12568 the time the new expression is evaluated. The only optimization of this
12569 sort that would be valid is changing
12570
12571 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12572
12573 divided by 8 to
12574
12575 SAVE_EXPR (I) * SAVE_EXPR (J)
12576
12577 (where the same SAVE_EXPR (J) is used in the original and the
12578 transformed version). */
12579
12580 int
12581 multiple_of_p (tree type, const_tree top, const_tree bottom)
12582 {
12583 if (operand_equal_p (top, bottom, 0))
12584 return 1;
12585
12586 if (TREE_CODE (type) != INTEGER_TYPE)
12587 return 0;
12588
12589 switch (TREE_CODE (top))
12590 {
12591 case BIT_AND_EXPR:
12592 /* A bitwise AND can only clear bits, so for a power-of-two BOTTOM,
12593 TOP is a multiple of BOTTOM whenever either AND operand is. */
12594 if (!integer_pow2p (bottom))
12595 return 0;
12596 /* FALLTHRU */
12597
12598 case MULT_EXPR:
12599 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12600 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12601
12602 case PLUS_EXPR:
12603 case MINUS_EXPR:
12604 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12605 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12606
12607 case LSHIFT_EXPR:
12608 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12609 {
12610 tree op1, t1;
12611
12612 op1 = TREE_OPERAND (top, 1);
12613 /* const_binop may not detect overflow correctly,
12614 so check for it explicitly here. */
12615 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12616 && 0 != (t1 = fold_convert (type,
12617 const_binop (LSHIFT_EXPR,
12618 size_one_node,
12619 op1)))
12620 && !TREE_OVERFLOW (t1))
12621 return multiple_of_p (type, t1, bottom);
12622 }
12623 return 0;
12624
12625 case NOP_EXPR:
12626 /* Can't handle conversions from a non-integral or wider integral type. */
12627 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12628 || (TYPE_PRECISION (type)
12629 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12630 return 0;
12631
12632 /* ... fall through ... */
12633
12634 case SAVE_EXPR:
12635 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12636
12637 case COND_EXPR:
12638 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12639 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12640
12641 case INTEGER_CST:
12642 if (TREE_CODE (bottom) != INTEGER_CST
12643 || integer_zerop (bottom)
12644 || (TYPE_UNSIGNED (type)
12645 && (tree_int_cst_sgn (top) < 0
12646 || tree_int_cst_sgn (bottom) < 0)))
12647 return 0;
12648 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12649 SIGNED);
12650
12651 default:
12652 return 0;
12653 }
12654 }
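/* Some concrete outcomes of multiple_of_p, as a sketch (TYPE a plain
   integer type, I and J arbitrary expressions of that type):

     multiple_of_p (type, i * 8 + 16, 8)  -> 1  (MULT and PLUS cases)
     multiple_of_p (type, j << 3, 4)      -> 1  (LSHIFT case: 1 << 3 == 8)
     multiple_of_p (type, i * 6, 4)       -> 0

   The last 0 does not mean "provably not a multiple"; 0 is also the
   answer whenever the routine simply cannot tell.  */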
12655
12656 #define tree_expr_nonnegative_warnv_p(X, Y) \
12657 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12658
12659 #define RECURSE(X) \
12660 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12661
12662 /* Return true if an expression with code CODE and type TYPE is known to be
       non-negative from CODE and TYPE alone. */
12663
12664 static bool
12665 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12666 {
12667 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12668 && truth_value_p (code))
12669 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12670 have a signed:1 type (where the values are -1 and 0). */
12671 return true;
12672 return false;
12673 }
12674
12675 /* Return true if (CODE OP0) is known to be non-negative. If the return
12676 value is based on the assumption that signed overflow is undefined,
12677 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12678 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12679
12680 bool
12681 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12682 bool *strict_overflow_p, int depth)
12683 {
12684 if (TYPE_UNSIGNED (type))
12685 return true;
12686
12687 switch (code)
12688 {
12689 case ABS_EXPR:
12690 /* We can't return true if flag_wrapv is set, because
12691 ABS_EXPR<INT_MIN> == INT_MIN. */
12692 if (!ANY_INTEGRAL_TYPE_P (type))
12693 return true;
12694 if (TYPE_OVERFLOW_UNDEFINED (type))
12695 {
12696 *strict_overflow_p = true;
12697 return true;
12698 }
12699 break;
12700
12701 case NON_LVALUE_EXPR:
12702 case FLOAT_EXPR:
12703 case FIX_TRUNC_EXPR:
12704 return RECURSE (op0);
12705
12706 CASE_CONVERT:
12707 {
12708 tree inner_type = TREE_TYPE (op0);
12709 tree outer_type = type;
12710
12711 if (TREE_CODE (outer_type) == REAL_TYPE)
12712 {
12713 if (TREE_CODE (inner_type) == REAL_TYPE)
12714 return RECURSE (op0);
12715 if (INTEGRAL_TYPE_P (inner_type))
12716 {
12717 if (TYPE_UNSIGNED (inner_type))
12718 return true;
12719 return RECURSE (op0);
12720 }
12721 }
12722 else if (INTEGRAL_TYPE_P (outer_type))
12723 {
12724 if (TREE_CODE (inner_type) == REAL_TYPE)
12725 return RECURSE (op0);
12726 if (INTEGRAL_TYPE_P (inner_type))
12727 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12728 && TYPE_UNSIGNED (inner_type);
12729 }
12730 }
12731 break;
12732
12733 default:
12734 return tree_simple_nonnegative_warnv_p (code, type);
12735 }
12736
12737 /* We don't know the sign of `t', so be conservative and return false. */
12738 return false;
12739 }
12740
12741 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12742 value is based on the assumption that signed overflow is undefined,
12743 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12744 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12745
12746 bool
12747 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12748 tree op1, bool *strict_overflow_p,
12749 int depth)
12750 {
12751 if (TYPE_UNSIGNED (type))
12752 return true;
12753
12754 switch (code)
12755 {
12756 case POINTER_PLUS_EXPR:
12757 case PLUS_EXPR:
12758 if (FLOAT_TYPE_P (type))
12759 return RECURSE (op0) && RECURSE (op1);
12760
12761 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12762 both unsigned and at least 2 bits shorter than the result. */
12763 if (TREE_CODE (type) == INTEGER_TYPE
12764 && TREE_CODE (op0) == NOP_EXPR
12765 && TREE_CODE (op1) == NOP_EXPR)
12766 {
12767 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12768 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12769 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12770 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12771 {
12772 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12773 TYPE_PRECISION (inner2)) + 1;
12774 return prec < TYPE_PRECISION (type);
12775 }
12776 }
12777 break;
12778
12779 case MULT_EXPR:
12780 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12781 {
12782 /* x * x is always non-negative for floating-point x, and for
12783 integers when overflow is undefined. */
12784 if (operand_equal_p (op0, op1, 0)
12785 || (RECURSE (op0) && RECURSE (op1)))
12786 {
12787 if (ANY_INTEGRAL_TYPE_P (type)
12788 && TYPE_OVERFLOW_UNDEFINED (type))
12789 *strict_overflow_p = true;
12790 return true;
12791 }
12792 }
12793
12794 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are both
12795 unsigned and their combined precision is less than the result's. */
12796 if (TREE_CODE (type) == INTEGER_TYPE
12797 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12798 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12799 {
12800 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12801 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12802 : TREE_TYPE (op0);
12803 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12804 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12805 : TREE_TYPE (op1);
12806
12807 bool unsigned0 = TYPE_UNSIGNED (inner0);
12808 bool unsigned1 = TYPE_UNSIGNED (inner1);
12809
12810 if (TREE_CODE (op0) == INTEGER_CST)
12811 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12812
12813 if (TREE_CODE (op1) == INTEGER_CST)
12814 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12815
12816 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12817 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12818 {
12819 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12820 ? tree_int_cst_min_precision (op0, UNSIGNED)
12821 : TYPE_PRECISION (inner0);
12822
12823 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12824 ? tree_int_cst_min_precision (op1, UNSIGNED)
12825 : TYPE_PRECISION (inner1);
12826
12827 return precision0 + precision1 < TYPE_PRECISION (type);
12828 }
12829 }
12830 return false;
12831
12832 case BIT_AND_EXPR:
12833 case MAX_EXPR:
12834 return RECURSE (op0) || RECURSE (op1);
12835
12836 case BIT_IOR_EXPR:
12837 case BIT_XOR_EXPR:
12838 case MIN_EXPR:
12839 case RDIV_EXPR:
12840 case TRUNC_DIV_EXPR:
12841 case CEIL_DIV_EXPR:
12842 case FLOOR_DIV_EXPR:
12843 case ROUND_DIV_EXPR:
12844 return RECURSE (op0) && RECURSE (op1);
12845
12846 case TRUNC_MOD_EXPR:
12847 return RECURSE (op0);
12848
12849 case FLOOR_MOD_EXPR:
12850 return RECURSE (op1);
12851
12852 case CEIL_MOD_EXPR:
12853 case ROUND_MOD_EXPR:
12854 default:
12855 return tree_simple_nonnegative_warnv_p (code, type);
12856 }
12857
12858 /* We don't know the sign of `t', so be conservative and return false. */
12859 return false;
12860 }
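/* A worked instance of the PLUS_EXPR zero-extension rule above, as a
   sketch: for

     unsigned char a, b;
     int s = (int) a + (int) b;

   both inner types have precision 8, so prec = MAX (8, 8) + 1 = 9,
   which is less than the 32-bit precision of the result; the sum is at
   most 255 + 255 = 510 and therefore provably non-negative.  */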
12861
12862 /* Return true if T is known to be non-negative. If the return
12863 value is based on the assumption that signed overflow is undefined,
12864 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12865 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12866
12867 bool
12868 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12869 {
12870 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12871 return true;
12872
12873 switch (TREE_CODE (t))
12874 {
12875 case INTEGER_CST:
12876 return tree_int_cst_sgn (t) >= 0;
12877
12878 case REAL_CST:
12879 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12880
12881 case FIXED_CST:
12882 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12883
12884 case COND_EXPR:
12885 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12886
12887 case SSA_NAME:
12888 /* Limit the depth of recursion to avoid quadratic behavior.
12889 This is expected to catch almost all occurrences in practice.
12890 If this code misses important cases that unbounded recursion
12891 would not, passes that need this information could be revised
12892 to provide it through dataflow propagation. */
12893 return (!name_registered_for_update_p (t)
12894 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12895 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12896 strict_overflow_p, depth));
12897
12898 default:
12899 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12900 }
12901 }
12902
12903 /* Return true if T is known to be non-negative. If the return
12904 value is based on the assumption that signed overflow is undefined,
12905 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12906 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12907
12908 bool
12909 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12910 bool *strict_overflow_p, int depth)
12911 {
12912 switch (fn)
12913 {
12914 CASE_CFN_ACOS:
12915 CASE_CFN_ACOSH:
12916 CASE_CFN_CABS:
12917 CASE_CFN_COSH:
12918 CASE_CFN_ERFC:
12919 CASE_CFN_EXP:
12920 CASE_CFN_EXP10:
12921 CASE_CFN_EXP2:
12922 CASE_CFN_FABS:
12923 CASE_CFN_FDIM:
12924 CASE_CFN_HYPOT:
12925 CASE_CFN_POW10:
12926 CASE_CFN_FFS:
12927 CASE_CFN_PARITY:
12928 CASE_CFN_POPCOUNT:
12929 CASE_CFN_CLZ:
12930 CASE_CFN_CLRSB:
12931 case CFN_BUILT_IN_BSWAP32:
12932 case CFN_BUILT_IN_BSWAP64:
12933 /* Always true. */
12934 return true;
12935
12936 CASE_CFN_SQRT:
12937 /* sqrt(-0.0) is -0.0. */
12938 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12939 return true;
12940 return RECURSE (arg0);
12941
12942 CASE_CFN_ASINH:
12943 CASE_CFN_ATAN:
12944 CASE_CFN_ATANH:
12945 CASE_CFN_CBRT:
12946 CASE_CFN_CEIL:
12947 CASE_CFN_ERF:
12948 CASE_CFN_EXPM1:
12949 CASE_CFN_FLOOR:
12950 CASE_CFN_FMOD:
12951 CASE_CFN_FREXP:
12952 CASE_CFN_ICEIL:
12953 CASE_CFN_IFLOOR:
12954 CASE_CFN_IRINT:
12955 CASE_CFN_IROUND:
12956 CASE_CFN_LCEIL:
12957 CASE_CFN_LDEXP:
12958 CASE_CFN_LFLOOR:
12959 CASE_CFN_LLCEIL:
12960 CASE_CFN_LLFLOOR:
12961 CASE_CFN_LLRINT:
12962 CASE_CFN_LLROUND:
12963 CASE_CFN_LRINT:
12964 CASE_CFN_LROUND:
12965 CASE_CFN_MODF:
12966 CASE_CFN_NEARBYINT:
12967 CASE_CFN_RINT:
12968 CASE_CFN_ROUND:
12969 CASE_CFN_SCALB:
12970 CASE_CFN_SCALBLN:
12971 CASE_CFN_SCALBN:
12972 CASE_CFN_SIGNBIT:
12973 CASE_CFN_SIGNIFICAND:
12974 CASE_CFN_SINH:
12975 CASE_CFN_TANH:
12976 CASE_CFN_TRUNC:
12977 /* True if the 1st argument is nonnegative. */
12978 return RECURSE (arg0);
12979
12980 CASE_CFN_FMAX:
12981 /* True if the 1st OR 2nd arguments are nonnegative. */
12982 return RECURSE (arg0) || RECURSE (arg1);
12983
12984 CASE_CFN_FMIN:
12985 /* True if the 1st AND 2nd arguments are nonnegative. */
12986 return RECURSE (arg0) && RECURSE (arg1);
12987
12988 CASE_CFN_COPYSIGN:
12989 /* True if the 2nd argument is nonnegative. */
12990 return RECURSE (arg1);
12991
12992 CASE_CFN_POWI:
12993 /* True if the 1st argument is nonnegative or the second
12994 argument is an even integer. */
12995 if (TREE_CODE (arg1) == INTEGER_CST
12996 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
12997 return true;
12998 return RECURSE (arg0);
12999
13000 CASE_CFN_POW:
13001 /* True if the 1st argument is nonnegative or the second
13002 argument is an even integer valued real. */
13003 if (TREE_CODE (arg1) == REAL_CST)
13004 {
13005 REAL_VALUE_TYPE c;
13006 HOST_WIDE_INT n;
13007
13008 c = TREE_REAL_CST (arg1);
13009 n = real_to_integer (&c);
13010 if ((n & 1) == 0)
13011 {
13012 REAL_VALUE_TYPE cint;
13013 real_from_integer (&cint, VOIDmode, n, SIGNED);
13014 if (real_identical (&c, &cint))
13015 return true;
13016 }
13017 }
13018 return RECURSE (arg0);
13019
13020 default:
13021 break;
13022 }
13023 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13024 }
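/* The CASE_CFN_POW case above, as a worked sketch: for pow (x, 2.0)
   the exponent is a REAL_CST whose value converts to the even integer
   2 and round-trips through real_from_integer unchanged, so the result
   is known non-negative without recursing into ARG0, exactly as for
   x * x.  */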
13025
13026 /* Return true if T is known to be non-negative. If the return
13027 value is based on the assumption that signed overflow is undefined,
13028 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13029 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13030
13031 static bool
13032 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13033 {
13034 enum tree_code code = TREE_CODE (t);
13035 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13036 return true;
13037
13038 switch (code)
13039 {
13040 case TARGET_EXPR:
13041 {
13042 tree temp = TARGET_EXPR_SLOT (t);
13043 t = TARGET_EXPR_INITIAL (t);
13044
13045 /* If the initializer is non-void, then it's a normal expression
13046 that will be assigned to the slot. */
13047 if (!VOID_TYPE_P (t))
13048 return RECURSE (t);
13049
13050 /* Otherwise, the initializer sets the slot in some way. One common
13051 way is an assignment statement at the end of the initializer. */
13052 while (1)
13053 {
13054 if (TREE_CODE (t) == BIND_EXPR)
13055 t = expr_last (BIND_EXPR_BODY (t));
13056 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13057 || TREE_CODE (t) == TRY_CATCH_EXPR)
13058 t = expr_last (TREE_OPERAND (t, 0));
13059 else if (TREE_CODE (t) == STATEMENT_LIST)
13060 t = expr_last (t);
13061 else
13062 break;
13063 }
13064 if (TREE_CODE (t) == MODIFY_EXPR
13065 && TREE_OPERAND (t, 0) == temp)
13066 return RECURSE (TREE_OPERAND (t, 1));
13067
13068 return false;
13069 }
13070
13071 case CALL_EXPR:
13072 {
13073 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13074 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13075
13076 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13077 get_call_combined_fn (t),
13078 arg0,
13079 arg1,
13080 strict_overflow_p, depth);
13081 }
13082 case COMPOUND_EXPR:
13083 case MODIFY_EXPR:
13084 return RECURSE (TREE_OPERAND (t, 1));
13085
13086 case BIND_EXPR:
13087 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13088
13089 case SAVE_EXPR:
13090 return RECURSE (TREE_OPERAND (t, 0));
13091
13092 default:
13093 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13094 }
13095 }
13096
13097 #undef RECURSE
13098 #undef tree_expr_nonnegative_warnv_p
13099
13100 /* Return true if T is known to be non-negative. If the return
13101 value is based on the assumption that signed overflow is undefined,
13102 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13103 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13104
13105 bool
13106 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13107 {
13108 enum tree_code code;
13109 if (t == error_mark_node)
13110 return false;
13111
13112 code = TREE_CODE (t);
13113 switch (TREE_CODE_CLASS (code))
13114 {
13115 case tcc_binary:
13116 case tcc_comparison:
13117 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13118 TREE_TYPE (t),
13119 TREE_OPERAND (t, 0),
13120 TREE_OPERAND (t, 1),
13121 strict_overflow_p, depth);
13122
13123 case tcc_unary:
13124 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13125 TREE_TYPE (t),
13126 TREE_OPERAND (t, 0),
13127 strict_overflow_p, depth);
13128
13129 case tcc_constant:
13130 case tcc_declaration:
13131 case tcc_reference:
13132 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13133
13134 default:
13135 break;
13136 }
13137
13138 switch (code)
13139 {
13140 case TRUTH_AND_EXPR:
13141 case TRUTH_OR_EXPR:
13142 case TRUTH_XOR_EXPR:
13143 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13144 TREE_TYPE (t),
13145 TREE_OPERAND (t, 0),
13146 TREE_OPERAND (t, 1),
13147 strict_overflow_p, depth);
13148 case TRUTH_NOT_EXPR:
13149 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13150 TREE_TYPE (t),
13151 TREE_OPERAND (t, 0),
13152 strict_overflow_p, depth);
13153
13154 case COND_EXPR:
13155 case CONSTRUCTOR:
13156 case OBJ_TYPE_REF:
13157 case ASSERT_EXPR:
13158 case ADDR_EXPR:
13159 case WITH_SIZE_EXPR:
13160 case SSA_NAME:
13161 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13162
13163 default:
13164 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13165 }
13166 }
13167
13168 /* Return true if `t' is known to be non-negative. Handle warnings
13169 about undefined signed overflow. */
13170
13171 bool
13172 tree_expr_nonnegative_p (tree t)
13173 {
13174 bool ret, strict_overflow_p;
13175
13176 strict_overflow_p = false;
13177 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13178 if (strict_overflow_p)
13179 fold_overflow_warning (("assuming signed overflow does not occur when "
13180 "determining that expression is always "
13181 "non-negative"),
13182 WARN_STRICT_OVERFLOW_MISC);
13183 return ret;
13184 }
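/* Usage sketch: a caller deciding whether ABS_EXPR <x> can simply be
   replaced by X.

     if (tree_expr_nonnegative_p (x))
       ... drop the ABS_EXPR ...

   Any "assuming signed overflow does not occur" warning the answer
   depends on has already been issued by the wrapper above.  */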
13185
13186
13187 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13188 For floating point we further ensure that T is not denormal.
13189 Similar logic is present in nonzero_address_p in rtlanal.c.
13190
13191 If the return value is based on the assumption that signed overflow
13192 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13193 change *STRICT_OVERFLOW_P. */
13194
13195 bool
13196 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13197 bool *strict_overflow_p)
13198 {
13199 switch (code)
13200 {
13201 case ABS_EXPR:
13202 return tree_expr_nonzero_warnv_p (op0,
13203 strict_overflow_p);
13204
13205 case NOP_EXPR:
13206 {
13207 tree inner_type = TREE_TYPE (op0);
13208 tree outer_type = type;
13209
13210 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13211 && tree_expr_nonzero_warnv_p (op0,
13212 strict_overflow_p));
13213 }
13214 break;
13215
13216 case NON_LVALUE_EXPR:
13217 return tree_expr_nonzero_warnv_p (op0,
13218 strict_overflow_p);
13219
13220 default:
13221 break;
13222 }
13223
13224 return false;
13225 }
13226
13227 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13228 For floating point we further ensure that T is not denormal.
13229 Similar logic is present in nonzero_address_p in rtlanal.c.
13230
13231 If the return value is based on the assumption that signed overflow
13232 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13233 change *STRICT_OVERFLOW_P. */
13234
13235 bool
13236 tree_binary_nonzero_warnv_p (enum tree_code code,
13237 tree type,
13238 tree op0,
13239 tree op1, bool *strict_overflow_p)
13240 {
13241 bool sub_strict_overflow_p;
13242 switch (code)
13243 {
13244 case POINTER_PLUS_EXPR:
13245 case PLUS_EXPR:
13246 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13247 {
13248 /* In the presence of negative values it is hard to say
13249 anything definite. */
13250 sub_strict_overflow_p = false;
13251 if (!tree_expr_nonnegative_warnv_p (op0,
13252 &sub_strict_overflow_p)
13253 || !tree_expr_nonnegative_warnv_p (op1,
13254 &sub_strict_overflow_p))
13255 return false;
13256 /* One of the operands must be positive and the other non-negative. */
13257 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13258 overflows, on a two's-complement machine the sum of two
13259 nonnegative numbers, one of them nonzero, can never be zero. */
13260 return (tree_expr_nonzero_warnv_p (op0,
13261 strict_overflow_p)
13262 || tree_expr_nonzero_warnv_p (op1,
13263 strict_overflow_p));
13264 }
13265 break;
13266
13267 case MULT_EXPR:
13268 if (TYPE_OVERFLOW_UNDEFINED (type))
13269 {
13270 if (tree_expr_nonzero_warnv_p (op0,
13271 strict_overflow_p)
13272 && tree_expr_nonzero_warnv_p (op1,
13273 strict_overflow_p))
13274 {
13275 *strict_overflow_p = true;
13276 return true;
13277 }
13278 }
13279 break;
13280
13281 case MIN_EXPR:
13282 sub_strict_overflow_p = false;
13283 if (tree_expr_nonzero_warnv_p (op0,
13284 &sub_strict_overflow_p)
13285 && tree_expr_nonzero_warnv_p (op1,
13286 &sub_strict_overflow_p))
13287 {
13288 if (sub_strict_overflow_p)
13289 *strict_overflow_p = true;
13290 }
13291 break;
13292
13293 case MAX_EXPR:
13294 sub_strict_overflow_p = false;
13295 if (tree_expr_nonzero_warnv_p (op0,
13296 &sub_strict_overflow_p))
13297 {
13298 if (sub_strict_overflow_p)
13299 *strict_overflow_p = true;
13300
13301 /* When both operands are nonzero, MAX must be too. */
13302 if (tree_expr_nonzero_warnv_p (op1,
13303 strict_overflow_p))
13304 return true;
13305
13306 /* MAX where operand 0 is positive is positive. */
13307 return tree_expr_nonnegative_warnv_p (op0,
13308 strict_overflow_p);
13309 }
13310 /* MAX where operand 1 is positive is positive. */
13311 else if (tree_expr_nonzero_warnv_p (op1,
13312 &sub_strict_overflow_p)
13313 && tree_expr_nonnegative_warnv_p (op1,
13314 &sub_strict_overflow_p))
13315 {
13316 if (sub_strict_overflow_p)
13317 *strict_overflow_p = true;
13318 return true;
13319 }
13320 break;
13321
13322 case BIT_IOR_EXPR:
13323 return (tree_expr_nonzero_warnv_p (op1,
13324 strict_overflow_p)
13325 || tree_expr_nonzero_warnv_p (op0,
13326 strict_overflow_p));
13327
13328 default:
13329 break;
13330 }
13331
13332 return false;
13333 }
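/* The PLUS_EXPR case above, as a sketch: for a signed int I known to
   satisfy I >= 0, the expression I + 1 is nonzero, because both
   operands are non-negative and one of them (the constant 1) is
   nonzero.  Under -fwrapv the case is skipped entirely, since
   TYPE_OVERFLOW_UNDEFINED is then false.  */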
13334
13335 /* Return true when T is an address and is known to be nonzero.
13336 For floating point we further ensure that T is not denormal.
13337 Similar logic is present in nonzero_address_p in rtlanal.c.
13338
13339 If the return value is based on the assumption that signed overflow
13340 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13341 change *STRICT_OVERFLOW_P. */
13342
13343 bool
13344 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13345 {
13346 bool sub_strict_overflow_p;
13347 switch (TREE_CODE (t))
13348 {
13349 case INTEGER_CST:
13350 return !integer_zerop (t);
13351
13352 case ADDR_EXPR:
13353 {
13354 tree base = TREE_OPERAND (t, 0);
13355
13356 if (!DECL_P (base))
13357 base = get_base_address (base);
13358
13359 if (!base)
13360 return false;
13361
13362 /* For objects in the symbol table, check whether we know they are
13363 non-zero.  Don't do anything for variables and functions before the
13364 symtab is built; it is quite possible that they will be declared weak later. */
13365 if (DECL_P (base) && decl_in_symtab_p (base))
13366 {
13367 struct symtab_node *symbol;
13368
13369 symbol = symtab_node::get_create (base);
13370 if (symbol)
13371 return symbol->nonzero_address ();
13372 else
13373 return false;
13374 }
13375
13376 /* Function local objects are never NULL. */
13377 if (DECL_P (base)
13378 && (DECL_CONTEXT (base)
13379 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13380 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13381 return true;
13382
13383 /* Constants are never weak. */
13384 if (CONSTANT_CLASS_P (base))
13385 return true;
13386
13387 return false;
13388 }
13389
13390 case COND_EXPR:
13391 sub_strict_overflow_p = false;
13392 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13393 &sub_strict_overflow_p)
13394 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13395 &sub_strict_overflow_p))
13396 {
13397 if (sub_strict_overflow_p)
13398 *strict_overflow_p = true;
13399 return true;
13400 }
13401 break;
13402
13403 default:
13404 break;
13405 }
13406 return false;
13407 }
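/* ADDR_EXPR sketches of the logic above: the address of a
   function-local auto variable, as in

     int f (void) { int local; return &local != 0; }

   is always nonzero, so the comparison folds to 1.  For a global,
   symtab_node::nonzero_address decides, because the declaration might
   still turn out to be weak (a weak symbol may resolve to address
   zero).  */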
13408
13409 #define integer_valued_real_p(X) \
13410 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13411
13412 #define RECURSE(X) \
13413 ((integer_valued_real_p) (X, depth + 1))
13414
13415 /* Return true if the floating point result of (CODE OP0) has an
13416 integer value. We also allow +Inf, -Inf and NaN to be considered
13417 integer values.
13418
13419 DEPTH is the current nesting depth of the query. */
13420
13421 bool
13422 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13423 {
13424 switch (code)
13425 {
13426 case FLOAT_EXPR:
13427 return true;
13428
13429 case ABS_EXPR:
13430 return RECURSE (op0);
13431
13432 CASE_CONVERT:
13433 {
13434 tree type = TREE_TYPE (op0);
13435 if (TREE_CODE (type) == INTEGER_TYPE)
13436 return true;
13437 if (TREE_CODE (type) == REAL_TYPE)
13438 return RECURSE (op0);
13439 break;
13440 }
13441
13442 default:
13443 break;
13444 }
13445 return false;
13446 }
13447
13448 /* Return true if the floating point result of (CODE OP0 OP1) has an
13449 integer value. We also allow +Inf, -Inf and NaN to be considered
13450 integer values.
13451
13452 DEPTH is the current nesting depth of the query. */
13453
13454 bool
13455 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13456 {
13457 switch (code)
13458 {
13459 case PLUS_EXPR:
13460 case MINUS_EXPR:
13461 case MULT_EXPR:
13462 case MIN_EXPR:
13463 case MAX_EXPR:
13464 return RECURSE (op0) && RECURSE (op1);
13465
13466 default:
13467 break;
13468 }
13469 return false;
13470 }
13471
13472 /* Return true if the floating point result of calling FN with arguments
13473 ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
13474 considered integer values.  If FN takes fewer than 2 arguments,
13475 the remaining ARGn are null.
13476
13477 DEPTH is the current nesting depth of the query. */
13478
13479 bool
13480 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13481 {
13482 switch (fn)
13483 {
13484 CASE_CFN_CEIL:
13485 CASE_CFN_FLOOR:
13486 CASE_CFN_NEARBYINT:
13487 CASE_CFN_RINT:
13488 CASE_CFN_ROUND:
13489 CASE_CFN_TRUNC:
13490 return true;
13491
13492 CASE_CFN_FMIN:
13493 CASE_CFN_FMAX:
13494 return RECURSE (arg0) && RECURSE (arg1);
13495
13496 default:
13497 break;
13498 }
13499 return false;
13500 }
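/* Sketches of the call cases above (enumerator spellings are from
   combined_fn; treat them as illustrative):

     integer_valued_real_call_p (CFN_BUILT_IN_CEIL, x, NULL_TREE, 0)
       -> true for any X, since ceil always yields an integer
          (or +-Inf/NaN, which are allowed here);

     integer_valued_real_call_p (CFN_BUILT_IN_FMIN, a, b, 0)
       -> true only if both A and B recurse to true.  */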
13501
13502 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13503 has an integer value. We also allow +Inf, -Inf and NaN to be
13504 considered integer values.
13505
13506 DEPTH is the current nesting depth of the query. */
13507
13508 bool
13509 integer_valued_real_single_p (tree t, int depth)
13510 {
13511 switch (TREE_CODE (t))
13512 {
13513 case REAL_CST:
13514 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13515
13516 case COND_EXPR:
13517 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13518
13519 case SSA_NAME:
13520 /* Limit the depth of recursion to avoid quadratic behavior.
13521 This is expected to catch almost all occurrences in practice.
13522 If this code misses important cases that unbounded recursion
13523 would not, passes that need this information could be revised
13524 to provide it through dataflow propagation. */
13525 return (!name_registered_for_update_p (t)
13526 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13527 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13528 depth));
13529
13530 default:
13531 break;
13532 }
13533 return false;
13534 }
13535
13536 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13537 has an integer value. We also allow +Inf, -Inf and NaN to be
13538 considered integer values.
13539
13540 DEPTH is the current nesting depth of the query. */
13541
13542 static bool
13543 integer_valued_real_invalid_p (tree t, int depth)
13544 {
13545 switch (TREE_CODE (t))
13546 {
13547 case COMPOUND_EXPR:
13548 case MODIFY_EXPR:
13549 case BIND_EXPR:
13550 return RECURSE (TREE_OPERAND (t, 1));
13551
13552 case SAVE_EXPR:
13553 return RECURSE (TREE_OPERAND (t, 0));
13554
13555 default:
13556 break;
13557 }
13558 return false;
13559 }
13560
13561 #undef RECURSE
13562 #undef integer_valued_real_p
13563
13564 /* Return true if the floating point expression T has an integer value.
13565 We also allow +Inf, -Inf and NaN to be considered integer values.
13566
13567 DEPTH is the current nesting depth of the query. */
13568
13569 bool
13570 integer_valued_real_p (tree t, int depth)
13571 {
13572 if (t == error_mark_node)
13573 return false;
13574
13575 tree_code code = TREE_CODE (t);
13576 switch (TREE_CODE_CLASS (code))
13577 {
13578 case tcc_binary:
13579 case tcc_comparison:
13580 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13581 TREE_OPERAND (t, 1), depth);
13582
13583 case tcc_unary:
13584 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13585
13586 case tcc_constant:
13587 case tcc_declaration:
13588 case tcc_reference:
13589 return integer_valued_real_single_p (t, depth);
13590
13591 default:
13592 break;
13593 }
13594
13595 switch (code)
13596 {
13597 case COND_EXPR:
13598 case SSA_NAME:
13599 return integer_valued_real_single_p (t, depth);
13600
13601 case CALL_EXPR:
13602 {
13603 tree arg0 = (call_expr_nargs (t) > 0
13604 ? CALL_EXPR_ARG (t, 0)
13605 : NULL_TREE);
13606 tree arg1 = (call_expr_nargs (t) > 1
13607 ? CALL_EXPR_ARG (t, 1)
13608 : NULL_TREE);
13609 return integer_valued_real_call_p (get_call_combined_fn (t),
13610 arg0, arg1, depth);
13611 }
13612
13613 default:
13614 return integer_valued_real_invalid_p (t, depth);
13615 }
13616 }
13617
13618 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13619 attempt to fold the expression to a constant without modifying TYPE,
13620 OP0 or OP1.
13621
13622 If the expression could be simplified to a constant, then return
13623 the constant. If the expression would not be simplified to a
13624 constant, then return NULL_TREE. */
13625
13626 tree
13627 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13628 {
13629 tree tem = fold_binary (code, type, op0, op1);
13630 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13631 }
13632
13633 /* Given the components of a unary expression CODE, TYPE and OP0,
13634 attempt to fold the expression to a constant without modifying
13635 TYPE or OP0.
13636
13637 If the expression could be simplified to a constant, then return
13638 the constant. If the expression would not be simplified to a
13639 constant, then return NULL_TREE. */
13640
13641 tree
13642 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13643 {
13644 tree tem = fold_unary (code, type, op0);
13645 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13646 }
13647
13648 /* If EXP represents referencing an element in a constant string
13649 (either via pointer arithmetic or array indexing), return the
13650 tree representing the value accessed, otherwise return NULL. */
13651
13652 tree
13653 fold_read_from_constant_string (tree exp)
13654 {
13655 if ((TREE_CODE (exp) == INDIRECT_REF
13656 || TREE_CODE (exp) == ARRAY_REF)
13657 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13658 {
13659 tree exp1 = TREE_OPERAND (exp, 0);
13660 tree index;
13661 tree string;
13662 location_t loc = EXPR_LOCATION (exp);
13663
13664 if (TREE_CODE (exp) == INDIRECT_REF)
13665 string = string_constant (exp1, &index);
13666 else
13667 {
13668 tree low_bound = array_ref_low_bound (exp);
13669 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13670
13671 /* Optimize the special case of a zero lower bound.
13672
13673 We convert the low_bound to sizetype to avoid some problems
13674 with constant folding.  (E.g. suppose the lower bound is 1,
13675 and its mode is QI.  Without the conversion, (ARRAY
13676 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13677 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!) */
13678 if (! integer_zerop (low_bound))
13679 index = size_diffop_loc (loc, index,
13680 fold_convert_loc (loc, sizetype, low_bound));
13681
13682 string = exp1;
13683 }
13684
13685 if (string
13686 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13687 && TREE_CODE (string) == STRING_CST
13688 && TREE_CODE (index) == INTEGER_CST
13689 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13690 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13691 == MODE_INT)
13692 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13693 return build_int_cst_type (TREE_TYPE (exp),
13694 (TREE_STRING_POINTER (string)
13695 [TREE_INT_CST_LOW (index)]));
13696 }
13697 return NULL;
13698 }
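/* For example, for the C expression "hello"[1], EXP is an ARRAY_REF of
   a STRING_CST with INTEGER_CST index 1, and the routine above folds
   it to the INTEGER_CST 'e'.  The comparison against
   TREE_STRING_LENGTH keeps an out-of-range access such as "hello"[99]
   from being folded.  */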
13699
13700 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13701 an integer constant, real, or fixed-point constant.
13702
13703 TYPE is the type of the result. */
13704
13705 static tree
13706 fold_negate_const (tree arg0, tree type)
13707 {
13708 tree t = NULL_TREE;
13709
13710 switch (TREE_CODE (arg0))
13711 {
13712 case INTEGER_CST:
13713 {
13714 bool overflow;
13715 wide_int val = wi::neg (arg0, &overflow);
13716 t = force_fit_type (type, val, 1,
13717 (overflow | TREE_OVERFLOW (arg0))
13718 && !TYPE_UNSIGNED (type));
13719 break;
13720 }
13721
13722 case REAL_CST:
13723 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13724 break;
13725
13726 case FIXED_CST:
13727 {
13728 FIXED_VALUE_TYPE f;
13729 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13730 &(TREE_FIXED_CST (arg0)), NULL,
13731 TYPE_SATURATING (type));
13732 t = build_fixed (type, f);
13733 /* Propagate overflow flags. */
13734 if (overflow_p | TREE_OVERFLOW (arg0))
13735 TREE_OVERFLOW (t) = 1;
13736 break;
13737 }
13738
13739 default:
13740 gcc_unreachable ();
13741 }
13742
13743 return t;
13744 }
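/* A sketch of the INTEGER_CST case above: negating INT_MIN in a
   signed 32-bit type wraps back to INT_MIN, so wi::neg reports
   overflow and force_fit_type returns a fresh constant with
   TREE_OVERFLOW set:

     tree n = fold_negate_const (TYPE_MIN_VALUE (integer_type_node),
                                 integer_type_node);

   leaves the value equal to INT_MIN again but TREE_OVERFLOW (n)
   nonzero.  */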
13745
13746 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13747 an integer constant or real constant.
13748
13749 TYPE is the type of the result. */
13750
13751 tree
13752 fold_abs_const (tree arg0, tree type)
13753 {
13754 tree t = NULL_TREE;
13755
13756 switch (TREE_CODE (arg0))
13757 {
13758 case INTEGER_CST:
13759 {
13760 /* If the value is unsigned or non-negative, then the absolute value
13761 is the same as the ordinary value. */
13762 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13763 t = arg0;
13764
13765 /* If the value is negative, then the absolute value is
13766 its negation. */
13767 else
13768 {
13769 bool overflow;
13770 wide_int val = wi::neg (arg0, &overflow);
13771 t = force_fit_type (type, val, -1,
13772 overflow | TREE_OVERFLOW (arg0));
13773 }
13774 }
13775 break;
13776
13777 case REAL_CST:
13778 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13779 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13780 else
13781 t = arg0;
13782 break;
13783
13784 default:
13785 gcc_unreachable ();
13786 }
13787
13788 return t;
13789 }
13790
13791 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13792 constant. TYPE is the type of the result. */
13793
13794 static tree
13795 fold_not_const (const_tree arg0, tree type)
13796 {
13797 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13798
13799 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13800 }
13801
13802 /* Given CODE, a relational operator, the target type, TYPE and two
13803 constant operands OP0 and OP1, return the result of the
13804 relational operation. If the result is not a compile time
13805 constant, then return NULL_TREE. */
13806
13807 static tree
13808 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13809 {
13810 int result, invert;
13811
13812 /* From here on, the only cases we handle are when the result is
13813 known to be a constant. */
13814
13815 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13816 {
13817 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13818 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13819
13820 /* Handle the cases where either operand is a NaN. */
13821 if (real_isnan (c0) || real_isnan (c1))
13822 {
13823 switch (code)
13824 {
13825 case EQ_EXPR:
13826 case ORDERED_EXPR:
13827 result = 0;
13828 break;
13829
13830 case NE_EXPR:
13831 case UNORDERED_EXPR:
13832 case UNLT_EXPR:
13833 case UNLE_EXPR:
13834 case UNGT_EXPR:
13835 case UNGE_EXPR:
13836 case UNEQ_EXPR:
13837 result = 1;
13838 break;
13839
13840 case LT_EXPR:
13841 case LE_EXPR:
13842 case GT_EXPR:
13843 case GE_EXPR:
13844 case LTGT_EXPR:
13845 if (flag_trapping_math)
13846 return NULL_TREE;
13847 result = 0;
13848 break;
13849
13850 default:
13851 gcc_unreachable ();
13852 }
13853
13854 return constant_boolean_node (result, type);
13855 }
13856
13857 return constant_boolean_node (real_compare (code, c0, c1), type);
13858 }
13859
13860 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13861 {
13862 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13863 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13864 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13865 }
13866
13867 /* Handle equality/inequality of complex constants. */
13868 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13869 {
13870 tree rcond = fold_relational_const (code, type,
13871 TREE_REALPART (op0),
13872 TREE_REALPART (op1));
13873 tree icond = fold_relational_const (code, type,
13874 TREE_IMAGPART (op0),
13875 TREE_IMAGPART (op1));
13876 if (code == EQ_EXPR)
13877 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13878 else if (code == NE_EXPR)
13879 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13880 else
13881 return NULL_TREE;
13882 }
13883
13884 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13885 {
13886 unsigned count = VECTOR_CST_NELTS (op0);
13887 tree *elts = XALLOCAVEC (tree, count);
13888 gcc_assert (VECTOR_CST_NELTS (op1) == count
13889 && TYPE_VECTOR_SUBPARTS (type) == count);
13890
13891 for (unsigned i = 0; i < count; i++)
13892 {
13893 tree elem_type = TREE_TYPE (type);
13894 tree elem0 = VECTOR_CST_ELT (op0, i);
13895 tree elem1 = VECTOR_CST_ELT (op1, i);
13896
13897 tree tem = fold_relational_const (code, elem_type,
13898 elem0, elem1);
13899
13900 if (tem == NULL_TREE)
13901 return NULL_TREE;
13902
13903 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13904 }
13905
13906 return build_vector (type, elts);
13907 }
13908
13909 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13910
13911 To compute GT, swap the arguments and do LT.
13912 To compute GE, do LT and invert the result.
13913 To compute LE, swap the arguments, do LT and invert the result.
13914 To compute NE, do EQ and invert the result.
13915
13916 Therefore, the code below must handle only EQ and LT. */
13917
13918 if (code == LE_EXPR || code == GT_EXPR)
13919 {
13920 std::swap (op0, op1);
13921 code = swap_tree_comparison (code);
13922 }
13923
13924 /* Note that it is safe to invert for real values here because we
13925 have already handled the one case where it matters (NaN operands). */
13926
13927 invert = 0;
13928 if (code == NE_EXPR || code == GE_EXPR)
13929 {
13930 invert = 1;
13931 code = invert_tree_comparison (code, false);
13932 }
13933
13934 /* Compute a result for LT or EQ if args permit;
13935 otherwise return NULL_TREE. */
13936 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13937 {
13938 if (code == EQ_EXPR)
13939 result = tree_int_cst_equal (op0, op1);
13940 else
13941 result = tree_int_cst_lt (op0, op1);
13942 }
13943 else
13944 return NULL_TREE;
13945
13946 if (invert)
13947 result ^= 1;
13948 return constant_boolean_node (result, type);
13949 }
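
/* Illustration (not part of fold-const.c): a stand-alone C99 check of
   the IEEE semantics the NaN switch above encodes -- ordered
   comparisons with a NaN operand yield false, NE yields true, and the
   unordered predicates are their complements.  Compile separately.  */
#if 0
#include <math.h>
#include <assert.h>

int
main (void)
{
  double n = NAN;
  assert (!(n == 1.0) && !(n < 1.0) && !(n >= 1.0)); /* EQ/LT/GE -> 0 */
  assert (n != 1.0);				     /* NE -> 1 */
  assert (isunordered (n, 1.0));		     /* UNORDERED -> 1 */
  assert (1.0 < 2.0 && !(2.0 < 1.0));		     /* ordinary LT */
  return 0;
}
#endif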
13950
13951 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13952 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13953 itself. */
13954
13955 tree
13956 fold_build_cleanup_point_expr (tree type, tree expr)
13957 {
13958 /* If the expression does not have side effects then we don't have to wrap
13959 it with a cleanup point expression. */
13960 if (!TREE_SIDE_EFFECTS (expr))
13961 return expr;
13962
13963 /* If the expression is a RETURN_EXPR, check whether its operand, or the
13964 right-hand side of the MODIFY_EXPR inside the return, is free of side
13965 effects. If either of them has none, we do not need to wrap the
13966 expression in a cleanup point expression. Note we don't check the
13967 left-hand side of the MODIFY_EXPR, because it should always be the return decl. */
13968 if (TREE_CODE (expr) == RETURN_EXPR)
13969 {
13970 tree op = TREE_OPERAND (expr, 0);
13971 if (!op || !TREE_SIDE_EFFECTS (op))
13972 return expr;
13973 op = TREE_OPERAND (op, 1);
13974 if (!TREE_SIDE_EFFECTS (op))
13975 return expr;
13976 }
13977
13978 return build1 (CLEANUP_POINT_EXPR, type, expr);
13979 }
13980
13981 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13982 of an indirection through OP0, or NULL_TREE if no simplification is
13983 possible. */
13984
13985 tree
13986 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13987 {
13988 tree sub = op0;
13989 tree subtype;
13990
13991 STRIP_NOPS (sub);
13992 subtype = TREE_TYPE (sub);
13993 if (!POINTER_TYPE_P (subtype))
13994 return NULL_TREE;
13995
13996 if (TREE_CODE (sub) == ADDR_EXPR)
13997 {
13998 tree op = TREE_OPERAND (sub, 0);
13999 tree optype = TREE_TYPE (op);
14000 /* *&CONST_DECL -> the value of the const decl. */
14001 if (TREE_CODE (op) == CONST_DECL)
14002 return DECL_INITIAL (op);
14003 /* *&p => p; make sure to handle *&"str"[cst] here. */
14004 if (type == optype)
14005 {
14006 tree fop = fold_read_from_constant_string (op);
14007 if (fop)
14008 return fop;
14009 else
14010 return op;
14011 }
14012 /* *(foo *)&fooarray => fooarray[0] */
14013 else if (TREE_CODE (optype) == ARRAY_TYPE
14014 && type == TREE_TYPE (optype)
14015 && (!in_gimple_form
14016 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14017 {
14018 tree type_domain = TYPE_DOMAIN (optype);
14019 tree min_val = size_zero_node;
14020 if (type_domain && TYPE_MIN_VALUE (type_domain))
14021 min_val = TYPE_MIN_VALUE (type_domain);
14022 if (in_gimple_form
14023 && TREE_CODE (min_val) != INTEGER_CST)
14024 return NULL_TREE;
14025 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14026 NULL_TREE, NULL_TREE);
14027 }
14028 /* *(foo *)&complexfoo => __real__ complexfoo */
14029 else if (TREE_CODE (optype) == COMPLEX_TYPE
14030 && type == TREE_TYPE (optype))
14031 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14032 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14033 else if (TREE_CODE (optype) == VECTOR_TYPE
14034 && type == TREE_TYPE (optype))
14035 {
14036 tree part_width = TYPE_SIZE (type);
14037 tree index = bitsize_int (0);
14038 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14039 }
14040 }
14041
14042 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14043 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14044 {
14045 tree op00 = TREE_OPERAND (sub, 0);
14046 tree op01 = TREE_OPERAND (sub, 1);
14047
14048 STRIP_NOPS (op00);
14049 if (TREE_CODE (op00) == ADDR_EXPR)
14050 {
14051 tree op00type;
14052 op00 = TREE_OPERAND (op00, 0);
14053 op00type = TREE_TYPE (op00);
14054
14055 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14056 if (TREE_CODE (op00type) == VECTOR_TYPE
14057 && type == TREE_TYPE (op00type))
14058 {
14059 HOST_WIDE_INT offset = tree_to_shwi (op01);
14060 tree part_width = TYPE_SIZE (type);
14061 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
14062 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14063 tree index = bitsize_int (indexi);
14064
14065 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14066 return fold_build3_loc (loc,
14067 BIT_FIELD_REF, type, op00,
14068 part_width, index);
14069
14070 }
14071 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14072 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14073 && type == TREE_TYPE (op00type))
14074 {
14075 tree size = TYPE_SIZE_UNIT (type);
14076 if (tree_int_cst_equal (size, op01))
14077 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14078 }
14079 /* ((foo *)&fooarray)[1] => fooarray[1] */
14080 else if (TREE_CODE (op00type) == ARRAY_TYPE
14081 && type == TREE_TYPE (op00type))
14082 {
14083 tree type_domain = TYPE_DOMAIN (op00type);
14084 tree min_val = size_zero_node;
14085 if (type_domain && TYPE_MIN_VALUE (type_domain))
14086 min_val = TYPE_MIN_VALUE (type_domain);
14087 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14088 TYPE_SIZE_UNIT (type));
14089 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14090 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14091 NULL_TREE, NULL_TREE);
14092 }
14093 }
14094 }
14095
14096 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14097 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14098 && type == TREE_TYPE (TREE_TYPE (subtype))
14099 && (!in_gimple_form
14100 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14101 {
14102 tree type_domain;
14103 tree min_val = size_zero_node;
14104 sub = build_fold_indirect_ref_loc (loc, sub);
14105 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14106 if (type_domain && TYPE_MIN_VALUE (type_domain))
14107 min_val = TYPE_MIN_VALUE (type_domain);
14108 if (in_gimple_form
14109 && TREE_CODE (min_val) != INTEGER_CST)
14110 return NULL_TREE;
14111 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14112 NULL_TREE);
14113 }
14114
14115 return NULL_TREE;
14116 }
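
/* Illustration (not part of fold-const.c): a C-level analogue of the
   folds above.  Dereferencing the address of an array at element type
   yields element 0, and offsetting the address by a multiple of the
   element size indexes the array -- the ARRAY_REF and
   POINTER_PLUS_EXPR cases rebuild exactly these accesses in tree form.
   Compile separately.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a[4] = { 10, 20, 30, 40 };
  /* *(int *) &a  ==>  a[0]  */
  assert (*(int *) &a == a[0]);
  /* *(int *) ((char *) &a + 2 * sizeof (int))  ==>  a[2]  */
  assert (*(int *) ((char *) &a + 2 * sizeof (int)) == a[2]);
  return 0;
}
#endif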
14117
14118 /* Builds an expression for an indirection through T, simplifying some
14119 cases. */
14120
14121 tree
14122 build_fold_indirect_ref_loc (location_t loc, tree t)
14123 {
14124 tree type = TREE_TYPE (TREE_TYPE (t));
14125 tree sub = fold_indirect_ref_1 (loc, type, t);
14126
14127 if (sub)
14128 return sub;
14129
14130 return build1_loc (loc, INDIRECT_REF, type, t);
14131 }
14132
14133 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14134
14135 tree
14136 fold_indirect_ref_loc (location_t loc, tree t)
14137 {
14138 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14139
14140 if (sub)
14141 return sub;
14142 else
14143 return t;
14144 }
14145
14146 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14147 whose result is ignored. The type of the returned tree need not be
14148 the same as that of the original expression. */
14149
14150 tree
14151 fold_ignored_result (tree t)
14152 {
14153 if (!TREE_SIDE_EFFECTS (t))
14154 return integer_zero_node;
14155
14156 for (;;)
14157 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14158 {
14159 case tcc_unary:
14160 t = TREE_OPERAND (t, 0);
14161 break;
14162
14163 case tcc_binary:
14164 case tcc_comparison:
14165 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14166 t = TREE_OPERAND (t, 0);
14167 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14168 t = TREE_OPERAND (t, 1);
14169 else
14170 return t;
14171 break;
14172
14173 case tcc_expression:
14174 switch (TREE_CODE (t))
14175 {
14176 case COMPOUND_EXPR:
14177 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14178 return t;
14179 t = TREE_OPERAND (t, 0);
14180 break;
14181
14182 case COND_EXPR:
14183 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14184 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14185 return t;
14186 t = TREE_OPERAND (t, 0);
14187 break;
14188
14189 default:
14190 return t;
14191 }
14192 break;
14193
14194 default:
14195 return t;
14196 }
14197 }
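
/* Illustration (not part of fold-const.c): the intuition behind
   fold_ignored_result at the C level.  When a value is discarded, only
   subexpressions with side effects must survive; below, the addition
   and the read of X can be dropped, but the call to F cannot.  Compile
   separately.  */
#if 0
#include <assert.h>

static int calls;

static int
f (void)
{
  return ++calls;		/* the side effect that must be kept */
}

int
main (void)
{
  int x = 7;
  (void) (x + f ());		/* result ignored; reduces to just f () */
  assert (calls == 1);
  return 0;
}
#endif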
14198
14199 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14200
14201 tree
14202 round_up_loc (location_t loc, tree value, unsigned int divisor)
14203 {
14204 tree div = NULL_TREE;
14205
14206 if (divisor == 1)
14207 return value;
14208
14209 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14210 have to do anything. Only do this when VALUE is not a constant,
14211 because for a constant this check is more expensive than simply
14212 doing the rounding. */
14213 if (TREE_CODE (value) != INTEGER_CST)
14214 {
14215 div = build_int_cst (TREE_TYPE (value), divisor);
14216
14217 if (multiple_of_p (TREE_TYPE (value), value, div))
14218 return value;
14219 }
14220
14221 /* If divisor is a power of two, simplify this to bit manipulation. */
14222 if (divisor == (divisor & -divisor))
14223 {
14224 if (TREE_CODE (value) == INTEGER_CST)
14225 {
14226 wide_int val = value;
14227 bool overflow_p;
14228
14229 if ((val & (divisor - 1)) == 0)
14230 return value;
14231
14232 overflow_p = TREE_OVERFLOW (value);
14233 val += divisor - 1;
14234 val &= - (int) divisor;
14235 if (val == 0)
14236 overflow_p = true;
14237
14238 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14239 }
14240 else
14241 {
14242 tree t;
14243
14244 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14245 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14246 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14247 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14248 }
14249 }
14250 else
14251 {
14252 if (!div)
14253 div = build_int_cst (TREE_TYPE (value), divisor);
14254 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14255 value = size_binop_loc (loc, MULT_EXPR, value, div);
14256 }
14257
14258 return value;
14259 }
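
/* Illustration (not part of fold-const.c): the power-of-two fast path
   above in stand-alone C.  Rounding V up to a multiple of D, with D a
   power of two, is (V + D - 1) & -D -- the PLUS_EXPR/BIT_AND_EXPR pair
   built for non-constant values.  Compile separately.  */
#if 0
#include <assert.h>

static unsigned int
round_up_pow2 (unsigned int v, unsigned int d)
{
  return (v + d - 1) & -d;	/* D must be a power of two */
}

int
main (void)
{
  assert (round_up_pow2 (13, 8) == 16);
  assert (round_up_pow2 (16, 8) == 16);	/* multiples are unchanged */
  assert (round_up_pow2 (1, 4) == 4);
  return 0;
}
#endif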
14260
14261 /* Likewise, but round down. */
14262
14263 tree
14264 round_down_loc (location_t loc, tree value, int divisor)
14265 {
14266 tree div = NULL_TREE;
14267
14268 gcc_assert (divisor > 0);
14269 if (divisor == 1)
14270 return value;
14271
14272 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14273 have to do anything. Only do this when VALUE is not a constant,
14274 because for a constant this check is more expensive than simply
14275 doing the rounding. */
14276 if (TREE_CODE (value) != INTEGER_CST)
14277 {
14278 div = build_int_cst (TREE_TYPE (value), divisor);
14279
14280 if (multiple_of_p (TREE_TYPE (value), value, div))
14281 return value;
14282 }
14283
14284 /* If divisor is a power of two, simplify this to bit manipulation. */
14285 if (divisor == (divisor & -divisor))
14286 {
14287 tree t;
14288
14289 t = build_int_cst (TREE_TYPE (value), -divisor);
14290 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14291 }
14292 else
14293 {
14294 if (!div)
14295 div = build_int_cst (TREE_TYPE (value), divisor);
14296 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14297 value = size_binop_loc (loc, MULT_EXPR, value, div);
14298 }
14299
14300 return value;
14301 }
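
/* Illustration (not part of fold-const.c): the matching round-down
   fast path.  Masking with -D clears the low log2(D) bits, which is
   the single BIT_AND_EXPR built above.  Compile separately.  */
#if 0
#include <assert.h>

static unsigned int
round_down_pow2 (unsigned int v, unsigned int d)
{
  return v & -d;		/* D must be a power of two */
}

int
main (void)
{
  assert (round_down_pow2 (13, 8) == 8);
  assert (round_down_pow2 (16, 8) == 16);
  return 0;
}
#endif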
14302
14303 /* Returns the pointer to the base of the object addressed by EXP and
14304 extracts the information about the offset of the access, storing it
14305 in PBITPOS and POFFSET. */
14306
14307 static tree
14308 split_address_to_core_and_offset (tree exp,
14309 HOST_WIDE_INT *pbitpos, tree *poffset)
14310 {
14311 tree core;
14312 machine_mode mode;
14313 int unsignedp, reversep, volatilep;
14314 HOST_WIDE_INT bitsize;
14315 location_t loc = EXPR_LOCATION (exp);
14316
14317 if (TREE_CODE (exp) == ADDR_EXPR)
14318 {
14319 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14320 poffset, &mode, &unsignedp, &reversep,
14321 &volatilep, false);
14322 core = build_fold_addr_expr_loc (loc, core);
14323 }
14324 else
14325 {
14326 core = exp;
14327 *pbitpos = 0;
14328 *poffset = NULL_TREE;
14329 }
14330
14331 return core;
14332 }
14333
14334 /* Returns true if addresses of E1 and E2 differ by a constant, false
14335 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14336
14337 bool
14338 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14339 {
14340 tree core1, core2;
14341 HOST_WIDE_INT bitpos1, bitpos2;
14342 tree toffset1, toffset2, tdiff, type;
14343
14344 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14345 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14346
14347 if (bitpos1 % BITS_PER_UNIT != 0
14348 || bitpos2 % BITS_PER_UNIT != 0
14349 || !operand_equal_p (core1, core2, 0))
14350 return false;
14351
14352 if (toffset1 && toffset2)
14353 {
14354 type = TREE_TYPE (toffset1);
14355 if (type != TREE_TYPE (toffset2))
14356 toffset2 = fold_convert (type, toffset2);
14357
14358 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14359 if (!cst_and_fits_in_hwi (tdiff))
14360 return false;
14361
14362 *diff = int_cst_value (tdiff);
14363 }
14364 else if (toffset1 || toffset2)
14365 {
14366 /* If only one of the offsets is non-constant, the difference cannot
14367 be a constant. */
14368 return false;
14369 }
14370 else
14371 *diff = 0;
14372
14373 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14374 return true;
14375 }
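
/* Illustration (not part of fold-const.c): the property the function
   above tests, at the C level.  Two addresses into the same object
   differ by a compile-time constant exactly when their offsets from
   the shared base are constant.  Compile separately.  */
#if 0
#include <assert.h>
#include <stddef.h>

int
main (void)
{
  struct { int a; int b[8]; } s;
  ptrdiff_t diff = (char *) &s.b[5] - (char *) &s.b[2];
  assert (diff == 3 * (ptrdiff_t) sizeof (int)); /* constant difference */
  return 0;
}
#endif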
14376
14377 /* Return OFF converted to a pointer offset type suitable as offset for
14378 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14379 tree
14380 convert_to_ptrofftype_loc (location_t loc, tree off)
14381 {
14382 return fold_convert_loc (loc, sizetype, off);
14383 }
14384
14385 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14386 tree
14387 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14388 {
14389 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14390 ptr, convert_to_ptrofftype_loc (loc, off));
14391 }
14392
14393 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14394 tree
14395 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14396 {
14397 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14398 ptr, size_int (off));
14399 }
14400
14401 /* Return a char pointer for a C string if it is a string constant
14402 or a sum of a string constant and an integer constant. */
14403
14404 const char *
14405 c_getstr (tree src)
14406 {
14407 tree offset_node;
14408
14409 src = string_constant (src, &offset_node);
14410 if (src == 0)
14411 return 0;
14412
14413 if (offset_node == 0)
14414 return TREE_STRING_POINTER (src);
14415 else if (!tree_fits_uhwi_p (offset_node)
14416 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14417 return 0;
14418
14419 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
14420 }
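
/* Illustration (not part of fold-const.c): what c_getstr computes for
   a "string constant plus integer constant" address, at the C level --
   the offset simply advances the character pointer, provided it stays
   within the string (the bound the compare_tree_int check above
   enforces).  Compile separately.  */
#if 0
#include <assert.h>
#include <string.h>

int
main (void)
{
  const char *p = "hello" + 2;	/* string constant + integer constant */
  assert (strcmp (p, "llo") == 0);
  return 0;
}
#endif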