This series of patches fixes PR61441.
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24       @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "tree-ssa-operands.h"
54 #include "optabs-query.h"
55 #include "cgraph.h"
56 #include "diagnostic-core.h"
57 #include "flags.h"
58 #include "alias.h"
59 #include "fold-const.h"
60 #include "fold-const-call.h"
61 #include "stor-layout.h"
62 #include "calls.h"
63 #include "tree-iterator.h"
64 #include "expr.h"
65 #include "intl.h"
66 #include "langhooks.h"
67 #include "tree-eh.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "builtins.h"
71 #include "generic-match.h"
72 #include "gimple-fold.h"
73 #include "params.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77
78 #ifndef LOAD_EXTEND_OP
79 #define LOAD_EXTEND_OP(M) UNKNOWN
80 #endif
81
82 /* Nonzero if we are folding constants inside an initializer; zero
83 otherwise. */
84 int folding_initializer = 0;
85
86 /* The following constants represent a bit-based encoding of GCC's
87    comparison operators.  This encoding simplifies transformations
88    on relational comparisons, such as ANDing or ORing them together.  */
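/* Each code is a four-bit mask: bit 0 stands for "less than", bit 1 for
   "equal", bit 2 for "greater than" and bit 3 for "unordered", so ORing
   two codes yields the code of their disjunction, e.g.
   COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and
   COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD).  */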
89 enum comparison_code {
90 COMPCODE_FALSE = 0,
91 COMPCODE_LT = 1,
92 COMPCODE_EQ = 2,
93 COMPCODE_LE = 3,
94 COMPCODE_GT = 4,
95 COMPCODE_LTGT = 5,
96 COMPCODE_GE = 6,
97 COMPCODE_ORD = 7,
98 COMPCODE_UNORD = 8,
99 COMPCODE_UNLT = 9,
100 COMPCODE_UNEQ = 10,
101 COMPCODE_UNLE = 11,
102 COMPCODE_UNGT = 12,
103 COMPCODE_NE = 13,
104 COMPCODE_UNGE = 14,
105 COMPCODE_TRUE = 15
106 };
107
108 static bool negate_expr_p (tree);
109 static tree negate_expr (tree);
110 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
111 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
112 static enum comparison_code comparison_to_compcode (enum tree_code);
113 static enum tree_code compcode_to_comparison (enum comparison_code);
114 static int operand_equal_for_comparison_p (tree, tree, tree);
115 static int twoval_comparison_p (tree, tree *, tree *, int *);
116 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
117 static tree make_bit_field_ref (location_t, tree, tree,
118 HOST_WIDE_INT, HOST_WIDE_INT, int, int);
119 static tree optimize_bit_field_compare (location_t, enum tree_code,
120 tree, tree, tree);
121 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
122 HOST_WIDE_INT *,
123 machine_mode *, int *, int *, int *,
124 tree *, tree *);
125 static int simple_operand_p (const_tree);
126 static bool simple_operand_p_2 (tree);
127 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
128 static tree range_predecessor (tree);
129 static tree range_successor (tree);
130 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
131 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
132 static tree unextend (tree, int, int, tree);
133 static tree optimize_minmax_comparison (location_t, enum tree_code,
134 tree, tree, tree);
135 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
136 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
137 static tree fold_binary_op_with_conditional_arg (location_t,
138 enum tree_code, tree,
139 tree, tree,
140 tree, tree, int);
141 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (const_tree, const_tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (const_tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static tree fold_convert_const (enum tree_code, tree, tree);
147 static tree fold_view_convert_expr (tree, tree);
148 static bool vec_cst_ctor_to_array (tree, tree *);
149
150
151 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
152 Otherwise, return LOC. */
153
154 static location_t
155 expr_location_or (tree t, location_t loc)
156 {
157 location_t tloc = EXPR_LOCATION (t);
158 return tloc == UNKNOWN_LOCATION ? loc : tloc;
159 }
160
161 /* Similar to protected_set_expr_location, but never modify x in place;
162    if the location can and needs to be set, unshare it.  */
163
164 static inline tree
165 protected_set_expr_location_unshare (tree x, location_t loc)
166 {
167 if (CAN_HAVE_LOCATION_P (x)
168 && EXPR_LOCATION (x) != loc
169 && !(TREE_CODE (x) == SAVE_EXPR
170 || TREE_CODE (x) == TARGET_EXPR
171 || TREE_CODE (x) == BIND_EXPR))
172 {
173 x = copy_node (x);
174 SET_EXPR_LOCATION (x, loc);
175 }
176 return x;
177 }
178 \f
179 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
180 division and returns the quotient. Otherwise returns
181 NULL_TREE. */
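/* For example, ARG1 == 12, ARG2 == 4 yields 3, while ARG1 == 14,
   ARG2 == 4 leaves a nonzero remainder and yields NULL_TREE.  */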
182
183 tree
184 div_if_zero_remainder (const_tree arg1, const_tree arg2)
185 {
186 widest_int quo;
187
188 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
189 SIGNED, &quo))
190 return wide_int_to_tree (TREE_TYPE (arg1), quo);
191
192 return NULL_TREE;
193 }
194 \f
195 /* This is nonzero if we should defer warnings about undefined
196 overflow. This facility exists because these warnings are a
197 special case. The code to estimate loop iterations does not want
198 to issue any warnings, since it works with expressions which do not
199 occur in user code. Various bits of cleanup code call fold(), but
200 only use the result if it has certain characteristics (e.g., is a
201 constant); that code only wants to issue a warning if the result is
202 used. */
203
204 static int fold_deferring_overflow_warnings;
205
206 /* If a warning about undefined overflow is deferred, this is the
207 warning. Note that this may cause us to turn two warnings into
208 one, but that is fine since it is sufficient to only give one
209 warning per expression. */
210
211 static const char* fold_deferred_overflow_warning;
212
213 /* If a warning about undefined overflow is deferred, this is the
214 level at which the warning should be emitted. */
215
216 static enum warn_strict_overflow_code fold_deferred_overflow_code;
217
218 /* Start deferring overflow warnings. We could use a stack here to
219 permit nested calls, but at present it is not necessary. */
220
221 void
222 fold_defer_overflow_warnings (void)
223 {
224 ++fold_deferring_overflow_warnings;
225 }
226
227 /* Stop deferring overflow warnings. If there is a pending warning,
228 and ISSUE is true, then issue the warning if appropriate. STMT is
229 the statement with which the warning should be associated (used for
230 location information); STMT may be NULL. CODE is the level of the
231 warning--a warn_strict_overflow_code value. This function will use
232 the smaller of CODE and the deferred code when deciding whether to
233 issue the warning. CODE may be zero to mean to always use the
234 deferred code. */
235
236 void
237 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
238 {
239 const char *warnmsg;
240 location_t locus;
241
242 gcc_assert (fold_deferring_overflow_warnings > 0);
243 --fold_deferring_overflow_warnings;
244 if (fold_deferring_overflow_warnings > 0)
245 {
246 if (fold_deferred_overflow_warning != NULL
247 && code != 0
248 && code < (int) fold_deferred_overflow_code)
249 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
250 return;
251 }
252
253 warnmsg = fold_deferred_overflow_warning;
254 fold_deferred_overflow_warning = NULL;
255
256 if (!issue || warnmsg == NULL)
257 return;
258
259 if (gimple_no_warning_p (stmt))
260 return;
261
262 /* Use the smallest code level when deciding to issue the
263 warning. */
264 if (code == 0 || code > (int) fold_deferred_overflow_code)
265 code = fold_deferred_overflow_code;
266
267 if (!issue_strict_overflow_warning (code))
268 return;
269
270 if (stmt == NULL)
271 locus = input_location;
272 else
273 locus = gimple_location (stmt);
274 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
275 }
276
277 /* Stop deferring overflow warnings, ignoring any deferred
278 warnings. */
279
280 void
281 fold_undefer_and_ignore_overflow_warnings (void)
282 {
283 fold_undefer_overflow_warnings (false, NULL, 0);
284 }
285
286 /* Whether we are deferring overflow warnings. */
287
288 bool
289 fold_deferring_overflow_warnings_p (void)
290 {
291 return fold_deferring_overflow_warnings > 0;
292 }
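
/* A minimal sketch of how the deferral entry points are meant to be
   paired (hypothetical caller code, not copied from any real caller):

     fold_defer_overflow_warnings ();
     tree t = fold (expr);
     bool use_result = t != NULL_TREE && TREE_CONSTANT (t);
     fold_undefer_overflow_warnings (use_result, stmt,
				     WARN_STRICT_OVERFLOW_MISC);

   Any deferred -Wstrict-overflow warning is only issued when the first
   argument to fold_undefer_overflow_warnings is true.  */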
293
294 /* This is called when we fold something based on the fact that signed
295 overflow is undefined. */
296
297 static void
298 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
299 {
300 if (fold_deferring_overflow_warnings > 0)
301 {
302 if (fold_deferred_overflow_warning == NULL
303 || wc < fold_deferred_overflow_code)
304 {
305 fold_deferred_overflow_warning = gmsgid;
306 fold_deferred_overflow_code = wc;
307 }
308 }
309 else if (issue_strict_overflow_warning (wc))
310 warning (OPT_Wstrict_overflow, gmsgid);
311 }
312 \f
313 /* Return true if the built-in mathematical function specified by CODE
314 is odd, i.e. -f(x) == f(-x). */
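/* For example, sin qualifies because sin(-x) == -sin(x).  The rint
   family qualifies only when -frounding-math is off: under upward
   rounding, rint (-0.5) is -0.0 while -rint (0.5) is -1.0.  */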
315
316 bool
317 negate_mathfn_p (combined_fn fn)
318 {
319 switch (fn)
320 {
321 CASE_CFN_ASIN:
322 CASE_CFN_ASINH:
323 CASE_CFN_ATAN:
324 CASE_CFN_ATANH:
325 CASE_CFN_CASIN:
326 CASE_CFN_CASINH:
327 CASE_CFN_CATAN:
328 CASE_CFN_CATANH:
329 CASE_CFN_CBRT:
330 CASE_CFN_CPROJ:
331 CASE_CFN_CSIN:
332 CASE_CFN_CSINH:
333 CASE_CFN_CTAN:
334 CASE_CFN_CTANH:
335 CASE_CFN_ERF:
336 CASE_CFN_LLROUND:
337 CASE_CFN_LROUND:
338 CASE_CFN_ROUND:
339 CASE_CFN_SIN:
340 CASE_CFN_SINH:
341 CASE_CFN_TAN:
342 CASE_CFN_TANH:
343 CASE_CFN_TRUNC:
344 return true;
345
346 CASE_CFN_LLRINT:
347 CASE_CFN_LRINT:
348 CASE_CFN_NEARBYINT:
349 CASE_CFN_RINT:
350 return !flag_rounding_math;
351
352 default:
353 break;
354 }
355 return false;
356 }
357
358 /* Check whether we may negate an integer constant T without causing
359 overflow. */
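/* In a signed type the only such value is the minimum one, e.g. INT_MIN
   for int: it is the single bit pattern with just the sign bit set,
   which is exactly what wi::only_sign_bit_p tests for.  */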
360
361 bool
362 may_negate_without_overflow_p (const_tree t)
363 {
364 tree type;
365
366 gcc_assert (TREE_CODE (t) == INTEGER_CST);
367
368 type = TREE_TYPE (t);
369 if (TYPE_UNSIGNED (type))
370 return false;
371
372 return !wi::only_sign_bit_p (t);
373 }
374
375 /* Determine whether an expression T can be cheaply negated using
376 the function negate_expr without introducing undefined overflow. */
377
378 static bool
379 negate_expr_p (tree t)
380 {
381 tree type;
382
383 if (t == 0)
384 return false;
385
386 type = TREE_TYPE (t);
387
388 STRIP_SIGN_NOPS (t);
389 switch (TREE_CODE (t))
390 {
391 case INTEGER_CST:
392 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
393 return true;
394
395 /* Check that -CST will not overflow type. */
396 return may_negate_without_overflow_p (t);
397 case BIT_NOT_EXPR:
398 return (INTEGRAL_TYPE_P (type)
399 && TYPE_OVERFLOW_WRAPS (type));
400
401 case FIXED_CST:
402 return true;
403
404 case NEGATE_EXPR:
405 return !TYPE_OVERFLOW_SANITIZED (type);
406
407 case REAL_CST:
408 /* We want to canonicalize to positive real constants. Pretend
409 that only negative ones can be easily negated. */
410 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
411
412 case COMPLEX_CST:
413 return negate_expr_p (TREE_REALPART (t))
414 && negate_expr_p (TREE_IMAGPART (t));
415
416 case VECTOR_CST:
417 {
418 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
419 return true;
420
421 int count = TYPE_VECTOR_SUBPARTS (type), i;
422
423 for (i = 0; i < count; i++)
424 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
425 return false;
426
427 return true;
428 }
429
430 case COMPLEX_EXPR:
431 return negate_expr_p (TREE_OPERAND (t, 0))
432 && negate_expr_p (TREE_OPERAND (t, 1));
433
434 case CONJ_EXPR:
435 return negate_expr_p (TREE_OPERAND (t, 0));
436
437 case PLUS_EXPR:
438 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
439 || HONOR_SIGNED_ZEROS (element_mode (type))
440 || (INTEGRAL_TYPE_P (type)
441 && ! TYPE_OVERFLOW_WRAPS (type)))
442 return false;
443 /* -(A + B) -> (-B) - A. */
444 if (negate_expr_p (TREE_OPERAND (t, 1))
445 && reorder_operands_p (TREE_OPERAND (t, 0),
446 TREE_OPERAND (t, 1)))
447 return true;
448 /* -(A + B) -> (-A) - B. */
449 return negate_expr_p (TREE_OPERAND (t, 0));
450
451 case MINUS_EXPR:
452 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
453 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
454 && !HONOR_SIGNED_ZEROS (element_mode (type))
455 && (! INTEGRAL_TYPE_P (type)
456 || TYPE_OVERFLOW_WRAPS (type))
457 && reorder_operands_p (TREE_OPERAND (t, 0),
458 TREE_OPERAND (t, 1));
459
460 case MULT_EXPR:
461 if (TYPE_UNSIGNED (type))
462 break;
463       /* INT_MIN/n * n doesn't overflow, but negating one of its operands
464 	 does if n is a power of two.  */
465 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
466 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
467 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
468 && ! integer_pow2p (TREE_OPERAND (t, 0)))
469 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
470 && ! integer_pow2p (TREE_OPERAND (t, 1)))))
471 break;
472
473 /* Fall through. */
474
475 case RDIV_EXPR:
476 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
477 return negate_expr_p (TREE_OPERAND (t, 1))
478 || negate_expr_p (TREE_OPERAND (t, 0));
479 break;
480
481 case TRUNC_DIV_EXPR:
482 case ROUND_DIV_EXPR:
483 case EXACT_DIV_EXPR:
484 if (TYPE_UNSIGNED (type))
485 break;
486 if (negate_expr_p (TREE_OPERAND (t, 0)))
487 return true;
488 /* In general we can't negate B in A / B, because if A is INT_MIN and
489 B is 1, we may turn this into INT_MIN / -1 which is undefined
490 and actually traps on some architectures. */
491 if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
492 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
493 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
494 && ! integer_onep (TREE_OPERAND (t, 1))))
495 return negate_expr_p (TREE_OPERAND (t, 1));
496 break;
497
498 case NOP_EXPR:
499 /* Negate -((double)float) as (double)(-float). */
500 if (TREE_CODE (type) == REAL_TYPE)
501 {
502 tree tem = strip_float_extensions (t);
503 if (tem != t)
504 return negate_expr_p (tem);
505 }
506 break;
507
508 case CALL_EXPR:
509 /* Negate -f(x) as f(-x). */
510 if (negate_mathfn_p (get_call_combined_fn (t)))
511 return negate_expr_p (CALL_EXPR_ARG (t, 0));
512 break;
513
514 case RSHIFT_EXPR:
515 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
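      /* The arithmetic shift yields 0 or -1; its negation, 0 or 1, is
	 exactly the sign bit shifted logically, so the NEGATE_EXPR can be
	 dropped.  */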
516 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
517 {
518 tree op1 = TREE_OPERAND (t, 1);
519 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
520 return true;
521 }
522 break;
523
524 default:
525 break;
526 }
527 return false;
528 }
529
530 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if
531    no simplification is possible.
532 If negate_expr_p would return true for T, NULL_TREE will never be
533 returned. */
534
535 static tree
536 fold_negate_expr (location_t loc, tree t)
537 {
538 tree type = TREE_TYPE (t);
539 tree tem;
540
541 switch (TREE_CODE (t))
542 {
543 /* Convert - (~A) to A + 1. */
544 case BIT_NOT_EXPR:
545 if (INTEGRAL_TYPE_P (type))
546 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
547 build_one_cst (type));
548 break;
549
550 case INTEGER_CST:
551 tem = fold_negate_const (t, type);
552 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
553 || (ANY_INTEGRAL_TYPE_P (type)
554 && !TYPE_OVERFLOW_TRAPS (type)
555 && TYPE_OVERFLOW_WRAPS (type))
556 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
557 return tem;
558 break;
559
560 case REAL_CST:
561 tem = fold_negate_const (t, type);
562 return tem;
563
564 case FIXED_CST:
565 tem = fold_negate_const (t, type);
566 return tem;
567
568 case COMPLEX_CST:
569 {
570 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
571 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
572 if (rpart && ipart)
573 return build_complex (type, rpart, ipart);
574 }
575 break;
576
577 case VECTOR_CST:
578 {
579 int count = TYPE_VECTOR_SUBPARTS (type), i;
580 tree *elts = XALLOCAVEC (tree, count);
581
582 for (i = 0; i < count; i++)
583 {
584 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
585 if (elts[i] == NULL_TREE)
586 return NULL_TREE;
587 }
588
589 return build_vector (type, elts);
590 }
591
592 case COMPLEX_EXPR:
593 if (negate_expr_p (t))
594 return fold_build2_loc (loc, COMPLEX_EXPR, type,
595 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
596 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
597 break;
598
599 case CONJ_EXPR:
600 if (negate_expr_p (t))
601 return fold_build1_loc (loc, CONJ_EXPR, type,
602 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
603 break;
604
605 case NEGATE_EXPR:
606 if (!TYPE_OVERFLOW_SANITIZED (type))
607 return TREE_OPERAND (t, 0);
608 break;
609
610 case PLUS_EXPR:
611 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
612 && !HONOR_SIGNED_ZEROS (element_mode (type)))
613 {
614 /* -(A + B) -> (-B) - A. */
615 if (negate_expr_p (TREE_OPERAND (t, 1))
616 && reorder_operands_p (TREE_OPERAND (t, 0),
617 TREE_OPERAND (t, 1)))
618 {
619 tem = negate_expr (TREE_OPERAND (t, 1));
620 return fold_build2_loc (loc, MINUS_EXPR, type,
621 tem, TREE_OPERAND (t, 0));
622 }
623
624 /* -(A + B) -> (-A) - B. */
625 if (negate_expr_p (TREE_OPERAND (t, 0)))
626 {
627 tem = negate_expr (TREE_OPERAND (t, 0));
628 return fold_build2_loc (loc, MINUS_EXPR, type,
629 tem, TREE_OPERAND (t, 1));
630 }
631 }
632 break;
633
634 case MINUS_EXPR:
635 /* - (A - B) -> B - A */
636 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
637 && !HONOR_SIGNED_ZEROS (element_mode (type))
638 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
639 return fold_build2_loc (loc, MINUS_EXPR, type,
640 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
641 break;
642
643 case MULT_EXPR:
644 if (TYPE_UNSIGNED (type))
645 break;
646
647 /* Fall through. */
648
649 case RDIV_EXPR:
650 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
651 {
652 tem = TREE_OPERAND (t, 1);
653 if (negate_expr_p (tem))
654 return fold_build2_loc (loc, TREE_CODE (t), type,
655 TREE_OPERAND (t, 0), negate_expr (tem));
656 tem = TREE_OPERAND (t, 0);
657 if (negate_expr_p (tem))
658 return fold_build2_loc (loc, TREE_CODE (t), type,
659 negate_expr (tem), TREE_OPERAND (t, 1));
660 }
661 break;
662
663 case TRUNC_DIV_EXPR:
664 case ROUND_DIV_EXPR:
665 case EXACT_DIV_EXPR:
666 if (TYPE_UNSIGNED (type))
667 break;
668 if (negate_expr_p (TREE_OPERAND (t, 0)))
669 return fold_build2_loc (loc, TREE_CODE (t), type,
670 negate_expr (TREE_OPERAND (t, 0)),
671 TREE_OPERAND (t, 1));
672 /* In general we can't negate B in A / B, because if A is INT_MIN and
673 B is 1, we may turn this into INT_MIN / -1 which is undefined
674 and actually traps on some architectures. */
675 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
676 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
677 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
678 && ! integer_onep (TREE_OPERAND (t, 1))))
679 && negate_expr_p (TREE_OPERAND (t, 1)))
680 return fold_build2_loc (loc, TREE_CODE (t), type,
681 TREE_OPERAND (t, 0),
682 negate_expr (TREE_OPERAND (t, 1)));
683 break;
684
685 case NOP_EXPR:
686 /* Convert -((double)float) into (double)(-float). */
687 if (TREE_CODE (type) == REAL_TYPE)
688 {
689 tem = strip_float_extensions (t);
690 if (tem != t && negate_expr_p (tem))
691 return fold_convert_loc (loc, type, negate_expr (tem));
692 }
693 break;
694
695 case CALL_EXPR:
696 /* Negate -f(x) as f(-x). */
697 if (negate_mathfn_p (get_call_combined_fn (t))
698 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
699 {
700 tree fndecl, arg;
701
702 fndecl = get_callee_fndecl (t);
703 arg = negate_expr (CALL_EXPR_ARG (t, 0));
704 return build_call_expr_loc (loc, fndecl, 1, arg);
705 }
706 break;
707
708 case RSHIFT_EXPR:
709 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
710 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
711 {
712 tree op1 = TREE_OPERAND (t, 1);
713 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
714 {
715 tree ntype = TYPE_UNSIGNED (type)
716 ? signed_type_for (type)
717 : unsigned_type_for (type);
718 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
719 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
720 return fold_convert_loc (loc, type, temp);
721 }
722 }
723 break;
724
725 default:
726 break;
727 }
728
729 return NULL_TREE;
730 }
731
732 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
733 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
734 return NULL_TREE. */
735
736 static tree
737 negate_expr (tree t)
738 {
739 tree type, tem;
740 location_t loc;
741
742 if (t == NULL_TREE)
743 return NULL_TREE;
744
745 loc = EXPR_LOCATION (t);
746 type = TREE_TYPE (t);
747 STRIP_SIGN_NOPS (t);
748
749 tem = fold_negate_expr (loc, t);
750 if (!tem)
751 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
752 return fold_convert_loc (loc, type, tem);
753 }
754 \f
755 /* Split a tree IN into constant, literal and variable parts that could be
756 combined with CODE to make IN. "constant" means an expression with
757 TREE_CONSTANT but that isn't an actual constant. CODE must be a
758 commutative arithmetic operation. Store the constant part into *CONP,
759 the literal in *LITP and return the variable part. If a part isn't
760 present, set it to null. If the tree does not decompose in this way,
761 return the entire tree as the variable part and the other parts as null.
762
763 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
764 case, we negate an operand that was subtracted. Except if it is a
765 literal for which we use *MINUS_LITP instead.
766
767 If NEGATE_P is true, we are negating all of IN, again except a literal
768 for which we use *MINUS_LITP instead.
769
770 If IN is itself a literal or constant, return it as appropriate.
771
772 Note that we do not guarantee that any of the three values will be the
773 same type as IN, but they will have the same signedness and mode. */
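/* For example, for IN == x - 3 and CODE == PLUS_EXPR, *MINUS_LITP is set
   to 3, *LITP and *CONP stay null, and x is returned as the variable
   part.  */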
774
775 static tree
776 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
777 tree *minus_litp, int negate_p)
778 {
779 tree var = 0;
780
781 *conp = 0;
782 *litp = 0;
783 *minus_litp = 0;
784
785 /* Strip any conversions that don't change the machine mode or signedness. */
786 STRIP_SIGN_NOPS (in);
787
788 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
789 || TREE_CODE (in) == FIXED_CST)
790 *litp = in;
791 else if (TREE_CODE (in) == code
792 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
793 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
794 /* We can associate addition and subtraction together (even
795 though the C standard doesn't say so) for integers because
796 the value is not affected. For reals, the value might be
797 affected, so we can't. */
798 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
799 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
800 {
801 tree op0 = TREE_OPERAND (in, 0);
802 tree op1 = TREE_OPERAND (in, 1);
803 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
804 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
805
806 /* First see if either of the operands is a literal, then a constant. */
807 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
808 || TREE_CODE (op0) == FIXED_CST)
809 *litp = op0, op0 = 0;
810 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
811 || TREE_CODE (op1) == FIXED_CST)
812 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
813
814 if (op0 != 0 && TREE_CONSTANT (op0))
815 *conp = op0, op0 = 0;
816 else if (op1 != 0 && TREE_CONSTANT (op1))
817 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
818
819 /* If we haven't dealt with either operand, this is not a case we can
820 decompose. Otherwise, VAR is either of the ones remaining, if any. */
821 if (op0 != 0 && op1 != 0)
822 var = in;
823 else if (op0 != 0)
824 var = op0;
825 else
826 var = op1, neg_var_p = neg1_p;
827
828 /* Now do any needed negations. */
829 if (neg_litp_p)
830 *minus_litp = *litp, *litp = 0;
831 if (neg_conp_p)
832 *conp = negate_expr (*conp);
833 if (neg_var_p)
834 var = negate_expr (var);
835 }
836 else if (TREE_CODE (in) == BIT_NOT_EXPR
837 && code == PLUS_EXPR)
838 {
839 /* -X - 1 is folded to ~X, undo that here. */
840 *minus_litp = build_one_cst (TREE_TYPE (in));
841 var = negate_expr (TREE_OPERAND (in, 0));
842 }
843 else if (TREE_CONSTANT (in))
844 *conp = in;
845 else
846 var = in;
847
848 if (negate_p)
849 {
850 if (*litp)
851 *minus_litp = *litp, *litp = 0;
852 else if (*minus_litp)
853 *litp = *minus_litp, *minus_litp = 0;
854 *conp = negate_expr (*conp);
855 var = negate_expr (var);
856 }
857
858 return var;
859 }
860
861 /* Re-associate trees split by the above function. T1 and T2 are
862 either expressions to associate or null. Return the new
863 expression, if any. LOC is the location of the new expression. If
864 we build an operation, do it in TYPE and with CODE. */
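/* E.g. combining T1 == x with T2 == -y under PLUS_EXPR produces the
   tree x - y rather than x + -y.  */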
865
866 static tree
867 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
868 {
869 if (t1 == 0)
870 return t2;
871 else if (t2 == 0)
872 return t1;
873
874 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
875 try to fold this since we will have infinite recursion. But do
876 deal with any NEGATE_EXPRs. */
877 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
878 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
879 {
880 if (code == PLUS_EXPR)
881 {
882 if (TREE_CODE (t1) == NEGATE_EXPR)
883 return build2_loc (loc, MINUS_EXPR, type,
884 fold_convert_loc (loc, type, t2),
885 fold_convert_loc (loc, type,
886 TREE_OPERAND (t1, 0)));
887 else if (TREE_CODE (t2) == NEGATE_EXPR)
888 return build2_loc (loc, MINUS_EXPR, type,
889 fold_convert_loc (loc, type, t1),
890 fold_convert_loc (loc, type,
891 TREE_OPERAND (t2, 0)));
892 else if (integer_zerop (t2))
893 return fold_convert_loc (loc, type, t1);
894 }
895 else if (code == MINUS_EXPR)
896 {
897 if (integer_zerop (t2))
898 return fold_convert_loc (loc, type, t1);
899 }
900
901 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
902 fold_convert_loc (loc, type, t2));
903 }
904
905 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
906 fold_convert_loc (loc, type, t2));
907 }
908 \f
909 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
910 for use in int_const_binop, size_binop and size_diffop. */
911
912 static bool
913 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
914 {
915 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
916 return false;
917 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
918 return false;
919
920 switch (code)
921 {
922 case LSHIFT_EXPR:
923 case RSHIFT_EXPR:
924 case LROTATE_EXPR:
925 case RROTATE_EXPR:
926 return true;
927
928 default:
929 break;
930 }
931
932 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
933 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
934 && TYPE_MODE (type1) == TYPE_MODE (type2);
935 }
936
937
938 /* Combine two integer constants ARG1 and ARG2 under operation CODE
939 to produce a new constant. Return NULL_TREE if we don't know how
940 to evaluate CODE at compile-time. */
941
942 static tree
943 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
944 int overflowable)
945 {
946 wide_int res;
947 tree t;
948 tree type = TREE_TYPE (arg1);
949 signop sign = TYPE_SIGN (type);
950 bool overflow = false;
951
952 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
953 TYPE_SIGN (TREE_TYPE (parg2)));
954
955 switch (code)
956 {
957 case BIT_IOR_EXPR:
958 res = wi::bit_or (arg1, arg2);
959 break;
960
961 case BIT_XOR_EXPR:
962 res = wi::bit_xor (arg1, arg2);
963 break;
964
965 case BIT_AND_EXPR:
966 res = wi::bit_and (arg1, arg2);
967 break;
968
969 case RSHIFT_EXPR:
970 case LSHIFT_EXPR:
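      /* A negative shift count is canonicalized by flipping the shift
	 direction, e.g. a left shift by -2 becomes a right shift by 2;
	 the rotate cases below do the same.  */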
971 if (wi::neg_p (arg2))
972 {
973 arg2 = -arg2;
974 if (code == RSHIFT_EXPR)
975 code = LSHIFT_EXPR;
976 else
977 code = RSHIFT_EXPR;
978 }
979
980 if (code == RSHIFT_EXPR)
981 /* It's unclear from the C standard whether shifts can overflow.
982 The following code ignores overflow; perhaps a C standard
983 interpretation ruling is needed. */
984 res = wi::rshift (arg1, arg2, sign);
985 else
986 res = wi::lshift (arg1, arg2);
987 break;
988
989 case RROTATE_EXPR:
990 case LROTATE_EXPR:
991 if (wi::neg_p (arg2))
992 {
993 arg2 = -arg2;
994 if (code == RROTATE_EXPR)
995 code = LROTATE_EXPR;
996 else
997 code = RROTATE_EXPR;
998 }
999
1000 if (code == RROTATE_EXPR)
1001 res = wi::rrotate (arg1, arg2);
1002 else
1003 res = wi::lrotate (arg1, arg2);
1004 break;
1005
1006 case PLUS_EXPR:
1007 res = wi::add (arg1, arg2, sign, &overflow);
1008 break;
1009
1010 case MINUS_EXPR:
1011 res = wi::sub (arg1, arg2, sign, &overflow);
1012 break;
1013
1014 case MULT_EXPR:
1015 res = wi::mul (arg1, arg2, sign, &overflow);
1016 break;
1017
1018 case MULT_HIGHPART_EXPR:
1019 res = wi::mul_high (arg1, arg2, sign);
1020 break;
1021
1022 case TRUNC_DIV_EXPR:
1023 case EXACT_DIV_EXPR:
1024 if (arg2 == 0)
1025 return NULL_TREE;
1026 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1027 break;
1028
1029 case FLOOR_DIV_EXPR:
1030 if (arg2 == 0)
1031 return NULL_TREE;
1032 res = wi::div_floor (arg1, arg2, sign, &overflow);
1033 break;
1034
1035 case CEIL_DIV_EXPR:
1036 if (arg2 == 0)
1037 return NULL_TREE;
1038 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1039 break;
1040
1041 case ROUND_DIV_EXPR:
1042 if (arg2 == 0)
1043 return NULL_TREE;
1044 res = wi::div_round (arg1, arg2, sign, &overflow);
1045 break;
1046
1047 case TRUNC_MOD_EXPR:
1048 if (arg2 == 0)
1049 return NULL_TREE;
1050 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1051 break;
1052
1053 case FLOOR_MOD_EXPR:
1054 if (arg2 == 0)
1055 return NULL_TREE;
1056 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1057 break;
1058
1059 case CEIL_MOD_EXPR:
1060 if (arg2 == 0)
1061 return NULL_TREE;
1062 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1063 break;
1064
1065 case ROUND_MOD_EXPR:
1066 if (arg2 == 0)
1067 return NULL_TREE;
1068 res = wi::mod_round (arg1, arg2, sign, &overflow);
1069 break;
1070
1071 case MIN_EXPR:
1072 res = wi::min (arg1, arg2, sign);
1073 break;
1074
1075 case MAX_EXPR:
1076 res = wi::max (arg1, arg2, sign);
1077 break;
1078
1079 default:
1080 return NULL_TREE;
1081 }
1082
1083 t = force_fit_type (type, res, overflowable,
1084 (((sign == SIGNED || overflowable == -1)
1085 && overflow)
1086 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1087
1088 return t;
1089 }
1090
1091 tree
1092 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1093 {
1094 return int_const_binop_1 (code, arg1, arg2, 1);
1095 }
1096
1097 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1098 constant. We assume ARG1 and ARG2 have the same data type, or at least
1099 are the same kind of constant and the same machine mode. Return zero if
1100 combining the constants is not allowed in the current operating mode. */
1101
1102 static tree
1103 const_binop (enum tree_code code, tree arg1, tree arg2)
1104 {
1105 /* Sanity check for the recursive cases. */
1106 if (!arg1 || !arg2)
1107 return NULL_TREE;
1108
1109 STRIP_NOPS (arg1);
1110 STRIP_NOPS (arg2);
1111
1112 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1113 {
1114 if (code == POINTER_PLUS_EXPR)
1115 return int_const_binop (PLUS_EXPR,
1116 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1117
1118 return int_const_binop (code, arg1, arg2);
1119 }
1120
1121 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1122 {
1123 machine_mode mode;
1124 REAL_VALUE_TYPE d1;
1125 REAL_VALUE_TYPE d2;
1126 REAL_VALUE_TYPE value;
1127 REAL_VALUE_TYPE result;
1128 bool inexact;
1129 tree t, type;
1130
1131 /* The following codes are handled by real_arithmetic. */
1132 switch (code)
1133 {
1134 case PLUS_EXPR:
1135 case MINUS_EXPR:
1136 case MULT_EXPR:
1137 case RDIV_EXPR:
1138 case MIN_EXPR:
1139 case MAX_EXPR:
1140 break;
1141
1142 default:
1143 return NULL_TREE;
1144 }
1145
1146 d1 = TREE_REAL_CST (arg1);
1147 d2 = TREE_REAL_CST (arg2);
1148
1149 type = TREE_TYPE (arg1);
1150 mode = TYPE_MODE (type);
1151
1152 /* Don't perform operation if we honor signaling NaNs and
1153 either operand is a signaling NaN. */
1154 if (HONOR_SNANS (mode)
1155 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1156 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1157 return NULL_TREE;
1158
1159 /* Don't perform operation if it would raise a division
1160 by zero exception. */
1161 if (code == RDIV_EXPR
1162 && real_equal (&d2, &dconst0)
1163 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1164 return NULL_TREE;
1165
1166 /* If either operand is a NaN, just return it. Otherwise, set up
1167 for floating-point trap; we return an overflow. */
1168 if (REAL_VALUE_ISNAN (d1))
1169 {
1170 	  /* Make the resulting NaN value be a qNaN when flag_signaling_nans
1171 	     is off.  */
1172 d1.signalling = 0;
1173 t = build_real (type, d1);
1174 return t;
1175 }
1176 else if (REAL_VALUE_ISNAN (d2))
1177 {
1178 	  /* Make the resulting NaN value be a qNaN when flag_signaling_nans
1179 	     is off.  */
1180 d2.signalling = 0;
1181 t = build_real (type, d2);
1182 return t;
1183 }
1184
1185 inexact = real_arithmetic (&value, code, &d1, &d2);
1186 real_convert (&result, mode, &value);
1187
1188 /* Don't constant fold this floating point operation if
1189 	 the result has overflowed and flag_trapping_math is set.  */
1190 if (flag_trapping_math
1191 && MODE_HAS_INFINITIES (mode)
1192 && REAL_VALUE_ISINF (result)
1193 && !REAL_VALUE_ISINF (d1)
1194 && !REAL_VALUE_ISINF (d2))
1195 return NULL_TREE;
1196
1197 /* Don't constant fold this floating point operation if the
1198 	 result may depend upon the run-time rounding mode and
1199 flag_rounding_math is set, or if GCC's software emulation
1200 is unable to accurately represent the result. */
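      /* E.g. 1.0/3.0 is inexact but still folded by default; with
	 -frounding-math the fold is suppressed and the division is left
	 to the run-time rounding mode.  */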
1201 if ((flag_rounding_math
1202 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1203 && (inexact || !real_identical (&result, &value)))
1204 return NULL_TREE;
1205
1206 t = build_real (type, result);
1207
1208 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1209 return t;
1210 }
1211
1212 if (TREE_CODE (arg1) == FIXED_CST)
1213 {
1214 FIXED_VALUE_TYPE f1;
1215 FIXED_VALUE_TYPE f2;
1216 FIXED_VALUE_TYPE result;
1217 tree t, type;
1218 int sat_p;
1219 bool overflow_p;
1220
1221 /* The following codes are handled by fixed_arithmetic. */
1222 switch (code)
1223 {
1224 case PLUS_EXPR:
1225 case MINUS_EXPR:
1226 case MULT_EXPR:
1227 case TRUNC_DIV_EXPR:
1228 if (TREE_CODE (arg2) != FIXED_CST)
1229 return NULL_TREE;
1230 f2 = TREE_FIXED_CST (arg2);
1231 break;
1232
1233 case LSHIFT_EXPR:
1234 case RSHIFT_EXPR:
1235 {
1236 if (TREE_CODE (arg2) != INTEGER_CST)
1237 return NULL_TREE;
1238 wide_int w2 = arg2;
1239 f2.data.high = w2.elt (1);
1240 f2.data.low = w2.elt (0);
1241 f2.mode = SImode;
1242 }
1243 break;
1244
1245 default:
1246 return NULL_TREE;
1247 }
1248
1249 f1 = TREE_FIXED_CST (arg1);
1250 type = TREE_TYPE (arg1);
1251 sat_p = TYPE_SATURATING (type);
1252 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1253 t = build_fixed (type, result);
1254 /* Propagate overflow flags. */
1255 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1256 TREE_OVERFLOW (t) = 1;
1257 return t;
1258 }
1259
1260 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1261 {
1262 tree type = TREE_TYPE (arg1);
1263 tree r1 = TREE_REALPART (arg1);
1264 tree i1 = TREE_IMAGPART (arg1);
1265 tree r2 = TREE_REALPART (arg2);
1266 tree i2 = TREE_IMAGPART (arg2);
1267 tree real, imag;
1268
1269 switch (code)
1270 {
1271 case PLUS_EXPR:
1272 case MINUS_EXPR:
1273 real = const_binop (code, r1, r2);
1274 imag = const_binop (code, i1, i2);
1275 break;
1276
1277 case MULT_EXPR:
1278 if (COMPLEX_FLOAT_TYPE_P (type))
1279 return do_mpc_arg2 (arg1, arg2, type,
1280 /* do_nonfinite= */ folding_initializer,
1281 mpc_mul);
1282
1283 real = const_binop (MINUS_EXPR,
1284 const_binop (MULT_EXPR, r1, r2),
1285 const_binop (MULT_EXPR, i1, i2));
1286 imag = const_binop (PLUS_EXPR,
1287 const_binop (MULT_EXPR, r1, i2),
1288 const_binop (MULT_EXPR, i1, r2));
1289 break;
1290
1291 case RDIV_EXPR:
1292 if (COMPLEX_FLOAT_TYPE_P (type))
1293 return do_mpc_arg2 (arg1, arg2, type,
1294 /* do_nonfinite= */ folding_initializer,
1295 mpc_div);
1296 	  /* Fall through.  */
1297 case TRUNC_DIV_EXPR:
1298 case CEIL_DIV_EXPR:
1299 case FLOOR_DIV_EXPR:
1300 case ROUND_DIV_EXPR:
1301 if (flag_complex_method == 0)
1302 {
1303 /* Keep this algorithm in sync with
1304 tree-complex.c:expand_complex_div_straight().
1305
1306 Expand complex division to scalars, straightforward algorithm.
1307 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1308 t = br*br + bi*bi
1309 */
1310 tree magsquared
1311 = const_binop (PLUS_EXPR,
1312 const_binop (MULT_EXPR, r2, r2),
1313 const_binop (MULT_EXPR, i2, i2));
1314 tree t1
1315 = const_binop (PLUS_EXPR,
1316 const_binop (MULT_EXPR, r1, r2),
1317 const_binop (MULT_EXPR, i1, i2));
1318 tree t2
1319 = const_binop (MINUS_EXPR,
1320 const_binop (MULT_EXPR, i1, r2),
1321 const_binop (MULT_EXPR, r1, i2));
1322
1323 real = const_binop (code, t1, magsquared);
1324 imag = const_binop (code, t2, magsquared);
1325 }
1326 else
1327 {
1328 /* Keep this algorithm in sync with
1329 tree-complex.c:expand_complex_div_wide().
1330
1331 Expand complex division to scalars, modified algorithm to minimize
1332 overflow with wide input ranges. */
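	    /* This is the classic scaled formulation of complex division
	       (often credited to R. L. Smith, CACM 1962): divide through
	       by whichever part of the divisor is larger in magnitude.  */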
1333 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1334 fold_abs_const (r2, TREE_TYPE (type)),
1335 fold_abs_const (i2, TREE_TYPE (type)));
1336
1337 if (integer_nonzerop (compare))
1338 {
1339 /* In the TRUE branch, we compute
1340 ratio = br/bi;
1341 div = (br * ratio) + bi;
1342 tr = (ar * ratio) + ai;
1343 ti = (ai * ratio) - ar;
1344 tr = tr / div;
1345 ti = ti / div; */
1346 tree ratio = const_binop (code, r2, i2);
1347 tree div = const_binop (PLUS_EXPR, i2,
1348 const_binop (MULT_EXPR, r2, ratio));
1349 real = const_binop (MULT_EXPR, r1, ratio);
1350 real = const_binop (PLUS_EXPR, real, i1);
1351 real = const_binop (code, real, div);
1352
1353 imag = const_binop (MULT_EXPR, i1, ratio);
1354 imag = const_binop (MINUS_EXPR, imag, r1);
1355 imag = const_binop (code, imag, div);
1356 }
1357 else
1358 {
1359 /* In the FALSE branch, we compute
1360 ratio = d/c;
1361 divisor = (d * ratio) + c;
1362 tr = (b * ratio) + a;
1363 ti = b - (a * ratio);
1364 tr = tr / div;
1365 ti = ti / div; */
1366 tree ratio = const_binop (code, i2, r2);
1367 tree div = const_binop (PLUS_EXPR, r2,
1368 const_binop (MULT_EXPR, i2, ratio));
1369
1370 real = const_binop (MULT_EXPR, i1, ratio);
1371 real = const_binop (PLUS_EXPR, real, r1);
1372 real = const_binop (code, real, div);
1373
1374 imag = const_binop (MULT_EXPR, r1, ratio);
1375 imag = const_binop (MINUS_EXPR, i1, imag);
1376 imag = const_binop (code, imag, div);
1377 }
1378 }
1379 break;
1380
1381 default:
1382 return NULL_TREE;
1383 }
1384
1385 if (real && imag)
1386 return build_complex (type, real, imag);
1387 }
1388
1389 if (TREE_CODE (arg1) == VECTOR_CST
1390 && TREE_CODE (arg2) == VECTOR_CST)
1391 {
1392 tree type = TREE_TYPE (arg1);
1393 int count = TYPE_VECTOR_SUBPARTS (type), i;
1394 tree *elts = XALLOCAVEC (tree, count);
1395
1396 for (i = 0; i < count; i++)
1397 {
1398 tree elem1 = VECTOR_CST_ELT (arg1, i);
1399 tree elem2 = VECTOR_CST_ELT (arg2, i);
1400
1401 elts[i] = const_binop (code, elem1, elem2);
1402
1403 	  /* It is possible that const_binop cannot handle the given
1404 	     code and returns NULL_TREE.  */
1405 if (elts[i] == NULL_TREE)
1406 return NULL_TREE;
1407 }
1408
1409 return build_vector (type, elts);
1410 }
1411
1412 /* Shifts allow a scalar offset for a vector. */
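  /* E.g. the VECTOR_CST {1, 2, 3, 4} shifted left by the scalar 1 folds
     element-wise to {2, 4, 6, 8}.  */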
1413 if (TREE_CODE (arg1) == VECTOR_CST
1414 && TREE_CODE (arg2) == INTEGER_CST)
1415 {
1416 tree type = TREE_TYPE (arg1);
1417 int count = TYPE_VECTOR_SUBPARTS (type), i;
1418 tree *elts = XALLOCAVEC (tree, count);
1419
1420 for (i = 0; i < count; i++)
1421 {
1422 tree elem1 = VECTOR_CST_ELT (arg1, i);
1423
1424 elts[i] = const_binop (code, elem1, arg2);
1425
1426 	  /* It is possible that const_binop cannot handle the given
1427 	     code and returns NULL_TREE.  */
1428 if (elts[i] == NULL_TREE)
1429 return NULL_TREE;
1430 }
1431
1432 return build_vector (type, elts);
1433 }
1434 return NULL_TREE;
1435 }
1436
1437 /* Overload that adds a TYPE parameter to be able to dispatch
1438 to fold_relational_const. */
1439
1440 tree
1441 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1442 {
1443 if (TREE_CODE_CLASS (code) == tcc_comparison)
1444 return fold_relational_const (code, type, arg1, arg2);
1445
1446 /* ??? Until we make the const_binop worker take the type of the
1447 result as argument put those cases that need it here. */
1448 switch (code)
1449 {
1450 case COMPLEX_EXPR:
1451 if ((TREE_CODE (arg1) == REAL_CST
1452 && TREE_CODE (arg2) == REAL_CST)
1453 || (TREE_CODE (arg1) == INTEGER_CST
1454 && TREE_CODE (arg2) == INTEGER_CST))
1455 return build_complex (type, arg1, arg2);
1456 return NULL_TREE;
1457
1458 case VEC_PACK_TRUNC_EXPR:
1459 case VEC_PACK_FIX_TRUNC_EXPR:
1460 {
1461 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1462 tree *elts;
1463
1464 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1465 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1466 if (TREE_CODE (arg1) != VECTOR_CST
1467 || TREE_CODE (arg2) != VECTOR_CST)
1468 return NULL_TREE;
1469
1470 elts = XALLOCAVEC (tree, nelts);
1471 if (!vec_cst_ctor_to_array (arg1, elts)
1472 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1473 return NULL_TREE;
1474
1475 for (i = 0; i < nelts; i++)
1476 {
1477 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1478 ? NOP_EXPR : FIX_TRUNC_EXPR,
1479 TREE_TYPE (type), elts[i]);
1480 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1481 return NULL_TREE;
1482 }
1483
1484 return build_vector (type, elts);
1485 }
1486
1487 case VEC_WIDEN_MULT_LO_EXPR:
1488 case VEC_WIDEN_MULT_HI_EXPR:
1489 case VEC_WIDEN_MULT_EVEN_EXPR:
1490 case VEC_WIDEN_MULT_ODD_EXPR:
1491 {
1492 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1493 unsigned int out, ofs, scale;
1494 tree *elts;
1495
1496 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1497 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1498 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1499 return NULL_TREE;
1500
1501 elts = XALLOCAVEC (tree, nelts * 4);
1502 if (!vec_cst_ctor_to_array (arg1, elts)
1503 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1504 return NULL_TREE;
1505
1506 if (code == VEC_WIDEN_MULT_LO_EXPR)
1507 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1508 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1509 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1510 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1511 scale = 1, ofs = 0;
1512 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1513 scale = 1, ofs = 1;
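	/* E.g. with nelts == 4 the EVEN variant multiplies input elements
	   0, 2, 4 and 6, the ODD variant elements 1, 3, 5 and 7, while
	   LO/HI select a contiguous half depending on endianness.  */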
1514
1515 for (out = 0; out < nelts; out++)
1516 {
1517 unsigned int in1 = (out << scale) + ofs;
1518 unsigned int in2 = in1 + nelts * 2;
1519 tree t1, t2;
1520
1521 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1522 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1523
1524 if (t1 == NULL_TREE || t2 == NULL_TREE)
1525 return NULL_TREE;
1526 elts[out] = const_binop (MULT_EXPR, t1, t2);
1527 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1528 return NULL_TREE;
1529 }
1530
1531 return build_vector (type, elts);
1532 }
1533
1534 default:;
1535 }
1536
1537 if (TREE_CODE_CLASS (code) != tcc_binary)
1538 return NULL_TREE;
1539
1540 /* Make sure type and arg0 have the same saturating flag. */
1541 gcc_checking_assert (TYPE_SATURATING (type)
1542 == TYPE_SATURATING (TREE_TYPE (arg1)));
1543
1544 return const_binop (code, arg1, arg2);
1545 }
1546
1547 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1548 Return zero if computing the constants is not possible. */
1549
1550 tree
1551 const_unop (enum tree_code code, tree type, tree arg0)
1552 {
1553 /* Don't perform the operation, other than NEGATE and ABS, if
1554 flag_signaling_nans is on and the operand is a signaling NaN. */
1555 if (TREE_CODE (arg0) == REAL_CST
1556 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1557 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1558 && code != NEGATE_EXPR
1559 && code != ABS_EXPR)
1560 return NULL_TREE;
1561
1562 switch (code)
1563 {
1564 CASE_CONVERT:
1565 case FLOAT_EXPR:
1566 case FIX_TRUNC_EXPR:
1567 case FIXED_CONVERT_EXPR:
1568 return fold_convert_const (code, type, arg0);
1569
1570 case ADDR_SPACE_CONVERT_EXPR:
1571 /* If the source address is 0, and the source address space
1572 cannot have a valid object at 0, fold to dest type null. */
1573 if (integer_zerop (arg0)
1574 && !(targetm.addr_space.zero_address_valid
1575 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1576 return fold_convert_const (code, type, arg0);
1577 break;
1578
1579 case VIEW_CONVERT_EXPR:
1580 return fold_view_convert_expr (type, arg0);
1581
1582 case NEGATE_EXPR:
1583 {
1584 /* Can't call fold_negate_const directly here as that doesn't
1585 handle all cases and we might not be able to negate some
1586 constants. */
1587 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1588 if (tem && CONSTANT_CLASS_P (tem))
1589 return tem;
1590 break;
1591 }
1592
1593 case ABS_EXPR:
1594 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1595 return fold_abs_const (arg0, type);
1596 break;
1597
1598 case CONJ_EXPR:
1599 if (TREE_CODE (arg0) == COMPLEX_CST)
1600 {
1601 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1602 TREE_TYPE (type));
1603 return build_complex (type, TREE_REALPART (arg0), ipart);
1604 }
1605 break;
1606
1607 case BIT_NOT_EXPR:
1608 if (TREE_CODE (arg0) == INTEGER_CST)
1609 return fold_not_const (arg0, type);
1610 /* Perform BIT_NOT_EXPR on each element individually. */
1611 else if (TREE_CODE (arg0) == VECTOR_CST)
1612 {
1613 tree *elements;
1614 tree elem;
1615 unsigned count = VECTOR_CST_NELTS (arg0), i;
1616
1617 elements = XALLOCAVEC (tree, count);
1618 for (i = 0; i < count; i++)
1619 {
1620 elem = VECTOR_CST_ELT (arg0, i);
1621 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1622 if (elem == NULL_TREE)
1623 break;
1624 elements[i] = elem;
1625 }
1626 if (i == count)
1627 return build_vector (type, elements);
1628 }
1629 break;
1630
1631 case TRUTH_NOT_EXPR:
1632 if (TREE_CODE (arg0) == INTEGER_CST)
1633 return constant_boolean_node (integer_zerop (arg0), type);
1634 break;
1635
1636 case REALPART_EXPR:
1637 if (TREE_CODE (arg0) == COMPLEX_CST)
1638 return fold_convert (type, TREE_REALPART (arg0));
1639 break;
1640
1641 case IMAGPART_EXPR:
1642 if (TREE_CODE (arg0) == COMPLEX_CST)
1643 return fold_convert (type, TREE_IMAGPART (arg0));
1644 break;
1645
1646 case VEC_UNPACK_LO_EXPR:
1647 case VEC_UNPACK_HI_EXPR:
1648 case VEC_UNPACK_FLOAT_LO_EXPR:
1649 case VEC_UNPACK_FLOAT_HI_EXPR:
1650 {
1651 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1652 tree *elts;
1653 enum tree_code subcode;
1654
1655 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1656 if (TREE_CODE (arg0) != VECTOR_CST)
1657 return NULL_TREE;
1658
1659 elts = XALLOCAVEC (tree, nelts * 2);
1660 if (!vec_cst_ctor_to_array (arg0, elts))
1661 return NULL_TREE;
1662
1663 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1664 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1665 elts += nelts;
1666
1667 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1668 subcode = NOP_EXPR;
1669 else
1670 subcode = FLOAT_EXPR;
1671
1672 for (i = 0; i < nelts; i++)
1673 {
1674 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1675 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1676 return NULL_TREE;
1677 }
1678
1679 return build_vector (type, elts);
1680 }
1681
1682 case REDUC_MIN_EXPR:
1683 case REDUC_MAX_EXPR:
1684 case REDUC_PLUS_EXPR:
1685 {
1686 unsigned int nelts, i;
1687 tree *elts;
1688 enum tree_code subcode;
1689
1690 if (TREE_CODE (arg0) != VECTOR_CST)
1691 return NULL_TREE;
1692 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1693
1694 elts = XALLOCAVEC (tree, nelts);
1695 if (!vec_cst_ctor_to_array (arg0, elts))
1696 return NULL_TREE;
1697
1698 switch (code)
1699 {
1700 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1701 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1702 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1703 default: gcc_unreachable ();
1704 }
1705
1706 for (i = 1; i < nelts; i++)
1707 {
1708 elts[0] = const_binop (subcode, elts[0], elts[i]);
1709 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1710 return NULL_TREE;
1711 }
1712
1713 return elts[0];
1714 }
1715
1716 default:
1717 break;
1718 }
1719
1720 return NULL_TREE;
1721 }
1722
1723 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1724 indicates which particular sizetype to create. */
1725
1726 tree
1727 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1728 {
1729 return build_int_cst (sizetype_tab[(int) kind], number);
1730 }
1731 \f
1732 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1733 is a tree code. The type of the result is taken from the operands.
1734 Both must be equivalent integer types, ala int_binop_types_match_p.
1735 If the operands are constant, so is the result. */
1736
1737 tree
1738 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1739 {
1740 tree type = TREE_TYPE (arg0);
1741
1742 if (arg0 == error_mark_node || arg1 == error_mark_node)
1743 return error_mark_node;
1744
1745 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1746 TREE_TYPE (arg1)));
1747
1748 /* Handle the special case of two integer constants faster. */
1749 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1750 {
1751 /* And some specific cases even faster than that. */
1752 if (code == PLUS_EXPR)
1753 {
1754 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1755 return arg1;
1756 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1757 return arg0;
1758 }
1759 else if (code == MINUS_EXPR)
1760 {
1761 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1762 return arg0;
1763 }
1764 else if (code == MULT_EXPR)
1765 {
1766 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1767 return arg1;
1768 }
1769
1770 /* Handle general case of two integer constants. For sizetype
1771 constant calculations we always want to know about overflow,
1772 even in the unsigned case. */
1773 return int_const_binop_1 (code, arg0, arg1, -1);
1774 }
1775
1776 return fold_build2_loc (loc, code, type, arg0, arg1);
1777 }
1778
1779 /* Given two values, either both of sizetype or both of bitsizetype,
1780 compute the difference between the two values. Return the value
1781    in the signed type corresponding to the type of the operands.  */
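/* E.g. for two sizetype operands the result is returned in ssizetype,
   so the difference may legitimately be negative even though the
   operands are unsigned.  */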
1782
1783 tree
1784 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1785 {
1786 tree type = TREE_TYPE (arg0);
1787 tree ctype;
1788
1789 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1790 TREE_TYPE (arg1)));
1791
1792 /* If the type is already signed, just do the simple thing. */
1793 if (!TYPE_UNSIGNED (type))
1794 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1795
1796 if (type == sizetype)
1797 ctype = ssizetype;
1798 else if (type == bitsizetype)
1799 ctype = sbitsizetype;
1800 else
1801 ctype = signed_type_for (type);
1802
1803 /* If either operand is not a constant, do the conversions to the signed
1804 type and subtract. The hardware will do the right thing with any
1805 overflow in the subtraction. */
1806 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1807 return size_binop_loc (loc, MINUS_EXPR,
1808 fold_convert_loc (loc, ctype, arg0),
1809 fold_convert_loc (loc, ctype, arg1));
1810
1811 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1812 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1813 overflow) and negate (which can't either). Special-case a result
1814 of zero while we're here. */
1815 if (tree_int_cst_equal (arg0, arg1))
1816 return build_int_cst (ctype, 0);
1817 else if (tree_int_cst_lt (arg1, arg0))
1818 return fold_convert_loc (loc, ctype,
1819 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1820 else
1821 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1822 fold_convert_loc (loc, ctype,
1823 size_binop_loc (loc,
1824 MINUS_EXPR,
1825 arg1, arg0)));
1826 }
1827 \f
1828 /* A subroutine of fold_convert_const handling conversions of an
1829 INTEGER_CST to another integer type. */
1830
1831 static tree
1832 fold_convert_const_int_from_int (tree type, const_tree arg1)
1833 {
1834 /* Given an integer constant, make new constant with new type,
1835 appropriately sign-extended or truncated. Use widest_int
1836      so that any extension is done according to ARG1's type.  */
1837 return force_fit_type (type, wi::to_widest (arg1),
1838 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1839 TREE_OVERFLOW (arg1));
1840 }
1841
1842 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1843 to an integer type. */
1844
1845 static tree
1846 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1847 {
1848 bool overflow = false;
1849 tree t;
1850
1851 /* The following code implements the floating point to integer
1852 conversion rules required by the Java Language Specification,
1853 that IEEE NaNs are mapped to zero and values that overflow
1854 the target precision saturate, i.e. values greater than
1855 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1856 are mapped to INT_MIN. These semantics are allowed by the
1857 C and C++ standards that simply state that the behavior of
1858 FP-to-integer conversion is unspecified upon overflow. */
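  /* For example, (int) 1.0e30 folds to INT_MAX and (int) __builtin_nan ("")
     folds to 0, in both cases with TREE_OVERFLOW set on the result.  */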
1859
1860 wide_int val;
1861 REAL_VALUE_TYPE r;
1862 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1863
1864 switch (code)
1865 {
1866 case FIX_TRUNC_EXPR:
1867 real_trunc (&r, VOIDmode, &x);
1868 break;
1869
1870 default:
1871 gcc_unreachable ();
1872 }
1873
1874 /* If R is NaN, return zero and show we have an overflow. */
1875 if (REAL_VALUE_ISNAN (r))
1876 {
1877 overflow = true;
1878 val = wi::zero (TYPE_PRECISION (type));
1879 }
1880
1881 /* See if R is less than the lower bound or greater than the
1882 upper bound. */
1883
1884 if (! overflow)
1885 {
1886 tree lt = TYPE_MIN_VALUE (type);
1887 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1888 if (real_less (&r, &l))
1889 {
1890 overflow = true;
1891 val = lt;
1892 }
1893 }
1894
1895 if (! overflow)
1896 {
1897 tree ut = TYPE_MAX_VALUE (type);
1898 if (ut)
1899 {
1900 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1901 if (real_less (&u, &r))
1902 {
1903 overflow = true;
1904 val = ut;
1905 }
1906 }
1907 }
1908
1909 if (! overflow)
1910 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1911
1912 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1913 return t;
1914 }
1915
1916 /* A subroutine of fold_convert_const handling conversions of a
1917 FIXED_CST to an integer type. */
1918
1919 static tree
1920 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1921 {
1922 tree t;
1923 double_int temp, temp_trunc;
1924 unsigned int mode;
1925
1926 /* Right shift FIXED_CST to temp by fbit. */
1927 temp = TREE_FIXED_CST (arg1).data;
1928 mode = TREE_FIXED_CST (arg1).mode;
1929 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1930 {
1931 temp = temp.rshift (GET_MODE_FBIT (mode),
1932 HOST_BITS_PER_DOUBLE_INT,
1933 SIGNED_FIXED_POINT_MODE_P (mode));
1934
1935 /* Left shift temp to temp_trunc by fbit. */
1936 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1937 HOST_BITS_PER_DOUBLE_INT,
1938 SIGNED_FIXED_POINT_MODE_P (mode));
1939 }
1940 else
1941 {
1942 temp = double_int_zero;
1943 temp_trunc = double_int_zero;
1944 }
1945
1946 /* If FIXED_CST is negative, we need to round the value toward 0.
1947      We do so by adding 1 to TEMP when the discarded fractional bits are nonzero.  */
1948 if (SIGNED_FIXED_POINT_MODE_P (mode)
1949 && temp_trunc.is_negative ()
1950 && TREE_FIXED_CST (arg1).data != temp_trunc)
1951 temp += double_int_one;
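  /* For example, a signed fixed-point -2.5 shifts right to -3 (the
     arithmetic shift floors), and since the discarded fractional bits
     are nonzero we add 1, giving the correctly truncated value -2.  */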
1952
1953 /* Given a fixed-point constant, make new constant with new type,
1954 appropriately sign-extended or truncated. */
1955 t = force_fit_type (type, temp, -1,
1956 (temp.is_negative ()
1957 && (TYPE_UNSIGNED (type)
1958 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1959 | TREE_OVERFLOW (arg1));
1960
1961 return t;
1962 }
1963
1964 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1965 to another floating point type. */
1966
1967 static tree
1968 fold_convert_const_real_from_real (tree type, const_tree arg1)
1969 {
1970 REAL_VALUE_TYPE value;
1971 tree t;
1972
1973 /* Don't perform the operation if flag_signaling_nans is on
1974 and the operand is a signaling NaN. */
1975 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
1976 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
1977 return NULL_TREE;
1978
1979 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1980 t = build_real (type, value);
1981
1982 /* If converting an infinity or NAN to a representation that doesn't
1983 have one, set the overflow bit so that we can produce some kind of
1984 error message at the appropriate point if necessary. It's not the
1985 most user-friendly message, but it's better than nothing. */
1986 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1987 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1988 TREE_OVERFLOW (t) = 1;
1989 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1990 && !MODE_HAS_NANS (TYPE_MODE (type)))
1991 TREE_OVERFLOW (t) = 1;
1992   /* Regular overflow: the conversion produced an infinity in a mode that
1993      can't represent infinities.  */
1994 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1995 && REAL_VALUE_ISINF (value)
1996 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1997 TREE_OVERFLOW (t) = 1;
1998 else
1999 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2000 return t;
2001 }
2002
2003 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2004 to a floating point type. */
2005
2006 static tree
2007 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2008 {
2009 REAL_VALUE_TYPE value;
2010 tree t;
2011
2012 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2013 t = build_real (type, value);
2014
2015 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2016 return t;
2017 }
2018
2019 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2020 to another fixed-point type. */
2021
2022 static tree
2023 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2024 {
2025 FIXED_VALUE_TYPE value;
2026 tree t;
2027 bool overflow_p;
2028
2029 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2030 TYPE_SATURATING (type));
2031 t = build_fixed (type, value);
2032
2033 /* Propagate overflow flags. */
2034 if (overflow_p | TREE_OVERFLOW (arg1))
2035 TREE_OVERFLOW (t) = 1;
2036 return t;
2037 }
2038
2039 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2040 to a fixed-point type. */
2041
2042 static tree
2043 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2044 {
2045 FIXED_VALUE_TYPE value;
2046 tree t;
2047 bool overflow_p;
2048 double_int di;
2049
2050 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2051
2052 di.low = TREE_INT_CST_ELT (arg1, 0);
2053 if (TREE_INT_CST_NUNITS (arg1) == 1)
2054 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2055 else
2056 di.high = TREE_INT_CST_ELT (arg1, 1);
2057
2058 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2059 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2060 TYPE_SATURATING (type));
2061 t = build_fixed (type, value);
2062
2063 /* Propagate overflow flags. */
2064 if (overflow_p | TREE_OVERFLOW (arg1))
2065 TREE_OVERFLOW (t) = 1;
2066 return t;
2067 }
2068
2069 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2070 to a fixed-point type. */
2071
2072 static tree
2073 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2074 {
2075 FIXED_VALUE_TYPE value;
2076 tree t;
2077 bool overflow_p;
2078
2079 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2080 &TREE_REAL_CST (arg1),
2081 TYPE_SATURATING (type));
2082 t = build_fixed (type, value);
2083
2084 /* Propagate overflow flags. */
2085 if (overflow_p | TREE_OVERFLOW (arg1))
2086 TREE_OVERFLOW (t) = 1;
2087 return t;
2088 }
2089
2090 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2091 type TYPE. If no simplification can be done return NULL_TREE. */
2092
2093 static tree
2094 fold_convert_const (enum tree_code code, tree type, tree arg1)
2095 {
2096 if (TREE_TYPE (arg1) == type)
2097 return arg1;
2098
2099 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2100 || TREE_CODE (type) == OFFSET_TYPE)
2101 {
2102 if (TREE_CODE (arg1) == INTEGER_CST)
2103 return fold_convert_const_int_from_int (type, arg1);
2104 else if (TREE_CODE (arg1) == REAL_CST)
2105 return fold_convert_const_int_from_real (code, type, arg1);
2106 else if (TREE_CODE (arg1) == FIXED_CST)
2107 return fold_convert_const_int_from_fixed (type, arg1);
2108 }
2109 else if (TREE_CODE (type) == REAL_TYPE)
2110 {
2111 if (TREE_CODE (arg1) == INTEGER_CST)
2112 return build_real_from_int_cst (type, arg1);
2113 else if (TREE_CODE (arg1) == REAL_CST)
2114 return fold_convert_const_real_from_real (type, arg1);
2115 else if (TREE_CODE (arg1) == FIXED_CST)
2116 return fold_convert_const_real_from_fixed (type, arg1);
2117 }
2118 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2119 {
2120 if (TREE_CODE (arg1) == FIXED_CST)
2121 return fold_convert_const_fixed_from_fixed (type, arg1);
2122 else if (TREE_CODE (arg1) == INTEGER_CST)
2123 return fold_convert_const_fixed_from_int (type, arg1);
2124 else if (TREE_CODE (arg1) == REAL_CST)
2125 return fold_convert_const_fixed_from_real (type, arg1);
2126 }
2127 else if (TREE_CODE (type) == VECTOR_TYPE)
2128 {
2129 if (TREE_CODE (arg1) == VECTOR_CST
2130 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2131 {
2132 int len = TYPE_VECTOR_SUBPARTS (type);
2133 tree elttype = TREE_TYPE (type);
2134 tree *v = XALLOCAVEC (tree, len);
2135 for (int i = 0; i < len; ++i)
2136 {
2137 tree elt = VECTOR_CST_ELT (arg1, i);
2138 tree cvt = fold_convert_const (code, elttype, elt);
2139 if (cvt == NULL_TREE)
2140 return NULL_TREE;
2141 v[i] = cvt;
2142 }
2143 return build_vector (type, v);
2144 }
2145 }
2146 return NULL_TREE;
2147 }
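/* Example (illustrative, on a target with a 16-bit short):
   fold_convert_const (NOP_EXPR, short_integer_type_node,
		       build_int_cst (integer_type_node, 70000))
   truncates the value to the 16-bit precision of short and yields
   4464 (70000 mod 65536), with the overflow noted by force_fit_type.  */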
2148
2149 /* Construct a vector of type TYPE with all elements zero.  */
2150
2151 static tree
2152 build_zero_vector (tree type)
2153 {
2154 tree t;
2155
2156 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2157 return build_vector_from_val (type, t);
2158 }
2159
2160 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2161
2162 bool
2163 fold_convertible_p (const_tree type, const_tree arg)
2164 {
2165 tree orig = TREE_TYPE (arg);
2166
2167 if (type == orig)
2168 return true;
2169
2170 if (TREE_CODE (arg) == ERROR_MARK
2171 || TREE_CODE (type) == ERROR_MARK
2172 || TREE_CODE (orig) == ERROR_MARK)
2173 return false;
2174
2175 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2176 return true;
2177
2178 switch (TREE_CODE (type))
2179 {
2180 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2181 case POINTER_TYPE: case REFERENCE_TYPE:
2182 case OFFSET_TYPE:
2183 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2184 || TREE_CODE (orig) == OFFSET_TYPE)
2185 return true;
2186 return (TREE_CODE (orig) == VECTOR_TYPE
2187 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2188
2189 case REAL_TYPE:
2190 case FIXED_POINT_TYPE:
2191 case COMPLEX_TYPE:
2192 case VECTOR_TYPE:
2193 case VOID_TYPE:
2194 return TREE_CODE (type) == TREE_CODE (orig);
2195
2196 default:
2197 return false;
2198 }
2199 }
2200
2201 /* Convert expression ARG to type TYPE. Used by the middle-end for
2202 simple conversions in preference to calling the front-end's convert. */
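/* For example, converting an INTEGER_CST to a REAL_TYPE below folds
   the constant directly via fold_convert_const, while converting a
   COMPLEX_TYPE value to a scalar type goes through its REALPART_EXPR
   first.  */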
2203
2204 tree
2205 fold_convert_loc (location_t loc, tree type, tree arg)
2206 {
2207 tree orig = TREE_TYPE (arg);
2208 tree tem;
2209
2210 if (type == orig)
2211 return arg;
2212
2213 if (TREE_CODE (arg) == ERROR_MARK
2214 || TREE_CODE (type) == ERROR_MARK
2215 || TREE_CODE (orig) == ERROR_MARK)
2216 return error_mark_node;
2217
2218 switch (TREE_CODE (type))
2219 {
2220 case POINTER_TYPE:
2221 case REFERENCE_TYPE:
2222 /* Handle conversions between pointers to different address spaces. */
2223 if (POINTER_TYPE_P (orig)
2224 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2225 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2226 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2227 /* fall through */
2228
2229 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2230 case OFFSET_TYPE:
2231 if (TREE_CODE (arg) == INTEGER_CST)
2232 {
2233 tem = fold_convert_const (NOP_EXPR, type, arg);
2234 if (tem != NULL_TREE)
2235 return tem;
2236 }
2237 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2238 || TREE_CODE (orig) == OFFSET_TYPE)
2239 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2240 if (TREE_CODE (orig) == COMPLEX_TYPE)
2241 return fold_convert_loc (loc, type,
2242 fold_build1_loc (loc, REALPART_EXPR,
2243 TREE_TYPE (orig), arg));
2244 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2245 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2246 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2247
2248 case REAL_TYPE:
2249 if (TREE_CODE (arg) == INTEGER_CST)
2250 {
2251 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2252 if (tem != NULL_TREE)
2253 return tem;
2254 }
2255 else if (TREE_CODE (arg) == REAL_CST)
2256 {
2257 tem = fold_convert_const (NOP_EXPR, type, arg);
2258 if (tem != NULL_TREE)
2259 return tem;
2260 }
2261 else if (TREE_CODE (arg) == FIXED_CST)
2262 {
2263 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2264 if (tem != NULL_TREE)
2265 return tem;
2266 }
2267
2268 switch (TREE_CODE (orig))
2269 {
2270 case INTEGER_TYPE:
2271 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2272 case POINTER_TYPE: case REFERENCE_TYPE:
2273 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2274
2275 case REAL_TYPE:
2276 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2277
2278 case FIXED_POINT_TYPE:
2279 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2280
2281 case COMPLEX_TYPE:
2282 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2283 return fold_convert_loc (loc, type, tem);
2284
2285 default:
2286 gcc_unreachable ();
2287 }
2288
2289 case FIXED_POINT_TYPE:
2290 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2291 || TREE_CODE (arg) == REAL_CST)
2292 {
2293 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2294 if (tem != NULL_TREE)
2295 goto fold_convert_exit;
2296 }
2297
2298 switch (TREE_CODE (orig))
2299 {
2300 case FIXED_POINT_TYPE:
2301 case INTEGER_TYPE:
2302 case ENUMERAL_TYPE:
2303 case BOOLEAN_TYPE:
2304 case REAL_TYPE:
2305 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2306
2307 case COMPLEX_TYPE:
2308 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2309 return fold_convert_loc (loc, type, tem);
2310
2311 default:
2312 gcc_unreachable ();
2313 }
2314
2315 case COMPLEX_TYPE:
2316 switch (TREE_CODE (orig))
2317 {
2318 case INTEGER_TYPE:
2319 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2320 case POINTER_TYPE: case REFERENCE_TYPE:
2321 case REAL_TYPE:
2322 case FIXED_POINT_TYPE:
2323 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2324 fold_convert_loc (loc, TREE_TYPE (type), arg),
2325 fold_convert_loc (loc, TREE_TYPE (type),
2326 integer_zero_node));
2327 case COMPLEX_TYPE:
2328 {
2329 tree rpart, ipart;
2330
2331 if (TREE_CODE (arg) == COMPLEX_EXPR)
2332 {
2333 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2334 TREE_OPERAND (arg, 0));
2335 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2336 TREE_OPERAND (arg, 1));
2337 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2338 }
2339
2340 arg = save_expr (arg);
2341 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2342 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2343 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2344 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2345 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2346 }
2347
2348 default:
2349 gcc_unreachable ();
2350 }
2351
2352 case VECTOR_TYPE:
2353 if (integer_zerop (arg))
2354 return build_zero_vector (type);
2355 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2356 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2357 || TREE_CODE (orig) == VECTOR_TYPE);
2358 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2359
2360 case VOID_TYPE:
2361 tem = fold_ignored_result (arg);
2362 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2363
2364 default:
2365 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2366 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2367 gcc_unreachable ();
2368 }
2369 fold_convert_exit:
2370 protected_set_expr_location_unshare (tem, loc);
2371 return tem;
2372 }
2373 \f
2374 /* Return false if expr can be assumed not to be an lvalue, true
2375 otherwise. */
2376
2377 static bool
2378 maybe_lvalue_p (const_tree x)
2379 {
2380 /* We only need to wrap lvalue tree codes. */
2381 switch (TREE_CODE (x))
2382 {
2383 case VAR_DECL:
2384 case PARM_DECL:
2385 case RESULT_DECL:
2386 case LABEL_DECL:
2387 case FUNCTION_DECL:
2388 case SSA_NAME:
2389
2390 case COMPONENT_REF:
2391 case MEM_REF:
2392 case INDIRECT_REF:
2393 case ARRAY_REF:
2394 case ARRAY_RANGE_REF:
2395 case BIT_FIELD_REF:
2396 case OBJ_TYPE_REF:
2397
2398 case REALPART_EXPR:
2399 case IMAGPART_EXPR:
2400 case PREINCREMENT_EXPR:
2401 case PREDECREMENT_EXPR:
2402 case SAVE_EXPR:
2403 case TRY_CATCH_EXPR:
2404 case WITH_CLEANUP_EXPR:
2405 case COMPOUND_EXPR:
2406 case MODIFY_EXPR:
2407 case TARGET_EXPR:
2408 case COND_EXPR:
2409 case BIND_EXPR:
2410 break;
2411
2412 default:
2413 /* Assume the worst for front-end tree codes. */
2414 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2415 break;
2416 return false;
2417 }
2418
2419 return true;
2420 }
2421
2422 /* Return an expr equal to X but certainly not valid as an lvalue. */
2423
2424 tree
2425 non_lvalue_loc (location_t loc, tree x)
2426 {
2427 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2428 us. */
2429 if (in_gimple_form)
2430 return x;
2431
2432 if (! maybe_lvalue_p (x))
2433 return x;
2434 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2435 }
2436
2437 /* When pedantic, return an expr equal to X but certainly not valid as a
2438 pedantic lvalue. Otherwise, return X. */
2439
2440 static tree
2441 pedantic_non_lvalue_loc (location_t loc, tree x)
2442 {
2443 return protected_set_expr_location_unshare (x, loc);
2444 }
2445 \f
2446 /* Given a tree comparison code, return the code that is the logical inverse.
2447 It is generally not safe to do this for floating-point comparisons, except
2448 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2449 ERROR_MARK in this case. */
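/* For example, the inverse of a < b is a >= b when NaNs need not be
   honored, but UNGE_EXPR (unordered or greater-equal) when they must
   be, since both a < b and a >= b are false if either operand is a
   NaN.  */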
2450
2451 enum tree_code
2452 invert_tree_comparison (enum tree_code code, bool honor_nans)
2453 {
2454 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2455 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2456 return ERROR_MARK;
2457
2458 switch (code)
2459 {
2460 case EQ_EXPR:
2461 return NE_EXPR;
2462 case NE_EXPR:
2463 return EQ_EXPR;
2464 case GT_EXPR:
2465 return honor_nans ? UNLE_EXPR : LE_EXPR;
2466 case GE_EXPR:
2467 return honor_nans ? UNLT_EXPR : LT_EXPR;
2468 case LT_EXPR:
2469 return honor_nans ? UNGE_EXPR : GE_EXPR;
2470 case LE_EXPR:
2471 return honor_nans ? UNGT_EXPR : GT_EXPR;
2472 case LTGT_EXPR:
2473 return UNEQ_EXPR;
2474 case UNEQ_EXPR:
2475 return LTGT_EXPR;
2476 case UNGT_EXPR:
2477 return LE_EXPR;
2478 case UNGE_EXPR:
2479 return LT_EXPR;
2480 case UNLT_EXPR:
2481 return GE_EXPR;
2482 case UNLE_EXPR:
2483 return GT_EXPR;
2484 case ORDERED_EXPR:
2485 return UNORDERED_EXPR;
2486 case UNORDERED_EXPR:
2487 return ORDERED_EXPR;
2488 default:
2489 gcc_unreachable ();
2490 }
2491 }
2492
2493 /* Similar, but return the comparison that results if the operands are
2494 swapped. This is safe for floating-point. */
2495
2496 enum tree_code
2497 swap_tree_comparison (enum tree_code code)
2498 {
2499 switch (code)
2500 {
2501 case EQ_EXPR:
2502 case NE_EXPR:
2503 case ORDERED_EXPR:
2504 case UNORDERED_EXPR:
2505 case LTGT_EXPR:
2506 case UNEQ_EXPR:
2507 return code;
2508 case GT_EXPR:
2509 return LT_EXPR;
2510 case GE_EXPR:
2511 return LE_EXPR;
2512 case LT_EXPR:
2513 return GT_EXPR;
2514 case LE_EXPR:
2515 return GE_EXPR;
2516 case UNGT_EXPR:
2517 return UNLT_EXPR;
2518 case UNGE_EXPR:
2519 return UNLE_EXPR;
2520 case UNLT_EXPR:
2521 return UNGT_EXPR;
2522 case UNLE_EXPR:
2523 return UNGE_EXPR;
2524 default:
2525 gcc_unreachable ();
2526 }
2527 }
2528
2529
2530 /* Convert a comparison tree code from an enum tree_code representation
2531 into a compcode bit-based encoding. This function is the inverse of
2532 compcode_to_comparison. */
2533
2534 static enum comparison_code
2535 comparison_to_compcode (enum tree_code code)
2536 {
2537 switch (code)
2538 {
2539 case LT_EXPR:
2540 return COMPCODE_LT;
2541 case EQ_EXPR:
2542 return COMPCODE_EQ;
2543 case LE_EXPR:
2544 return COMPCODE_LE;
2545 case GT_EXPR:
2546 return COMPCODE_GT;
2547 case NE_EXPR:
2548 return COMPCODE_NE;
2549 case GE_EXPR:
2550 return COMPCODE_GE;
2551 case ORDERED_EXPR:
2552 return COMPCODE_ORD;
2553 case UNORDERED_EXPR:
2554 return COMPCODE_UNORD;
2555 case UNLT_EXPR:
2556 return COMPCODE_UNLT;
2557 case UNEQ_EXPR:
2558 return COMPCODE_UNEQ;
2559 case UNLE_EXPR:
2560 return COMPCODE_UNLE;
2561 case UNGT_EXPR:
2562 return COMPCODE_UNGT;
2563 case LTGT_EXPR:
2564 return COMPCODE_LTGT;
2565 case UNGE_EXPR:
2566 return COMPCODE_UNGE;
2567 default:
2568 gcc_unreachable ();
2569 }
2570 }
2571
2572 /* Convert a compcode bit-based encoding of a comparison operator back
2573 to GCC's enum tree_code representation. This function is the
2574 inverse of comparison_to_compcode. */
2575
2576 static enum tree_code
2577 compcode_to_comparison (enum comparison_code code)
2578 {
2579 switch (code)
2580 {
2581 case COMPCODE_LT:
2582 return LT_EXPR;
2583 case COMPCODE_EQ:
2584 return EQ_EXPR;
2585 case COMPCODE_LE:
2586 return LE_EXPR;
2587 case COMPCODE_GT:
2588 return GT_EXPR;
2589 case COMPCODE_NE:
2590 return NE_EXPR;
2591 case COMPCODE_GE:
2592 return GE_EXPR;
2593 case COMPCODE_ORD:
2594 return ORDERED_EXPR;
2595 case COMPCODE_UNORD:
2596 return UNORDERED_EXPR;
2597 case COMPCODE_UNLT:
2598 return UNLT_EXPR;
2599 case COMPCODE_UNEQ:
2600 return UNEQ_EXPR;
2601 case COMPCODE_UNLE:
2602 return UNLE_EXPR;
2603 case COMPCODE_UNGT:
2604 return UNGT_EXPR;
2605 case COMPCODE_LTGT:
2606 return LTGT_EXPR;
2607 case COMPCODE_UNGE:
2608 return UNGE_EXPR;
2609 default:
2610 gcc_unreachable ();
2611 }
2612 }
2613
2614 /* Return a tree for the comparison which is the combination of
2615 doing the AND or OR (depending on CODE) of the two operations LCODE
2616 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2617 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2618 if this makes the transformation invalid. */
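/* Worked example of the bit encoding: (a < b) || (a == b) combines
   COMPCODE_LT (1) | COMPCODE_EQ (2) into COMPCODE_LE (3) and folds to
   a <= b, while (a < b) && (a > b) combines 1 & 4 into COMPCODE_FALSE
   (0) and folds to constant false.  */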
2619
2620 tree
2621 combine_comparisons (location_t loc,
2622 enum tree_code code, enum tree_code lcode,
2623 enum tree_code rcode, tree truth_type,
2624 tree ll_arg, tree lr_arg)
2625 {
2626 bool honor_nans = HONOR_NANS (ll_arg);
2627 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2628 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2629 int compcode;
2630
2631 switch (code)
2632 {
2633 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2634 compcode = lcompcode & rcompcode;
2635 break;
2636
2637 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2638 compcode = lcompcode | rcompcode;
2639 break;
2640
2641 default:
2642 return NULL_TREE;
2643 }
2644
2645 if (!honor_nans)
2646 {
2647 /* Eliminate unordered comparisons, as well as LTGT and ORD
2648 which are not used unless the mode has NaNs. */
2649 compcode &= ~COMPCODE_UNORD;
2650 if (compcode == COMPCODE_LTGT)
2651 compcode = COMPCODE_NE;
2652 else if (compcode == COMPCODE_ORD)
2653 compcode = COMPCODE_TRUE;
2654 }
2655 else if (flag_trapping_math)
2656 {
2657 /* Check that the original operation and the optimized ones will trap
2658 under the same condition. */
2659 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2660 && (lcompcode != COMPCODE_EQ)
2661 && (lcompcode != COMPCODE_ORD);
2662 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2663 && (rcompcode != COMPCODE_EQ)
2664 && (rcompcode != COMPCODE_ORD);
2665 bool trap = (compcode & COMPCODE_UNORD) == 0
2666 && (compcode != COMPCODE_EQ)
2667 && (compcode != COMPCODE_ORD);
2668
2669 /* In a short-circuited boolean expression the LHS might be
2670 such that the RHS, if evaluated, will never trap. For
2671 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2672 if neither x nor y is NaN. (This is a mixed blessing: for
2673 example, the expression above will never trap, hence
2674 optimizing it to x < y would be invalid). */
2675 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2676 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2677 rtrap = false;
2678
2679 /* If the comparison was short-circuited, and only the RHS
2680 trapped, we may now generate a spurious trap. */
2681 if (rtrap && !ltrap
2682 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2683 return NULL_TREE;
2684
2685 /* If we changed the conditions that cause a trap, we lose. */
2686 if ((ltrap || rtrap) != trap)
2687 return NULL_TREE;
2688 }
2689
2690 if (compcode == COMPCODE_TRUE)
2691 return constant_boolean_node (true, truth_type);
2692 else if (compcode == COMPCODE_FALSE)
2693 return constant_boolean_node (false, truth_type);
2694 else
2695 {
2696 enum tree_code tcode;
2697
2698 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2699 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2700 }
2701 }
2702 \f
2703 /* Return nonzero if two operands (typically of the same tree node)
2704 are necessarily equal. FLAGS modifies behavior as follows:
2705
2706 If OEP_ONLY_CONST is set, only return nonzero for constants.
2707 This function tests whether the operands are indistinguishable;
2708 it does not test whether they are equal using C's == operation.
2709 The distinction is important for IEEE floating point, because
2710 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2711 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2712
2713 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2714 even though it may hold multiple values during a function.
2715 This is because a GCC tree node guarantees that nothing else is
2716 executed between the evaluation of its "operands" (which may often
2717 be evaluated in arbitrary order). Hence if the operands themselves
2718 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2719 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2720 unset means assuming isochronic (or instantaneous) tree equivalence.
2721 Unless comparing arbitrary expression trees, such as from different
2722 statements, this flag can usually be left unset.
2723
2724 If OEP_PURE_SAME is set, then pure functions with identical arguments
2725 are considered the same. It is used when the caller has other ways
2726 to ensure that global memory is unchanged in between.
2727
2728 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2729 not values of expressions.
2730
2731 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2732    any operand with side effects.  This is unnecessarily conservative in the
2733    case where we know that arg0 and arg1 are in disjoint code paths (such as in the
2734    ?: operator).  In addition, OEP_MATCH_SIDE_EFFECTS is used when comparing
2735 addresses with TREE_CONSTANT flag set so we know that &var == &var
2736 even if var is volatile. */
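/* For example, two distinct trees that both spell a[i] compare equal
   here when neither has side effects, whereas a tree containing i++
   is rejected unless OEP_MATCH_SIDE_EFFECTS is set.  */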
2737
2738 int
2739 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2740 {
2741 /* If either is ERROR_MARK, they aren't equal. */
2742 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2743 || TREE_TYPE (arg0) == error_mark_node
2744 || TREE_TYPE (arg1) == error_mark_node)
2745 return 0;
2746
2747   /* Similarly, if either does not have a type (like a released SSA name),
2748 they aren't equal. */
2749 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2750 return 0;
2751
2752   /* We cannot consider pointers to different address spaces equal.  */
2753 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2754 && POINTER_TYPE_P (TREE_TYPE (arg1))
2755 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2756 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2757 return 0;
2758
2759 /* Check equality of integer constants before bailing out due to
2760 precision differences. */
2761 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2762 {
2763 /* Address of INTEGER_CST is not defined; check that we did not forget
2764 to drop the OEP_ADDRESS_OF flags. */
2765 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2766 return tree_int_cst_equal (arg0, arg1);
2767 }
2768
2769 if (!(flags & OEP_ADDRESS_OF))
2770 {
2771 /* If both types don't have the same signedness, then we can't consider
2772 them equal. We must check this before the STRIP_NOPS calls
2773 because they may change the signedness of the arguments. As pointers
2774 strictly don't have a signedness, require either two pointers or
2775 two non-pointers as well. */
2776 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2777 || POINTER_TYPE_P (TREE_TYPE (arg0))
2778 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2779 return 0;
2780
2781 /* If both types don't have the same precision, then it is not safe
2782 to strip NOPs. */
2783 if (element_precision (TREE_TYPE (arg0))
2784 != element_precision (TREE_TYPE (arg1)))
2785 return 0;
2786
2787 STRIP_NOPS (arg0);
2788 STRIP_NOPS (arg1);
2789 }
2790 #if 0
2791   /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR.  Enable the
2792 sanity check once the issue is solved. */
2793 else
2794 /* Addresses of conversions and SSA_NAMEs (and many other things)
2795 are not defined. Check that we did not forget to drop the
2796 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2797 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2798 && TREE_CODE (arg0) != SSA_NAME);
2799 #endif
2800
2801 /* In case both args are comparisons but with different comparison
2802 code, try to swap the comparison operands of one arg to produce
2803 a match and compare that variant. */
2804 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2805 && COMPARISON_CLASS_P (arg0)
2806 && COMPARISON_CLASS_P (arg1))
2807 {
2808 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2809
2810 if (TREE_CODE (arg0) == swap_code)
2811 return operand_equal_p (TREE_OPERAND (arg0, 0),
2812 TREE_OPERAND (arg1, 1), flags)
2813 && operand_equal_p (TREE_OPERAND (arg0, 1),
2814 TREE_OPERAND (arg1, 0), flags);
2815 }
2816
2817 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2818 {
2819 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2820 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2821 ;
2822 else if (flags & OEP_ADDRESS_OF)
2823 {
2824 /* If we are interested in comparing addresses ignore
2825 MEM_REF wrappings of the base that can appear just for
2826 TBAA reasons. */
2827 if (TREE_CODE (arg0) == MEM_REF
2828 && DECL_P (arg1)
2829 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2830 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2831 && integer_zerop (TREE_OPERAND (arg0, 1)))
2832 return 1;
2833 else if (TREE_CODE (arg1) == MEM_REF
2834 && DECL_P (arg0)
2835 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2836 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2837 && integer_zerop (TREE_OPERAND (arg1, 1)))
2838 return 1;
2839 return 0;
2840 }
2841 else
2842 return 0;
2843 }
2844
2845   /* When not checking addresses, this is needed for conversions and for
2846 COMPONENT_REF. Might as well play it safe and always test this. */
2847 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2848 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2849 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2850 && !(flags & OEP_ADDRESS_OF)))
2851 return 0;
2852
2853 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2854 We don't care about side effects in that case because the SAVE_EXPR
2855 takes care of that for us. In all other cases, two expressions are
2856 equal if they have no side effects. If we have two identical
2857 expressions with side effects that should be treated the same due
2858 to the only side effects being identical SAVE_EXPR's, that will
2859 be detected in the recursive calls below.
2860 If we are taking an invariant address of two identical objects
2861 they are necessarily equal as well. */
2862 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2863 && (TREE_CODE (arg0) == SAVE_EXPR
2864 || (flags & OEP_MATCH_SIDE_EFFECTS)
2865 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2866 return 1;
2867
2868 /* Next handle constant cases, those for which we can return 1 even
2869 if ONLY_CONST is set. */
2870 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2871 switch (TREE_CODE (arg0))
2872 {
2873 case INTEGER_CST:
2874 return tree_int_cst_equal (arg0, arg1);
2875
2876 case FIXED_CST:
2877 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2878 TREE_FIXED_CST (arg1));
2879
2880 case REAL_CST:
2881 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2882 return 1;
2883
2884
2885 if (!HONOR_SIGNED_ZEROS (arg0))
2886 {
2887 /* If we do not distinguish between signed and unsigned zero,
2888 consider them equal. */
2889 if (real_zerop (arg0) && real_zerop (arg1))
2890 return 1;
2891 }
2892 return 0;
2893
2894 case VECTOR_CST:
2895 {
2896 unsigned i;
2897
2898 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2899 return 0;
2900
2901 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2902 {
2903 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2904 VECTOR_CST_ELT (arg1, i), flags))
2905 return 0;
2906 }
2907 return 1;
2908 }
2909
2910 case COMPLEX_CST:
2911 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2912 flags)
2913 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2914 flags));
2915
2916 case STRING_CST:
2917 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2918 && ! memcmp (TREE_STRING_POINTER (arg0),
2919 TREE_STRING_POINTER (arg1),
2920 TREE_STRING_LENGTH (arg0)));
2921
2922 case ADDR_EXPR:
2923 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2924 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2925 flags | OEP_ADDRESS_OF
2926 | OEP_MATCH_SIDE_EFFECTS);
2927 case CONSTRUCTOR:
2928 /* In GIMPLE empty constructors are allowed in initializers of
2929 aggregates. */
2930 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2931 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2932 default:
2933 break;
2934 }
2935
2936 if (flags & OEP_ONLY_CONST)
2937 return 0;
2938
2939 /* Define macros to test an operand from arg0 and arg1 for equality and a
2940 variant that allows null and views null as being different from any
2941    non-null value.  In the latter case, if either is null, then both
2942 must be; otherwise, do the normal comparison. */
2943 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2944 TREE_OPERAND (arg1, N), flags)
2945
2946 #define OP_SAME_WITH_NULL(N) \
2947 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2948 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2949
2950 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2951 {
2952 case tcc_unary:
2953 /* Two conversions are equal only if signedness and modes match. */
2954 switch (TREE_CODE (arg0))
2955 {
2956 CASE_CONVERT:
2957 case FIX_TRUNC_EXPR:
2958 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2959 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2960 return 0;
2961 break;
2962 default:
2963 break;
2964 }
2965
2966 return OP_SAME (0);
2967
2968
2969 case tcc_comparison:
2970 case tcc_binary:
2971 if (OP_SAME (0) && OP_SAME (1))
2972 return 1;
2973
2974 /* For commutative ops, allow the other order. */
2975 return (commutative_tree_code (TREE_CODE (arg0))
2976 && operand_equal_p (TREE_OPERAND (arg0, 0),
2977 TREE_OPERAND (arg1, 1), flags)
2978 && operand_equal_p (TREE_OPERAND (arg0, 1),
2979 TREE_OPERAND (arg1, 0), flags));
2980
2981 case tcc_reference:
2982 /* If either of the pointer (or reference) expressions we are
2983 dereferencing contain a side effect, these cannot be equal,
2984 but their addresses can be. */
2985 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
2986 && (TREE_SIDE_EFFECTS (arg0)
2987 || TREE_SIDE_EFFECTS (arg1)))
2988 return 0;
2989
2990 switch (TREE_CODE (arg0))
2991 {
2992 case INDIRECT_REF:
2993 if (!(flags & OEP_ADDRESS_OF)
2994 && (TYPE_ALIGN (TREE_TYPE (arg0))
2995 != TYPE_ALIGN (TREE_TYPE (arg1))))
2996 return 0;
2997 flags &= ~OEP_ADDRESS_OF;
2998 return OP_SAME (0);
2999
3000 case REALPART_EXPR:
3001 case IMAGPART_EXPR:
3002 case VIEW_CONVERT_EXPR:
3003 return OP_SAME (0);
3004
3005 case TARGET_MEM_REF:
3006 case MEM_REF:
3007 if (!(flags & OEP_ADDRESS_OF))
3008 {
3009 /* Require equal access sizes */
3010 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3011 && (!TYPE_SIZE (TREE_TYPE (arg0))
3012 || !TYPE_SIZE (TREE_TYPE (arg1))
3013 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3014 TYPE_SIZE (TREE_TYPE (arg1)),
3015 flags)))
3016 return 0;
3017 /* Verify that accesses are TBAA compatible. */
3018 if (!alias_ptr_types_compatible_p
3019 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3020 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3021 || (MR_DEPENDENCE_CLIQUE (arg0)
3022 != MR_DEPENDENCE_CLIQUE (arg1))
3023 || (MR_DEPENDENCE_BASE (arg0)
3024 != MR_DEPENDENCE_BASE (arg1)))
3025 return 0;
3026 /* Verify that alignment is compatible. */
3027 if (TYPE_ALIGN (TREE_TYPE (arg0))
3028 != TYPE_ALIGN (TREE_TYPE (arg1)))
3029 return 0;
3030 }
3031 flags &= ~OEP_ADDRESS_OF;
3032 return (OP_SAME (0) && OP_SAME (1)
3033 		  /* TARGET_MEM_REFs require equal extra operands.  */
3034 && (TREE_CODE (arg0) != TARGET_MEM_REF
3035 || (OP_SAME_WITH_NULL (2)
3036 && OP_SAME_WITH_NULL (3)
3037 && OP_SAME_WITH_NULL (4))));
3038
3039 case ARRAY_REF:
3040 case ARRAY_RANGE_REF:
3041 /* Operands 2 and 3 may be null.
3042 	     Compare the array index by value first if it is constant, as the
3043 	     indexes may have different types but the same value here.  */
3044 if (!OP_SAME (0))
3045 return 0;
3046 flags &= ~OEP_ADDRESS_OF;
3047 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3048 TREE_OPERAND (arg1, 1))
3049 || OP_SAME (1))
3050 && OP_SAME_WITH_NULL (2)
3051 && OP_SAME_WITH_NULL (3));
3052
3053 case COMPONENT_REF:
3054 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3055 may be NULL when we're called to compare MEM_EXPRs. */
3056 if (!OP_SAME_WITH_NULL (0)
3057 || !OP_SAME (1))
3058 return 0;
3059 flags &= ~OEP_ADDRESS_OF;
3060 return OP_SAME_WITH_NULL (2);
3061
3062 case BIT_FIELD_REF:
3063 if (!OP_SAME (0))
3064 return 0;
3065 flags &= ~OEP_ADDRESS_OF;
3066 return OP_SAME (1) && OP_SAME (2);
3067
3068 default:
3069 return 0;
3070 }
3071
3072 case tcc_expression:
3073 switch (TREE_CODE (arg0))
3074 {
3075 case ADDR_EXPR:
3076 /* Be sure we pass right ADDRESS_OF flag. */
3077 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3078 return operand_equal_p (TREE_OPERAND (arg0, 0),
3079 TREE_OPERAND (arg1, 0),
3080 flags | OEP_ADDRESS_OF);
3081
3082 case TRUTH_NOT_EXPR:
3083 return OP_SAME (0);
3084
3085 case TRUTH_ANDIF_EXPR:
3086 case TRUTH_ORIF_EXPR:
3087 return OP_SAME (0) && OP_SAME (1);
3088
3089 case FMA_EXPR:
3090 case WIDEN_MULT_PLUS_EXPR:
3091 case WIDEN_MULT_MINUS_EXPR:
3092 if (!OP_SAME (2))
3093 return 0;
3094 	  /* The multiplication operands are commutative.  */
3095 /* FALLTHRU */
3096
3097 case TRUTH_AND_EXPR:
3098 case TRUTH_OR_EXPR:
3099 case TRUTH_XOR_EXPR:
3100 if (OP_SAME (0) && OP_SAME (1))
3101 return 1;
3102
3103 /* Otherwise take into account this is a commutative operation. */
3104 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3105 TREE_OPERAND (arg1, 1), flags)
3106 && operand_equal_p (TREE_OPERAND (arg0, 1),
3107 TREE_OPERAND (arg1, 0), flags));
3108
3109 case COND_EXPR:
3110 case VEC_COND_EXPR:
3111 case DOT_PROD_EXPR:
3112 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3113
3114 default:
3115 return 0;
3116 }
3117
3118 case tcc_vl_exp:
3119 switch (TREE_CODE (arg0))
3120 {
3121 case CALL_EXPR:
3122 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3123 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3124 	    /* If one CALL_EXPR is an internal function call and the other
3125 	       is a normal function call, they are not equal.  */
3126 return 0;
3127 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3128 {
3129 /* If the CALL_EXPRs call different internal functions, then they
3130 are not equal. */
3131 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3132 return 0;
3133 }
3134 else
3135 {
3136 /* If the CALL_EXPRs call different functions, then they are not
3137 equal. */
3138 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3139 flags))
3140 return 0;
3141 }
3142
3143 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3144 {
3145 unsigned int cef = call_expr_flags (arg0);
3146 if (flags & OEP_PURE_SAME)
3147 cef &= ECF_CONST | ECF_PURE;
3148 else
3149 cef &= ECF_CONST;
3150 if (!cef)
3151 return 0;
3152 }
3153
3154 /* Now see if all the arguments are the same. */
3155 {
3156 const_call_expr_arg_iterator iter0, iter1;
3157 const_tree a0, a1;
3158 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3159 a1 = first_const_call_expr_arg (arg1, &iter1);
3160 a0 && a1;
3161 a0 = next_const_call_expr_arg (&iter0),
3162 a1 = next_const_call_expr_arg (&iter1))
3163 if (! operand_equal_p (a0, a1, flags))
3164 return 0;
3165
3166 /* If we get here and both argument lists are exhausted
3167 then the CALL_EXPRs are equal. */
3168 return ! (a0 || a1);
3169 }
3170 default:
3171 return 0;
3172 }
3173
3174 case tcc_declaration:
3175 /* Consider __builtin_sqrt equal to sqrt. */
3176 return (TREE_CODE (arg0) == FUNCTION_DECL
3177 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3178 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3179 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3180
3181 case tcc_exceptional:
3182 if (TREE_CODE (arg0) == CONSTRUCTOR)
3183 {
3184 /* In GIMPLE constructors are used only to build vectors from
3185 elements. Individual elements in the constructor must be
3186 indexed in increasing order and form an initial sequence.
3187
3188 	     We make no effort to compare constructors in GENERIC.
3189 (see sem_variable::equals in ipa-icf which can do so for
3190 constants). */
3191 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3192 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3193 return 0;
3194
3195 /* Be sure that vectors constructed have the same representation.
3196 	     So far we have only tested that the element precisions and modes match.
3197 	     Vectors may be BLKmode, so also check that the numbers of
3198 	     parts match.  */
3199 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3200 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3201 return 0;
3202
3203 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3204 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3205 unsigned int len = vec_safe_length (v0);
3206
3207 if (len != vec_safe_length (v1))
3208 return 0;
3209
3210 for (unsigned int i = 0; i < len; i++)
3211 {
3212 constructor_elt *c0 = &(*v0)[i];
3213 constructor_elt *c1 = &(*v1)[i];
3214
3215 if (!operand_equal_p (c0->value, c1->value, flags)
3216 /* In GIMPLE the indexes can be either NULL or matching i.
3217 Double check this so we won't get false
3218 positives for GENERIC. */
3219 || (c0->index
3220 && (TREE_CODE (c0->index) != INTEGER_CST
3221 || !compare_tree_int (c0->index, i)))
3222 || (c1->index
3223 && (TREE_CODE (c1->index) != INTEGER_CST
3224 || !compare_tree_int (c1->index, i))))
3225 return 0;
3226 }
3227 return 1;
3228 }
3229 return 0;
3230
3231 default:
3232 return 0;
3233 }
3234
3235 #undef OP_SAME
3236 #undef OP_SAME_WITH_NULL
3237 }
3238 \f
3239 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3240 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3241
3242 When in doubt, return 0. */
3243
3244 static int
3245 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3246 {
3247 int unsignedp1, unsignedpo;
3248 tree primarg0, primarg1, primother;
3249 unsigned int correct_width;
3250
3251 if (operand_equal_p (arg0, arg1, 0))
3252 return 1;
3253
3254 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3255 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3256 return 0;
3257
3258 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3259 and see if the inner values are the same. This removes any
3260 signedness comparison, which doesn't matter here. */
3261 primarg0 = arg0, primarg1 = arg1;
3262 STRIP_NOPS (primarg0);
3263 STRIP_NOPS (primarg1);
3264 if (operand_equal_p (primarg0, primarg1, 0))
3265 return 1;
3266
3267 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3268 actual comparison operand, ARG0.
3269
3270 First throw away any conversions to wider types
3271 already present in the operands. */
3272
3273 primarg1 = get_narrower (arg1, &unsignedp1);
3274 primother = get_narrower (other, &unsignedpo);
3275
3276 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3277 if (unsignedp1 == unsignedpo
3278 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3279 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3280 {
3281 tree type = TREE_TYPE (arg0);
3282
3283       /* Make sure the shorter operand is extended the right way
3284 to match the longer operand. */
3285 primarg1 = fold_convert (signed_or_unsigned_type_for
3286 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3287
3288 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3289 return 1;
3290 }
3291
3292 return 0;
3293 }
3294 \f
3295 /* See if ARG is an expression that is either a comparison or is performing
3296 arithmetic on comparisons. The comparisons must only be comparing
3297 two different values, which will be stored in *CVAL1 and *CVAL2; if
3298 they are nonzero it means that some operands have already been found.
3299 No variables may be used anywhere else in the expression except in the
3300 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3301 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3302
3303 If this is true, return 1. Otherwise, return zero. */
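/* For example, given ARG = (x < y) && (x == y), this records
   *CVAL1 = x and *CVAL2 = y and returns 1, while an expression that
   mixes three distinct operands, such as (x < y) && (y == z),
   returns 0.  */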
3304
3305 static int
3306 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3307 {
3308 enum tree_code code = TREE_CODE (arg);
3309 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3310
3311 /* We can handle some of the tcc_expression cases here. */
3312 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3313 tclass = tcc_unary;
3314 else if (tclass == tcc_expression
3315 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3316 || code == COMPOUND_EXPR))
3317 tclass = tcc_binary;
3318
3319 else if (tclass == tcc_expression && code == SAVE_EXPR
3320 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3321 {
3322 /* If we've already found a CVAL1 or CVAL2, this expression is
3323 	 too complex to handle.  */
3324 if (*cval1 || *cval2)
3325 return 0;
3326
3327 tclass = tcc_unary;
3328 *save_p = 1;
3329 }
3330
3331 switch (tclass)
3332 {
3333 case tcc_unary:
3334 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3335
3336 case tcc_binary:
3337 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3338 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3339 cval1, cval2, save_p));
3340
3341 case tcc_constant:
3342 return 1;
3343
3344 case tcc_expression:
3345 if (code == COND_EXPR)
3346 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3347 cval1, cval2, save_p)
3348 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3349 cval1, cval2, save_p)
3350 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3351 cval1, cval2, save_p));
3352 return 0;
3353
3354 case tcc_comparison:
3355 /* First see if we can handle the first operand, then the second. For
3356 	 the second operand, we know *CVAL1 can't be zero.  Each side of the
3357 	 comparison must be one of the two values; test for the
3358 case where this isn't true by failing if the two operands
3359 are the same. */
3360
3361 if (operand_equal_p (TREE_OPERAND (arg, 0),
3362 TREE_OPERAND (arg, 1), 0))
3363 return 0;
3364
3365 if (*cval1 == 0)
3366 *cval1 = TREE_OPERAND (arg, 0);
3367 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3368 ;
3369 else if (*cval2 == 0)
3370 *cval2 = TREE_OPERAND (arg, 0);
3371 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3372 ;
3373 else
3374 return 0;
3375
3376 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3377 ;
3378 else if (*cval2 == 0)
3379 *cval2 = TREE_OPERAND (arg, 1);
3380 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3381 ;
3382 else
3383 return 0;
3384
3385 return 1;
3386
3387 default:
3388 return 0;
3389 }
3390 }
3391 \f
3392 /* ARG is a tree that is known to contain just arithmetic operations and
3393 comparisons. Evaluate the operations in the tree substituting NEW0 for
3394 any occurrence of OLD0 as an operand of a comparison and likewise for
3395 NEW1 and OLD1. */
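/* For example, with ARG = (x < y) && (x == y), OLD0 = x, NEW0 = a,
   OLD1 = y and NEW1 = b, eval_subst rebuilds (a < b) && (a == b).  */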
3396
3397 static tree
3398 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3399 tree old1, tree new1)
3400 {
3401 tree type = TREE_TYPE (arg);
3402 enum tree_code code = TREE_CODE (arg);
3403 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3404
3405 /* We can handle some of the tcc_expression cases here. */
3406 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3407 tclass = tcc_unary;
3408 else if (tclass == tcc_expression
3409 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3410 tclass = tcc_binary;
3411
3412 switch (tclass)
3413 {
3414 case tcc_unary:
3415 return fold_build1_loc (loc, code, type,
3416 eval_subst (loc, TREE_OPERAND (arg, 0),
3417 old0, new0, old1, new1));
3418
3419 case tcc_binary:
3420 return fold_build2_loc (loc, code, type,
3421 eval_subst (loc, TREE_OPERAND (arg, 0),
3422 old0, new0, old1, new1),
3423 eval_subst (loc, TREE_OPERAND (arg, 1),
3424 old0, new0, old1, new1));
3425
3426 case tcc_expression:
3427 switch (code)
3428 {
3429 case SAVE_EXPR:
3430 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3431 old1, new1);
3432
3433 case COMPOUND_EXPR:
3434 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3435 old1, new1);
3436
3437 case COND_EXPR:
3438 return fold_build3_loc (loc, code, type,
3439 eval_subst (loc, TREE_OPERAND (arg, 0),
3440 old0, new0, old1, new1),
3441 eval_subst (loc, TREE_OPERAND (arg, 1),
3442 old0, new0, old1, new1),
3443 eval_subst (loc, TREE_OPERAND (arg, 2),
3444 old0, new0, old1, new1));
3445 default:
3446 break;
3447 }
3448 /* Fall through - ??? */
3449
3450 case tcc_comparison:
3451 {
3452 tree arg0 = TREE_OPERAND (arg, 0);
3453 tree arg1 = TREE_OPERAND (arg, 1);
3454
3455 /* We need to check both for exact equality and tree equality. The
3456 former will be true if the operand has a side-effect. In that
3457 case, we know the operand occurred exactly once. */
3458
3459 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3460 arg0 = new0;
3461 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3462 arg0 = new1;
3463
3464 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3465 arg1 = new0;
3466 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3467 arg1 = new1;
3468
3469 return fold_build2_loc (loc, code, type, arg0, arg1);
3470 }
3471
3472 default:
3473 return arg;
3474 }
3475 }
3476 \f
3477 /* Return a tree for the case when the result of an expression is RESULT
3478 converted to TYPE and OMITTED was previously an operand of the expression
3479 but is now not needed (e.g., we folded OMITTED * 0).
3480
3481 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3482 the conversion of RESULT to TYPE. */
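/* For example, when f () * 0 is folded, this is called with RESULT = 0
   and OMITTED = f (), and produces the COMPOUND_EXPR (f (), 0) so that
   the call is still evaluated for its side effects.  */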
3483
3484 tree
3485 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3486 {
3487 tree t = fold_convert_loc (loc, type, result);
3488
3489 /* If the resulting operand is an empty statement, just return the omitted
3490      statement cast to void.  */
3491 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3492 return build1_loc (loc, NOP_EXPR, void_type_node,
3493 fold_ignored_result (omitted));
3494
3495 if (TREE_SIDE_EFFECTS (omitted))
3496 return build2_loc (loc, COMPOUND_EXPR, type,
3497 fold_ignored_result (omitted), t);
3498
3499 return non_lvalue_loc (loc, t);
3500 }
3501
3502 /* Return a tree for the case when the result of an expression is RESULT
3503 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3504 of the expression but are now not needed.
3505
3506 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3507 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3508 evaluated before OMITTED2. Otherwise, if neither has side effects,
3509 just do the conversion of RESULT to TYPE. */
3510
3511 tree
3512 omit_two_operands_loc (location_t loc, tree type, tree result,
3513 tree omitted1, tree omitted2)
3514 {
3515 tree t = fold_convert_loc (loc, type, result);
3516
3517 if (TREE_SIDE_EFFECTS (omitted2))
3518 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3519 if (TREE_SIDE_EFFECTS (omitted1))
3520 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3521
3522 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3523 }
3524
3525 \f
3526 /* Return a simplified tree node for the truth-negation of ARG. This
3527 never alters ARG itself. We assume that ARG is an operation that
3528 returns a truth value (0 or 1).
3529
3530 FIXME: one would think we would fold the result, but it causes
3531 problems with the dominator optimizer. */
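/* For example, the negation of a && b is built below as !a || !b, and
   the negation of a COND_EXPR negates both of its value operands.  */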
3532
3533 static tree
3534 fold_truth_not_expr (location_t loc, tree arg)
3535 {
3536 tree type = TREE_TYPE (arg);
3537 enum tree_code code = TREE_CODE (arg);
3538 location_t loc1, loc2;
3539
3540 /* If this is a comparison, we can simply invert it, except for
3541 floating-point non-equality comparisons, in which case we just
3542 enclose a TRUTH_NOT_EXPR around what we have. */
3543
3544 if (TREE_CODE_CLASS (code) == tcc_comparison)
3545 {
3546 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3547 if (FLOAT_TYPE_P (op_type)
3548 && flag_trapping_math
3549 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3550 && code != NE_EXPR && code != EQ_EXPR)
3551 return NULL_TREE;
3552
3553 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3554 if (code == ERROR_MARK)
3555 return NULL_TREE;
3556
3557 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3558 TREE_OPERAND (arg, 1));
3559 }
3560
3561 switch (code)
3562 {
3563 case INTEGER_CST:
3564 return constant_boolean_node (integer_zerop (arg), type);
3565
3566 case TRUTH_AND_EXPR:
3567 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3568 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3569 return build2_loc (loc, TRUTH_OR_EXPR, type,
3570 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3571 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3572
3573 case TRUTH_OR_EXPR:
3574 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3575 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3576 return build2_loc (loc, TRUTH_AND_EXPR, type,
3577 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3578 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3579
3580 case TRUTH_XOR_EXPR:
3581 /* Here we can invert either operand. We invert the first operand
3582 unless the second operand is a TRUTH_NOT_EXPR in which case our
3583 result is the XOR of the first operand with the inside of the
3584 negation of the second operand. */
3585
3586 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3587 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3588 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3589 else
3590 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3591 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3592 TREE_OPERAND (arg, 1));
3593
3594 case TRUTH_ANDIF_EXPR:
3595 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3596 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3597 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3598 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3599 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3600
3601 case TRUTH_ORIF_EXPR:
3602 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3603 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3604 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3605 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3606 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3607
3608 case TRUTH_NOT_EXPR:
3609 return TREE_OPERAND (arg, 0);
3610
3611 case COND_EXPR:
3612 {
3613 tree arg1 = TREE_OPERAND (arg, 1);
3614 tree arg2 = TREE_OPERAND (arg, 2);
3615
3616 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3617 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3618
3619 /* A COND_EXPR may have a throw as one operand, which
3620 then has void type. Just leave void operands
3621 as they are. */
3622 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3623 VOID_TYPE_P (TREE_TYPE (arg1))
3624 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3625 VOID_TYPE_P (TREE_TYPE (arg2))
3626 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3627 }
3628
3629 case COMPOUND_EXPR:
3630 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3631 return build2_loc (loc, COMPOUND_EXPR, type,
3632 TREE_OPERAND (arg, 0),
3633 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3634
3635 case NON_LVALUE_EXPR:
3636 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3637 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3638
3639 CASE_CONVERT:
3640 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3641 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3642
3643 /* ... fall through ... */
3644
3645 case FLOAT_EXPR:
3646 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3647 return build1_loc (loc, TREE_CODE (arg), type,
3648 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3649
3650 case BIT_AND_EXPR:
3651 if (!integer_onep (TREE_OPERAND (arg, 1)))
3652 return NULL_TREE;
3653 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3654
3655 case SAVE_EXPR:
3656 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3657
3658 case CLEANUP_POINT_EXPR:
3659 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3660 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3661 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3662
3663 default:
3664 return NULL_TREE;
3665 }
3666 }
3667
3668 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3669 assume that ARG is an operation that returns a truth value (0 or 1
3670 for scalars, 0 or -1 for vectors). Return the folded expression if
3671 folding is successful. Otherwise, return NULL_TREE. */
3672
3673 static tree
3674 fold_invert_truthvalue (location_t loc, tree arg)
3675 {
3676 tree type = TREE_TYPE (arg);
3677 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3678 ? BIT_NOT_EXPR
3679 : TRUTH_NOT_EXPR,
3680 type, arg);
3681 }
3682
3683 /* Return a simplified tree node for the truth-negation of ARG. This
3684 never alters ARG itself. We assume that ARG is an operation that
3685 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3686
3687 tree
3688 invert_truthvalue_loc (location_t loc, tree arg)
3689 {
3690 if (TREE_CODE (arg) == ERROR_MARK)
3691 return arg;
3692
3693 tree type = TREE_TYPE (arg);
3694 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3695 ? BIT_NOT_EXPR
3696 : TRUTH_NOT_EXPR,
3697 type, arg);
3698 }
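/* For illustration (hypothetical operands; a, b integers, v0, v1 vectors):
   a scalar truth value is negated with TRUTH_NOT_EXPR, so
   invert_truthvalue_loc on "a < b" folds to "a >= b", while a vector
   comparison, whose true value is all-ones (-1), is negated with
   BIT_NOT_EXPR, i.e. "~(v0 < v1)" (which may itself fold further).  */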
3699
3700 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3701 with code CODE. This optimization is unsafe: it can change rounding and is only applied when unsafe math optimizations are enabled. */
3702 static tree
3703 distribute_real_division (location_t loc, enum tree_code code, tree type,
3704 tree arg0, tree arg1)
3705 {
3706 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3707 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3708
3709 /* (A / C) +- (B / C) -> (A +- B) / C. */
3710 if (mul0 == mul1
3711 && operand_equal_p (TREE_OPERAND (arg0, 1),
3712 TREE_OPERAND (arg1, 1), 0))
3713 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3714 fold_build2_loc (loc, code, type,
3715 TREE_OPERAND (arg0, 0),
3716 TREE_OPERAND (arg1, 0)),
3717 TREE_OPERAND (arg0, 1));
3718
3719 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3720 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3721 TREE_OPERAND (arg1, 0), 0)
3722 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3723 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3724 {
3725 REAL_VALUE_TYPE r0, r1;
3726 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3727 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3728 if (!mul0)
3729 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3730 if (!mul1)
3731 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3732 real_arithmetic (&r0, code, &r0, &r1);
3733 return fold_build2_loc (loc, MULT_EXPR, type,
3734 TREE_OPERAND (arg0, 0),
3735 build_real (type, r0));
3736 }
3737
3738 return NULL_TREE;
3739 }
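/* For illustration (hypothetical operands, and only valid under unsafe
   math optimizations, since reassociating real division changes
   rounding):

       x / 8.0 + y / 8.0   ~~>   (x + y) / 8.0     (common divisor)
       x / 2.0 + x / 4.0   ~~>   x * 0.75          (1/2 + 1/4 folded)

   where the 0.75 is computed at compile time by real_arithmetic.  */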
3740 \f
3741 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3742 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3743 and uses reverse storage order if REVERSEP is nonzero. */
3744
3745 static tree
3746 make_bit_field_ref (location_t loc, tree inner, tree type,
3747 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3748 int unsignedp, int reversep)
3749 {
3750 tree result, bftype;
3751
3752 if (bitpos == 0 && !reversep)
3753 {
3754 tree size = TYPE_SIZE (TREE_TYPE (inner));
3755 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3756 || POINTER_TYPE_P (TREE_TYPE (inner)))
3757 && tree_fits_shwi_p (size)
3758 && tree_to_shwi (size) == bitsize)
3759 return fold_convert_loc (loc, type, inner);
3760 }
3761
3762 bftype = type;
3763 if (TYPE_PRECISION (bftype) != bitsize
3764 || TYPE_UNSIGNED (bftype) == !unsignedp)
3765 bftype = build_nonstandard_integer_type (bitsize, 0);
3766
3767 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3768 size_int (bitsize), bitsize_int (bitpos));
3769 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3770
3771 if (bftype != type)
3772 result = fold_convert_loc (loc, type, result);
3773
3774 return result;
3775 }
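/* For illustration (a hypothetical 32-bit word W): a request for 8 bits
   at bit position 16, unsigned and in normal storage order, yields
   roughly "(TYPE) BIT_FIELD_REF <W, 8, 16>", going through a
   nonstandard 8-bit integer type when TYPE's precision or signedness
   differs; a request for all 32 bits at position 0 short-circuits to a
   plain conversion of W itself.  */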
3776
3777 /* Optimize a bit-field compare.
3778
3779 There are two cases: First is a compare against a constant and the
3780 second is a comparison of two items where the fields are at the same
3781 bit position relative to the start of a chunk (byte, halfword, word)
3782 large enough to contain it. In these cases we can avoid the shift
3783 implicit in bitfield extractions.
3784
3785 For constants, we emit a compare of the shifted constant with the
3786 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3787 compared. For two fields at the same position, we do the ANDs with the
3788 similar mask and compare the result of the ANDs.
3789
3790 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3791 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3792 are the left and right operands of the comparison, respectively.
3793
3794 If the optimization described above can be done, we return the resulting
3795 tree. Otherwise we return zero. */
3796
3797 static tree
3798 optimize_bit_field_compare (location_t loc, enum tree_code code,
3799 tree compare_type, tree lhs, tree rhs)
3800 {
3801 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3802 tree type = TREE_TYPE (lhs);
3803 tree unsigned_type;
3804 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3805 machine_mode lmode, rmode, nmode;
3806 int lunsignedp, runsignedp;
3807 int lreversep, rreversep;
3808 int lvolatilep = 0, rvolatilep = 0;
3809 tree linner, rinner = NULL_TREE;
3810 tree mask;
3811 tree offset;
3812
3813 /* Get all the information about the extractions being done. If the bit size
3814 is the same as the size of the underlying object, we aren't doing an
3815 extraction at all and so can do nothing. We also don't want to
3816 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3817 then will no longer be able to replace it. */
3818 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3819 &lunsignedp, &lreversep, &lvolatilep, false);
3820 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3821 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3822 return 0;
3823
3824 if (const_p)
3825 rreversep = lreversep;
3826 else
3827 {
3828 /* If this is not a constant, we can only do something if bit positions,
3829 sizes, signedness and storage order are the same. */
3830 rinner
3831 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3832 &runsignedp, &rreversep, &rvolatilep, false);
3833
3834 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3835 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3836 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3837 return 0;
3838 }
3839
3840 /* See if we can find a mode to refer to this field. We should be able to,
3841 but fail if we can't. */
3842 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3843 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3844 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3845 TYPE_ALIGN (TREE_TYPE (rinner))),
3846 word_mode, false);
3847 if (nmode == VOIDmode)
3848 return 0;
3849
3850 /* Get an unsigned type with the precision of this mode for the
3851 shifts below. */
3852 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3853
3854 /* Compute the bit position and size for the new reference and our offset
3855 within it. If the new reference is the same size as the original, we
3856 won't optimize anything, so return zero. */
3857 nbitsize = GET_MODE_BITSIZE (nmode);
3858 nbitpos = lbitpos & ~ (nbitsize - 1);
3859 lbitpos -= nbitpos;
3860 if (nbitsize == lbitsize)
3861 return 0;
3862
3863 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3864 lbitpos = nbitsize - lbitsize - lbitpos;
3865
3866 /* Make the mask to be used against the extracted field. */
3867 mask = build_int_cst_type (unsigned_type, -1);
3868 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3869 mask = const_binop (RSHIFT_EXPR, mask,
3870 size_int (nbitsize - lbitsize - lbitpos));
3871
3872 if (! const_p)
3873 /* If not comparing with constant, just rework the comparison
3874 and return. */
3875 return fold_build2_loc (loc, code, compare_type,
3876 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3877 make_bit_field_ref (loc, linner,
3878 unsigned_type,
3879 nbitsize, nbitpos,
3880 1, lreversep),
3881 mask),
3882 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3883 make_bit_field_ref (loc, rinner,
3884 unsigned_type,
3885 nbitsize, nbitpos,
3886 1, rreversep),
3887 mask));
3888
3889 /* Otherwise, we are handling the constant case. See if the constant is too
3890 big for the field. If so, warn and fold the comparison to its constant
3891 result (false for EQ_EXPR, true for NE_EXPR); this also avoids having
3892 to test for this error case below, where we might generate wrong code.
3893
3894 For unsigned fields, the constant shifted right by the field length should
3895 be all zero. For signed fields, the high-order bits should agree with
3896 the sign bit. */
3897
3898 if (lunsignedp)
3899 {
3900 if (wi::lrshift (rhs, lbitsize) != 0)
3901 {
3902 warning (0, "comparison is always %d due to width of bit-field",
3903 code == NE_EXPR);
3904 return constant_boolean_node (code == NE_EXPR, compare_type);
3905 }
3906 }
3907 else
3908 {
3909 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3910 if (tem != 0 && tem != -1)
3911 {
3912 warning (0, "comparison is always %d due to width of bit-field",
3913 code == NE_EXPR);
3914 return constant_boolean_node (code == NE_EXPR, compare_type);
3915 }
3916 }
3917
3918 /* Single-bit compares should always be against zero. */
3919 if (lbitsize == 1 && ! integer_zerop (rhs))
3920 {
3921 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3922 rhs = build_int_cst (type, 0);
3923 }
3924
3925 /* Make a new bit-field reference, shift the constant over the
3926 appropriate number of bits, and mask it with the computed mask
3927 (in case this was a signed field). */
3928 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3929 lreversep);
3930
3931 rhs = const_binop (BIT_AND_EXPR,
3932 const_binop (LSHIFT_EXPR,
3933 fold_convert_loc (loc, unsigned_type, rhs),
3934 size_int (lbitpos)),
3935 mask);
3936
3937 lhs = build2_loc (loc, code, compare_type,
3938 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3939 return lhs;
3940 }
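/* For illustration (a hypothetical layout): given

       struct S { unsigned a : 3; unsigned b : 3; } s, t;

   the constant case rewrites "s.a == 5" as a mask-and-compare on a
   word-sized load, roughly "(w & mask) == (5 << shift)", avoiding the
   bit-field extraction's shift; the two-field case rewrites
   "s.a == t.a" as "(w1 & mask) == (w2 & mask)".  The width checks above
   mean "s.a == 9" folds directly to false, with a warning, since 9
   cannot fit in 3 bits.  */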
3941 \f
3942 /* Subroutine for fold_truth_andor_1: decode a field reference.
3943
3944 If EXP is a component reference, we return the innermost reference.
3945
3946 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3947 set to the starting bit number.
3948
3949 If the innermost field can be completely contained in a mode-sized
3950 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3951
3952 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3953 otherwise it is not changed.
3954
3955 *PUNSIGNEDP is set to the signedness of the field.
3956
3957 *PREVERSEP is set to the storage order of the field.
3958
3959 *PMASK is set to the mask used. This is either contained in a
3960 BIT_AND_EXPR or derived from the width of the field.
3961
3962 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3963
3964 Return 0 if this is not a component reference or is one that we can't
3965 do anything with. */
3966
3967 static tree
3968 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3969 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3970 int *punsignedp, int *preversep, int *pvolatilep,
3971 tree *pmask, tree *pand_mask)
3972 {
3973 tree outer_type = 0;
3974 tree and_mask = 0;
3975 tree mask, inner, offset;
3976 tree unsigned_type;
3977 unsigned int precision;
3978
3979 /* All the optimizations using this function assume integer fields.
3980 There are problems with FP fields since the type_for_size call
3981 below can fail for, e.g., XFmode. */
3982 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3983 return 0;
3984
3985 /* We are interested in the bare arrangement of bits, so strip everything
3986 that doesn't affect the machine mode. However, record the type of the
3987 outermost expression if it may matter below. */
3988 if (CONVERT_EXPR_P (exp)
3989 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3990 outer_type = TREE_TYPE (exp);
3991 STRIP_NOPS (exp);
3992
3993 if (TREE_CODE (exp) == BIT_AND_EXPR)
3994 {
3995 and_mask = TREE_OPERAND (exp, 1);
3996 exp = TREE_OPERAND (exp, 0);
3997 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3998 if (TREE_CODE (and_mask) != INTEGER_CST)
3999 return 0;
4000 }
4001
4002 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4003 punsignedp, preversep, pvolatilep, false);
4004 if ((inner == exp && and_mask == 0)
4005 || *pbitsize < 0 || offset != 0
4006 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4007 return 0;
4008
4009 /* If the number of bits in the reference is the same as the bitsize of
4010 the outer type, then the outer type gives the signedness. Otherwise
4011 (in case of a small bitfield) the signedness is unchanged. */
4012 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4013 *punsignedp = TYPE_UNSIGNED (outer_type);
4014
4015 /* Compute the mask to access the bitfield. */
4016 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4017 precision = TYPE_PRECISION (unsigned_type);
4018
4019 mask = build_int_cst_type (unsigned_type, -1);
4020
4021 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4022 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4023
4024 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4025 if (and_mask != 0)
4026 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4027 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4028
4029 *pmask = mask;
4030 *pand_mask = and_mask;
4031 return inner;
4032 }
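/* For illustration (a hypothetical 3-bit unsigned bit-field s.a): for an
   expression like "(unsigned int) s.a & 6", this returns the containing
   object, sets *PBITSIZE to 3 and *PBITPOS to the field's position, and
   stores in *PMASK the 3-bit field mask AND'ed with the 6 from the
   BIT_AND_EXPR; *PAND_MASK receives the 6 itself.  */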
4033
4034 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4035 bit positions and the type of MASK is signed. */
4036
4037 static int
4038 all_ones_mask_p (const_tree mask, unsigned int size)
4039 {
4040 tree type = TREE_TYPE (mask);
4041 unsigned int precision = TYPE_PRECISION (type);
4042
4043 /* If this function returns true when the type of the mask is
4044 UNSIGNED, then there will be errors. In particular see
4045 gcc.c-torture/execute/990326-1.c. There does not appear to be
4046 any documentation paper trail as to why this is so. But the
4047 pre-wide-int code worked with that restriction and it has been
4048 preserved here. */
4049 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4050 return false;
4051
4052 return wi::mask (size, false, precision) == mask;
4053 }
4054
4055 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4056 represents the sign bit of EXP's type. If EXP represents a sign
4057 or zero extension, also test VAL against the unextended type.
4058 The return value is the (sub)expression whose sign bit is VAL,
4059 or NULL_TREE otherwise. */
4060
4061 tree
4062 sign_bit_p (tree exp, const_tree val)
4063 {
4064 int width;
4065 tree t;
4066
4067 /* Tree EXP must have an integral type. */
4068 t = TREE_TYPE (exp);
4069 if (! INTEGRAL_TYPE_P (t))
4070 return NULL_TREE;
4071
4072 /* Tree VAL must be an integer constant. */
4073 if (TREE_CODE (val) != INTEGER_CST
4074 || TREE_OVERFLOW (val))
4075 return NULL_TREE;
4076
4077 width = TYPE_PRECISION (t);
4078 if (wi::only_sign_bit_p (val, width))
4079 return exp;
4080
4081 /* Handle extension from a narrower type. */
4082 if (TREE_CODE (exp) == NOP_EXPR
4083 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4084 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4085
4086 return NULL_TREE;
4087 }
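/* For illustration (assuming a 32-bit int): only the value 0x80000000
   satisfies sign_bit_p for an int-typed EXP, and for "(int) c" with an
   8-bit char c the narrower sign bit 0x80 is also tried against c
   through the NOP_EXPR case, so callers can recognize sign-bit tests
   such as "(x & 0x80000000) != 0".  */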
4088
4089 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4090 to be evaluated unconditionally. */
4091
4092 static int
4093 simple_operand_p (const_tree exp)
4094 {
4095 /* Strip any conversions that don't change the machine mode. */
4096 STRIP_NOPS (exp);
4097
4098 return (CONSTANT_CLASS_P (exp)
4099 || TREE_CODE (exp) == SSA_NAME
4100 || (DECL_P (exp)
4101 && ! TREE_ADDRESSABLE (exp)
4102 && ! TREE_THIS_VOLATILE (exp)
4103 && ! DECL_NONLOCAL (exp)
4104 /* Don't regard global variables as simple. They may be
4105 allocated in ways unknown to the compiler (shared memory,
4106 #pragma weak, etc). */
4107 && ! TREE_PUBLIC (exp)
4108 && ! DECL_EXTERNAL (exp)
4109 /* Weakrefs are not safe to be read, since they can be NULL.
4110 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4111 have DECL_WEAK flag set. */
4112 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4113 /* Loading a static variable is unduly expensive, but global
4114 registers aren't expensive. */
4115 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4116 }
4117
4118 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4119 to be evaluated unconditionally.
4120 In addition to simple_operand_p, we assume that comparisons, conversions,
4121 and logic-not operations are simple, if their operands are simple, too. */
4122
4123 static bool
4124 simple_operand_p_2 (tree exp)
4125 {
4126 enum tree_code code;
4127
4128 if (TREE_SIDE_EFFECTS (exp)
4129 || tree_could_trap_p (exp))
4130 return false;
4131
4132 while (CONVERT_EXPR_P (exp))
4133 exp = TREE_OPERAND (exp, 0);
4134
4135 code = TREE_CODE (exp);
4136
4137 if (TREE_CODE_CLASS (code) == tcc_comparison)
4138 return (simple_operand_p (TREE_OPERAND (exp, 0))
4139 && simple_operand_p (TREE_OPERAND (exp, 1)));
4140
4141 if (code == TRUTH_NOT_EXPR)
4142 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4143
4144 return simple_operand_p (exp);
4145 }
4146
4147 \f
4148 /* The following functions are subroutines to fold_range_test and allow it to
4149 try to change a logical combination of comparisons into a range test.
4150
4151 For example, both
4152 X == 2 || X == 3 || X == 4 || X == 5
4153 and
4154 X >= 2 && X <= 5
4155 are converted to
4156 (unsigned) (X - 2) <= 3
4157
4158 We describe each set of comparisons as being either inside or outside
4159 a range, using a variable named like IN_P, and then describe the
4160 range with a lower and upper bound. If one of the bounds is omitted,
4161 it represents either the highest or lowest value of the type.
4162
4163 In the comments below, we represent a range by two numbers in brackets
4164 preceded by a "+" to designate being inside that range, or a "-" to
4165 designate being outside that range, so the condition can be inverted by
4166 flipping the prefix. An omitted bound is represented by a "-". For
4167 example, "- [-, 10]" means being outside the range starting at the lowest
4168 possible value and ending at 10, in other words, being greater than 10.
4169 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4170 always false.
4171
4172 We set up things so that the missing bounds are handled in a consistent
4173 manner so neither a missing bound nor "true" and "false" need to be
4174 handled using a special case. */
4175
4176 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4177 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4178 and UPPER1_P are nonzero if the respective argument is an upper bound
4179 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4180 must be specified for a comparison. ARG1 will be converted to ARG0's
4181 type if both are specified. */
4182
4183 static tree
4184 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4185 tree arg1, int upper1_p)
4186 {
4187 tree tem;
4188 int result;
4189 int sgn0, sgn1;
4190
4191 /* If neither arg represents infinity, do the normal operation.
4192 Else, if not a comparison, return infinity. Else handle the special
4193 comparison rules. Note that most of the cases below won't occur, but
4194 are handled for consistency. */
4195
4196 if (arg0 != 0 && arg1 != 0)
4197 {
4198 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4199 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4200 STRIP_NOPS (tem);
4201 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4202 }
4203
4204 if (TREE_CODE_CLASS (code) != tcc_comparison)
4205 return 0;
4206
4207 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4208 for neither. In real maths, we cannot assume open ended ranges are
4209 the same. But, this is computer arithmetic, where numbers are finite.
4210 We can therefore substitute for any unbounded bound a value Z that is
4211 greater than any representable number, which permits us to treat
4212 unbounded ranges as equal. */
4213 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4214 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4215 switch (code)
4216 {
4217 case EQ_EXPR:
4218 result = sgn0 == sgn1;
4219 break;
4220 case NE_EXPR:
4221 result = sgn0 != sgn1;
4222 break;
4223 case LT_EXPR:
4224 result = sgn0 < sgn1;
4225 break;
4226 case LE_EXPR:
4227 result = sgn0 <= sgn1;
4228 break;
4229 case GT_EXPR:
4230 result = sgn0 > sgn1;
4231 break;
4232 case GE_EXPR:
4233 result = sgn0 >= sgn1;
4234 break;
4235 default:
4236 gcc_unreachable ();
4237 }
4238
4239 return constant_boolean_node (result, type);
4240 }
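/* For illustration: an omitted bound behaves like a value Z beyond every
   representable number, signed according to whether it stands for an
   upper or a lower bound.  So for a constant c,
   "range_binop (LT_EXPR, type, c, 0, NULL_TREE, 1)" yields true (c is
   below the omitted upper bound), and two omitted lower bounds compare
   equal under EQ_EXPR.  */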
4241 \f
4242 /* Helper routine for make_range. Perform one step for it, return
4243 new expression if the loop should continue or NULL_TREE if it should
4244 stop. */
4245
4246 tree
4247 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4248 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4249 bool *strict_overflow_p)
4250 {
4251 tree arg0_type = TREE_TYPE (arg0);
4252 tree n_low, n_high, low = *p_low, high = *p_high;
4253 int in_p = *p_in_p, n_in_p;
4254
4255 switch (code)
4256 {
4257 case TRUTH_NOT_EXPR:
4258 /* We can only do something if the range is testing for zero. */
4259 if (low == NULL_TREE || high == NULL_TREE
4260 || ! integer_zerop (low) || ! integer_zerop (high))
4261 return NULL_TREE;
4262 *p_in_p = ! in_p;
4263 return arg0;
4264
4265 case EQ_EXPR: case NE_EXPR:
4266 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4267 /* We can only do something if the range is testing for zero
4268 and if the second operand is an integer constant. Note that
4269 saying something is "in" the range we make is done by
4270 complementing IN_P, since IN_P starts out set for the initial
4271 case of being not equal to zero; "out" leaves it alone. */
4272 if (low == NULL_TREE || high == NULL_TREE
4273 || ! integer_zerop (low) || ! integer_zerop (high)
4274 || TREE_CODE (arg1) != INTEGER_CST)
4275 return NULL_TREE;
4276
4277 switch (code)
4278 {
4279 case NE_EXPR: /* - [c, c] */
4280 low = high = arg1;
4281 break;
4282 case EQ_EXPR: /* + [c, c] */
4283 in_p = ! in_p, low = high = arg1;
4284 break;
4285 case GT_EXPR: /* - [-, c] */
4286 low = 0, high = arg1;
4287 break;
4288 case GE_EXPR: /* + [c, -] */
4289 in_p = ! in_p, low = arg1, high = 0;
4290 break;
4291 case LT_EXPR: /* - [c, -] */
4292 low = arg1, high = 0;
4293 break;
4294 case LE_EXPR: /* + [-, c] */
4295 in_p = ! in_p, low = 0, high = arg1;
4296 break;
4297 default:
4298 gcc_unreachable ();
4299 }
4300
4301 /* If this is an unsigned comparison, we also know that EXP is
4302 greater than or equal to zero. We base the range tests we make
4303 on that fact, so we record it here so we can parse existing
4304 range tests. We test arg0_type since often the return type
4305 of, e.g. EQ_EXPR, is boolean. */
4306 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4307 {
4308 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4309 in_p, low, high, 1,
4310 build_int_cst (arg0_type, 0),
4311 NULL_TREE))
4312 return NULL_TREE;
4313
4314 in_p = n_in_p, low = n_low, high = n_high;
4315
4316 /* If the high bound is missing, but we have a nonzero low
4317 bound, reverse the range so it goes from zero to the low bound
4318 minus 1. */
4319 if (high == 0 && low && ! integer_zerop (low))
4320 {
4321 in_p = ! in_p;
4322 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4323 build_int_cst (TREE_TYPE (low), 1), 0);
4324 low = build_int_cst (arg0_type, 0);
4325 }
4326 }
4327
4328 *p_low = low;
4329 *p_high = high;
4330 *p_in_p = in_p;
4331 return arg0;
4332
4333 case NEGATE_EXPR:
4334 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4335 low and high are non-NULL, then normalize will DTRT. */
4336 if (!TYPE_UNSIGNED (arg0_type)
4337 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4338 {
4339 if (low == NULL_TREE)
4340 low = TYPE_MIN_VALUE (arg0_type);
4341 if (high == NULL_TREE)
4342 high = TYPE_MAX_VALUE (arg0_type);
4343 }
4344
4345 /* (-x) IN [a,b] -> x in [-b, -a] */
4346 n_low = range_binop (MINUS_EXPR, exp_type,
4347 build_int_cst (exp_type, 0),
4348 0, high, 1);
4349 n_high = range_binop (MINUS_EXPR, exp_type,
4350 build_int_cst (exp_type, 0),
4351 0, low, 0);
4352 if (n_high != 0 && TREE_OVERFLOW (n_high))
4353 return NULL_TREE;
4354 goto normalize;
4355
4356 case BIT_NOT_EXPR:
4357 /* ~ X -> -X - 1 */
4358 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4359 build_int_cst (exp_type, 1));
4360
4361 case PLUS_EXPR:
4362 case MINUS_EXPR:
4363 if (TREE_CODE (arg1) != INTEGER_CST)
4364 return NULL_TREE;
4365
4366 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4367 move a constant to the other side. */
4368 if (!TYPE_UNSIGNED (arg0_type)
4369 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4370 return NULL_TREE;
4371
4372 /* If EXP is signed, any overflow in the computation is undefined,
4373 so we don't worry about it so long as our computations on
4374 the bounds don't overflow. For unsigned, overflow is defined
4375 and this is exactly the right thing. */
4376 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4377 arg0_type, low, 0, arg1, 0);
4378 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4379 arg0_type, high, 1, arg1, 0);
4380 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4381 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4382 return NULL_TREE;
4383
4384 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4385 *strict_overflow_p = true;
4386
4387 normalize:
4388 /* Check for an unsigned range which has wrapped around the maximum
4389 value thus making n_high < n_low, and normalize it. */
4390 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4391 {
4392 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4393 build_int_cst (TREE_TYPE (n_high), 1), 0);
4394 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4395 build_int_cst (TREE_TYPE (n_low), 1), 0);
4396
4397 /* If the range is of the form +/- [ x+1, x ], we won't
4398 be able to normalize it. But then, it represents the
4399 whole range or the empty set, so make it
4400 +/- [ -, - ]. */
4401 if (tree_int_cst_equal (n_low, low)
4402 && tree_int_cst_equal (n_high, high))
4403 low = high = 0;
4404 else
4405 in_p = ! in_p;
4406 }
4407 else
4408 low = n_low, high = n_high;
4409
4410 *p_low = low;
4411 *p_high = high;
4412 *p_in_p = in_p;
4413 return arg0;
4414
4415 CASE_CONVERT:
4416 case NON_LVALUE_EXPR:
4417 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4418 return NULL_TREE;
4419
4420 if (! INTEGRAL_TYPE_P (arg0_type)
4421 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4422 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4423 return NULL_TREE;
4424
4425 n_low = low, n_high = high;
4426
4427 if (n_low != 0)
4428 n_low = fold_convert_loc (loc, arg0_type, n_low);
4429
4430 if (n_high != 0)
4431 n_high = fold_convert_loc (loc, arg0_type, n_high);
4432
4433 /* If we're converting arg0 from an unsigned type to exp's
4434 signed type, we will be doing the comparison as unsigned.
4435 The tests above have already verified that LOW and HIGH
4436 are both positive.
4437
4438 So we have to ensure that we will handle large unsigned
4439 values the same way that the current signed bounds treat
4440 negative values. */
4441
4442 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4443 {
4444 tree high_positive;
4445 tree equiv_type;
4446 /* For fixed-point modes, we need to pass the saturating flag
4447 as the 2nd parameter. */
4448 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4449 equiv_type
4450 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4451 TYPE_SATURATING (arg0_type));
4452 else
4453 equiv_type
4454 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4455
4456 /* A range without an upper bound is, naturally, unbounded.
4457 Since convert would have cropped a very large value, use
4458 the max value for the destination type. */
4459 high_positive
4460 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4461 : TYPE_MAX_VALUE (arg0_type);
4462
4463 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4464 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4465 fold_convert_loc (loc, arg0_type,
4466 high_positive),
4467 build_int_cst (arg0_type, 1));
4468
4469 /* If the low bound is specified, "and" the range with the
4470 range for which the original unsigned value will be
4471 positive. */
4472 if (low != 0)
4473 {
4474 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4475 1, fold_convert_loc (loc, arg0_type,
4476 integer_zero_node),
4477 high_positive))
4478 return NULL_TREE;
4479
4480 in_p = (n_in_p == in_p);
4481 }
4482 else
4483 {
4484 /* Otherwise, "or" the range with the range of the input
4485 that will be interpreted as negative. */
4486 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4487 1, fold_convert_loc (loc, arg0_type,
4488 integer_zero_node),
4489 high_positive))
4490 return NULL_TREE;
4491
4492 in_p = (in_p != n_in_p);
4493 }
4494 }
4495
4496 *p_low = n_low;
4497 *p_high = n_high;
4498 *p_in_p = in_p;
4499 return arg0;
4500
4501 default:
4502 return NULL_TREE;
4503 }
4504 }
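/* For illustration (a hypothetical 8-bit unsigned x): for the test
   "x + 246 <= 4", the LE_EXPR step records + [0, 4] (the zero low bound
   comes from unsignedness), and the PLUS_EXPR step then subtracts 246
   from both bounds in the 8-bit type, wrapping to + [10, 14]; the test
   is thereby shown equivalent to "x - 10 <= 4".  */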
4505
4506 /* Given EXP, a logical expression, set the range it is testing into
4507 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4508 actually being tested. *PLOW and *PHIGH will be made of the same
4509 type as the returned expression. If EXP is not a comparison, we
4510 will most likely not be returning a useful value and range. Set
4511 *STRICT_OVERFLOW_P to true if the return value is only valid
4512 because signed overflow is undefined; otherwise, do not change
4513 *STRICT_OVERFLOW_P. */
4514
4515 tree
4516 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4517 bool *strict_overflow_p)
4518 {
4519 enum tree_code code;
4520 tree arg0, arg1 = NULL_TREE;
4521 tree exp_type, nexp;
4522 int in_p;
4523 tree low, high;
4524 location_t loc = EXPR_LOCATION (exp);
4525
4526 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4527 and see if we can refine the range. Some of the cases below may not
4528 happen, but it doesn't seem worth worrying about this. We "continue"
4529 the outer loop when we've changed something; otherwise we "break"
4530 the switch, which will "break" the while. */
4531
4532 in_p = 0;
4533 low = high = build_int_cst (TREE_TYPE (exp), 0);
4534
4535 while (1)
4536 {
4537 code = TREE_CODE (exp);
4538 exp_type = TREE_TYPE (exp);
4539 arg0 = NULL_TREE;
4540
4541 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4542 {
4543 if (TREE_OPERAND_LENGTH (exp) > 0)
4544 arg0 = TREE_OPERAND (exp, 0);
4545 if (TREE_CODE_CLASS (code) == tcc_binary
4546 || TREE_CODE_CLASS (code) == tcc_comparison
4547 || (TREE_CODE_CLASS (code) == tcc_expression
4548 && TREE_OPERAND_LENGTH (exp) > 1))
4549 arg1 = TREE_OPERAND (exp, 1);
4550 }
4551 if (arg0 == NULL_TREE)
4552 break;
4553
4554 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4555 &high, &in_p, strict_overflow_p);
4556 if (nexp == NULL_TREE)
4557 break;
4558 exp = nexp;
4559 }
4560
4561 /* If EXP is a constant, we can evaluate whether this is true or false. */
4562 if (TREE_CODE (exp) == INTEGER_CST)
4563 {
4564 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4565 exp, 0, low, 0))
4566 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4567 exp, 1, high, 1)));
4568 low = high = 0;
4569 exp = 0;
4570 }
4571
4572 *pin_p = in_p, *plow = low, *phigh = high;
4573 return exp;
4574 }
4575 \f
4576 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4577 type, TYPE, return an expression to test if EXP is in (or out of, depending
4578 on IN_P) the range. Return 0 if the test couldn't be created. */
4579
4580 tree
4581 build_range_check (location_t loc, tree type, tree exp, int in_p,
4582 tree low, tree high)
4583 {
4584 tree etype = TREE_TYPE (exp), value;
4585
4586 /* Disable this optimization for function pointer expressions
4587 on targets that require function pointer canonicalization. */
4588 if (targetm.have_canonicalize_funcptr_for_compare ()
4589 && TREE_CODE (etype) == POINTER_TYPE
4590 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4591 return NULL_TREE;
4592
4593 if (! in_p)
4594 {
4595 value = build_range_check (loc, type, exp, 1, low, high);
4596 if (value != 0)
4597 return invert_truthvalue_loc (loc, value);
4598
4599 return 0;
4600 }
4601
4602 if (low == 0 && high == 0)
4603 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4604
4605 if (low == 0)
4606 return fold_build2_loc (loc, LE_EXPR, type, exp,
4607 fold_convert_loc (loc, etype, high));
4608
4609 if (high == 0)
4610 return fold_build2_loc (loc, GE_EXPR, type, exp,
4611 fold_convert_loc (loc, etype, low));
4612
4613 if (operand_equal_p (low, high, 0))
4614 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4615 fold_convert_loc (loc, etype, low));
4616
4617 if (integer_zerop (low))
4618 {
4619 if (! TYPE_UNSIGNED (etype))
4620 {
4621 etype = unsigned_type_for (etype);
4622 high = fold_convert_loc (loc, etype, high);
4623 exp = fold_convert_loc (loc, etype, exp);
4624 }
4625 return build_range_check (loc, type, exp, 1, 0, high);
4626 }
4627
4628 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4629 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4630 {
4631 int prec = TYPE_PRECISION (etype);
4632
4633 if (wi::mask (prec - 1, false, prec) == high)
4634 {
4635 if (TYPE_UNSIGNED (etype))
4636 {
4637 tree signed_etype = signed_type_for (etype);
4638 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4639 etype
4640 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4641 else
4642 etype = signed_etype;
4643 exp = fold_convert_loc (loc, etype, exp);
4644 }
4645 return fold_build2_loc (loc, GT_EXPR, type, exp,
4646 build_int_cst (etype, 0));
4647 }
4648 }
4649
4650 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4651 This requires wrap-around arithmetic for the type of the expression.
4652 First make sure that arithmetic in this type is valid, then make sure
4653 that it wraps around. */
4654 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4655 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4656 TYPE_UNSIGNED (etype));
4657
4658 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4659 {
4660 tree utype, minv, maxv;
4661
4662 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4663 for the type in question, as we rely on this here. */
4664 utype = unsigned_type_for (etype);
4665 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4666 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4667 build_int_cst (TREE_TYPE (maxv), 1), 1);
4668 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4669
4670 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4671 minv, 1, maxv, 1)))
4672 etype = utype;
4673 else
4674 return 0;
4675 }
4676
4677 high = fold_convert_loc (loc, etype, high);
4678 low = fold_convert_loc (loc, etype, low);
4679 exp = fold_convert_loc (loc, etype, exp);
4680
4681 value = const_binop (MINUS_EXPR, high, low);
4682
4683
4684 if (POINTER_TYPE_P (etype))
4685 {
4686 if (value != 0 && !TREE_OVERFLOW (value))
4687 {
4688 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4689 return build_range_check (loc, type,
4690 fold_build_pointer_plus_loc (loc, exp, low),
4691 1, build_int_cst (etype, 0), value);
4692 }
4693 return 0;
4694 }
4695
4696 if (value != 0 && !TREE_OVERFLOW (value))
4697 return build_range_check (loc, type,
4698 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4699 1, build_int_cst (etype, 0), value);
4700
4701 return 0;
4702 }
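/* For illustration (hypothetical signed int x):

       build_range_check (loc, type, x, 1, 2, 5)
         ~~>   (unsigned) (x - 2) <= 3

   via the subtraction-and-unsigned-compare path above, while the
   degenerate requests fold further: equal bounds give "x == 2", a
   missing bound gives a single comparison, and (c >= 1 && c <= 127) on
   an unsigned char c gives "(signed char) c > 0".  */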
4703 \f
4704 /* Return the predecessor of VAL in its type, handling the infinite case. */
4705
4706 static tree
4707 range_predecessor (tree val)
4708 {
4709 tree type = TREE_TYPE (val);
4710
4711 if (INTEGRAL_TYPE_P (type)
4712 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4713 return 0;
4714 else
4715 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4716 build_int_cst (TREE_TYPE (val), 1), 0);
4717 }
4718
4719 /* Return the successor of VAL in its type, handling the infinite case. */
4720
4721 static tree
4722 range_successor (tree val)
4723 {
4724 tree type = TREE_TYPE (val);
4725
4726 if (INTEGRAL_TYPE_P (type)
4727 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4728 return 0;
4729 else
4730 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4731 build_int_cst (TREE_TYPE (val), 1), 0);
4732 }
4733
4734 /* Given two ranges, see if we can merge them into one. Return 1 if we
4735 can, 0 if we can't. Set the output range into the specified parameters. */
4736
4737 bool
4738 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4739 tree high0, int in1_p, tree low1, tree high1)
4740 {
4741 int no_overlap;
4742 int subset;
4743 int temp;
4744 tree tem;
4745 int in_p;
4746 tree low, high;
4747 int lowequal = ((low0 == 0 && low1 == 0)
4748 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4749 low0, 0, low1, 0)));
4750 int highequal = ((high0 == 0 && high1 == 0)
4751 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4752 high0, 1, high1, 1)));
4753
4754 /* Make range 0 be the range that starts first, or ends last if they
4755 start at the same value. Swap them if that is not the case. */
4756 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4757 low0, 0, low1, 0))
4758 || (lowequal
4759 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4760 high1, 1, high0, 1))))
4761 {
4762 temp = in0_p, in0_p = in1_p, in1_p = temp;
4763 tem = low0, low0 = low1, low1 = tem;
4764 tem = high0, high0 = high1, high1 = tem;
4765 }
4766
4767 /* Now flag two cases, whether the ranges are disjoint or whether the
4768 second range is totally subsumed in the first. Note that the tests
4769 below are simplified by the ones above. */
4770 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4771 high0, 1, low1, 0));
4772 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4773 high1, 1, high0, 1));
4774
4775 /* We now have four cases, depending on whether we are including or
4776 excluding the two ranges. */
4777 if (in0_p && in1_p)
4778 {
4779 /* If they don't overlap, the result is false. If the second range
4780 is a subset it is the result. Otherwise, the range is from the start
4781 of the second to the end of the first. */
4782 if (no_overlap)
4783 in_p = 0, low = high = 0;
4784 else if (subset)
4785 in_p = 1, low = low1, high = high1;
4786 else
4787 in_p = 1, low = low1, high = high0;
4788 }
4789
4790 else if (in0_p && ! in1_p)
4791 {
4792 /* If they don't overlap, the result is the first range. If they are
4793 equal, the result is false. If the second range is a subset of the
4794 first, and the ranges begin at the same place, we go from just after
4795 the end of the second range to the end of the first. If the second
4796 range is not a subset of the first, or if it is a subset and both
4797 ranges end at the same place, the range starts at the start of the
4798 first range and ends just before the second range.
4799 Otherwise, we can't describe this as a single range. */
4800 if (no_overlap)
4801 in_p = 1, low = low0, high = high0;
4802 else if (lowequal && highequal)
4803 in_p = 0, low = high = 0;
4804 else if (subset && lowequal)
4805 {
4806 low = range_successor (high1);
4807 high = high0;
4808 in_p = 1;
4809 if (low == 0)
4810 {
4811 /* We are in the weird situation where high0 > high1 but
4812 high1 has no successor. Punt. */
4813 return 0;
4814 }
4815 }
4816 else if (! subset || highequal)
4817 {
4818 low = low0;
4819 high = range_predecessor (low1);
4820 in_p = 1;
4821 if (high == 0)
4822 {
4823 /* low0 < low1 but low1 has no predecessor. Punt. */
4824 return 0;
4825 }
4826 }
4827 else
4828 return 0;
4829 }
4830
4831 else if (! in0_p && in1_p)
4832 {
4833 /* If they don't overlap, the result is the second range. If the second
4834 is a subset of the first, the result is false. Otherwise,
4835 the range starts just after the first range and ends at the
4836 end of the second. */
4837 if (no_overlap)
4838 in_p = 1, low = low1, high = high1;
4839 else if (subset || highequal)
4840 in_p = 0, low = high = 0;
4841 else
4842 {
4843 low = range_successor (high0);
4844 high = high1;
4845 in_p = 1;
4846 if (low == 0)
4847 {
4848 /* high1 > high0 but high0 has no successor. Punt. */
4849 return 0;
4850 }
4851 }
4852 }
4853
4854 else
4855 {
4856 /* The case where we are excluding both ranges. Here the complex case
4857 is if they don't overlap. In that case, the only time we have a
4858 range is if they are adjacent. If the second is a subset of the
4859 first, the result is the first. Otherwise, the range to exclude
4860 starts at the beginning of the first range and ends at the end of the
4861 second. */
4862 if (no_overlap)
4863 {
4864 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4865 range_successor (high0),
4866 1, low1, 0)))
4867 in_p = 0, low = low0, high = high1;
4868 else
4869 {
4870 /* Canonicalize - [min, x] into - [-, x]. */
4871 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4872 switch (TREE_CODE (TREE_TYPE (low0)))
4873 {
4874 case ENUMERAL_TYPE:
4875 if (TYPE_PRECISION (TREE_TYPE (low0))
4876 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4877 break;
4878 /* FALLTHROUGH */
4879 case INTEGER_TYPE:
4880 if (tree_int_cst_equal (low0,
4881 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4882 low0 = 0;
4883 break;
4884 case POINTER_TYPE:
4885 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4886 && integer_zerop (low0))
4887 low0 = 0;
4888 break;
4889 default:
4890 break;
4891 }
4892
4893 /* Canonicalize - [x, max] into - [x, -]. */
4894 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4895 switch (TREE_CODE (TREE_TYPE (high1)))
4896 {
4897 case ENUMERAL_TYPE:
4898 if (TYPE_PRECISION (TREE_TYPE (high1))
4899 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4900 break;
4901 /* FALLTHROUGH */
4902 case INTEGER_TYPE:
4903 if (tree_int_cst_equal (high1,
4904 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4905 high1 = 0;
4906 break;
4907 case POINTER_TYPE:
4908 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4909 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4910 high1, 1,
4911 build_int_cst (TREE_TYPE (high1), 1),
4912 1)))
4913 high1 = 0;
4914 break;
4915 default:
4916 break;
4917 }
4918
4919 /* The ranges might be also adjacent between the maximum and
4920 minimum values of the given type. For
4921 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4922 return + [x + 1, y - 1]. */
4923 if (low0 == 0 && high1 == 0)
4924 {
4925 low = range_successor (high0);
4926 high = range_predecessor (low1);
4927 if (low == 0 || high == 0)
4928 return 0;
4929
4930 in_p = 1;
4931 }
4932 else
4933 return 0;
4934 }
4935 }
4936 else if (subset)
4937 in_p = 0, low = low0, high = high0;
4938 else
4939 in_p = 0, low = low0, high = high1;
4940 }
4941
4942 *pin_p = in_p, *plow = low, *phigh = high;
4943 return 1;
4944 }
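/* For illustration: ANDing "+ [2, 5]" with "+ [4, 9]" keeps the overlap,
   "+ [4, 5]"; ANDing the exclusions "- [-, 3]" and "- [6, -]" (that is,
   "x > 3 && x < 6") succeeds through the final case above as "+ [4, 5]"
   too.  By contrast, excluding both [0, 1] and [5, 6] over a signed int
   leaves two separate holes, which no single range can describe, so
   merge_ranges returns 0.  */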
4945 \f
4946
4947 /* Subroutine of fold, looking inside expressions of the form
4948 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4949 of the COND_EXPR. This function is being used also to optimize
4950 A op B ? C : A, by reversing the comparison first.
4951
4952 Return a folded expression whose code is not a COND_EXPR
4953 anymore, or NULL_TREE if no folding opportunity is found. */
4954
4955 static tree
4956 fold_cond_expr_with_comparison (location_t loc, tree type,
4957 tree arg0, tree arg1, tree arg2)
4958 {
4959 enum tree_code comp_code = TREE_CODE (arg0);
4960 tree arg00 = TREE_OPERAND (arg0, 0);
4961 tree arg01 = TREE_OPERAND (arg0, 1);
4962 tree arg1_type = TREE_TYPE (arg1);
4963 tree tem;
4964
4965 STRIP_NOPS (arg1);
4966 STRIP_NOPS (arg2);
4967
4968 /* If we have A op 0 ? A : -A, consider applying the following
4969 transformations:
4970
4971 A == 0? A : -A same as -A
4972 A != 0? A : -A same as A
4973 A >= 0? A : -A same as abs (A)
4974 A > 0? A : -A same as abs (A)
4975 A <= 0? A : -A same as -abs (A)
4976 A < 0? A : -A same as -abs (A)
4977
4978 None of these transformations work for modes with signed
4979 zeros. If A is +/-0, the first two transformations will
4980 change the sign of the result (from +0 to -0, or vice
4981 versa). The last four will fix the sign of the result,
4982 even though the original expressions could be positive or
4983 negative, depending on the sign of A.
4984
4985 Note that all these transformations are correct if A is
4986 NaN, since the two alternatives (A and -A) are also NaNs. */
4987 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4988 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4989 ? real_zerop (arg01)
4990 : integer_zerop (arg01))
4991 && ((TREE_CODE (arg2) == NEGATE_EXPR
4992 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4993 /* In the case that A is of the form X-Y, '-A' (arg2) may
4994 have already been folded to Y-X, check for that. */
4995 || (TREE_CODE (arg1) == MINUS_EXPR
4996 && TREE_CODE (arg2) == MINUS_EXPR
4997 && operand_equal_p (TREE_OPERAND (arg1, 0),
4998 TREE_OPERAND (arg2, 1), 0)
4999 && operand_equal_p (TREE_OPERAND (arg1, 1),
5000 TREE_OPERAND (arg2, 0), 0))))
5001 switch (comp_code)
5002 {
5003 case EQ_EXPR:
5004 case UNEQ_EXPR:
5005 tem = fold_convert_loc (loc, arg1_type, arg1);
5006 return pedantic_non_lvalue_loc (loc,
5007 fold_convert_loc (loc, type,
5008 negate_expr (tem)));
5009 case NE_EXPR:
5010 case LTGT_EXPR:
5011 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5012 case UNGE_EXPR:
5013 case UNGT_EXPR:
5014 if (flag_trapping_math)
5015 break;
5016 /* Fall through. */
5017 case GE_EXPR:
5018 case GT_EXPR:
5019 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5020 break;
5021 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5022 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5023 case UNLE_EXPR:
5024 case UNLT_EXPR:
5025 if (flag_trapping_math)
5026 break;
/* Fall through. */
5027 case LE_EXPR:
5028 case LT_EXPR:
5029 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5030 break;
5031 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5032 return negate_expr (fold_convert_loc (loc, type, tem));
5033 default:
5034 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5035 break;
5036 }
5037
5038 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5039 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5040 both transformations are correct when A is NaN: A != 0
5041 is then true, and A == 0 is false. */
5042
5043 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5044 && integer_zerop (arg01) && integer_zerop (arg2))
5045 {
5046 if (comp_code == NE_EXPR)
5047 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5048 else if (comp_code == EQ_EXPR)
5049 return build_zero_cst (type);
5050 }
5051
5052 /* Try some transformations of A op B ? A : B.
5053
5054 A == B? A : B same as B
5055 A != B? A : B same as A
5056 A >= B? A : B same as max (A, B)
5057 A > B? A : B same as max (B, A)
5058 A <= B? A : B same as min (A, B)
5059 A < B? A : B same as min (B, A)
5060
5061 As above, these transformations don't work in the presence
5062 of signed zeros. For example, if A and B are zeros of
5063 opposite sign, the first two transformations will change
5064 the sign of the result. In the last four, the original
5065 expressions give different results for (A=+0, B=-0) and
5066 (A=-0, B=+0), but the transformed expressions do not.
5067
5068 The first two transformations are correct if either A or B
5069 is a NaN. In the first transformation, the condition will
5070 be false, and B will indeed be chosen. In the case of the
5071 second transformation, the condition A != B will be true,
5072 and A will be chosen.
5073
5074 The conversions to max() and min() are not correct if B is
5075 a number and A is not. The conditions in the original
5076 expressions will be false, so all four give B. The min()
5077 and max() versions would give a NaN instead. */
5078 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5079 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5080 /* Avoid these transformations if the COND_EXPR may be used
5081 as an lvalue in the C++ front-end. PR c++/19199. */
5082 && (in_gimple_form
5083 || VECTOR_TYPE_P (type)
5084 || (! lang_GNU_CXX ()
5085 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5086 || ! maybe_lvalue_p (arg1)
5087 || ! maybe_lvalue_p (arg2)))
5088 {
5089 tree comp_op0 = arg00;
5090 tree comp_op1 = arg01;
5091 tree comp_type = TREE_TYPE (comp_op0);
5092
5093 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5094 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5095 {
5096 comp_type = type;
5097 comp_op0 = arg1;
5098 comp_op1 = arg2;
5099 }
5100
5101 switch (comp_code)
5102 {
5103 case EQ_EXPR:
5104 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5105 case NE_EXPR:
5106 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5107 case LE_EXPR:
5108 case LT_EXPR:
5109 case UNLE_EXPR:
5110 case UNLT_EXPR:
5111 /* In C++ a ?: expression can be an lvalue, so put the
5112 operand which will be used if they are equal first
5113 so that we can convert this back to the
5114 corresponding COND_EXPR. */
5115 if (!HONOR_NANS (arg1))
5116 {
5117 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5118 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5119 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5120 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5121 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5122 comp_op1, comp_op0);
5123 return pedantic_non_lvalue_loc (loc,
5124 fold_convert_loc (loc, type, tem));
5125 }
5126 break;
5127 case GE_EXPR:
5128 case GT_EXPR:
5129 case UNGE_EXPR:
5130 case UNGT_EXPR:
5131 if (!HONOR_NANS (arg1))
5132 {
5133 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5134 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5135 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5136 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5137 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5138 comp_op1, comp_op0);
5139 return pedantic_non_lvalue_loc (loc,
5140 fold_convert_loc (loc, type, tem));
5141 }
5142 break;
5143 case UNEQ_EXPR:
5144 if (!HONOR_NANS (arg1))
5145 return pedantic_non_lvalue_loc (loc,
5146 fold_convert_loc (loc, type, arg2));
5147 break;
5148 case LTGT_EXPR:
5149 if (!HONOR_NANS (arg1))
5150 return pedantic_non_lvalue_loc (loc,
5151 fold_convert_loc (loc, type, arg1));
5152 break;
5153 default:
5154 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5155 break;
5156 }
5157 }
5158
5159 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5160 we might still be able to simplify this. For example,
5161 if C1 is one less or one more than C2, this might have started
5162 out as a MIN or MAX and been transformed by this function.
5163 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5164
5165 if (INTEGRAL_TYPE_P (type)
5166 && TREE_CODE (arg01) == INTEGER_CST
5167 && TREE_CODE (arg2) == INTEGER_CST)
5168 switch (comp_code)
5169 {
5170 case EQ_EXPR:
5171 if (TREE_CODE (arg1) == INTEGER_CST)
5172 break;
5173 /* We can replace A with C1 in this case. */
5174 arg1 = fold_convert_loc (loc, type, arg01);
5175 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5176
5177 case LT_EXPR:
5178 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5179 MIN_EXPR, to preserve the signedness of the comparison. */
5180 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5181 OEP_ONLY_CONST)
5182 && operand_equal_p (arg01,
5183 const_binop (PLUS_EXPR, arg2,
5184 build_int_cst (type, 1)),
5185 OEP_ONLY_CONST))
5186 {
5187 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5188 fold_convert_loc (loc, TREE_TYPE (arg00),
5189 arg2));
5190 return pedantic_non_lvalue_loc (loc,
5191 fold_convert_loc (loc, type, tem));
5192 }
5193 break;
5194
5195 case LE_EXPR:
5196 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5197 as above. */
5198 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5199 OEP_ONLY_CONST)
5200 && operand_equal_p (arg01,
5201 const_binop (MINUS_EXPR, arg2,
5202 build_int_cst (type, 1)),
5203 OEP_ONLY_CONST))
5204 {
5205 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5206 fold_convert_loc (loc, TREE_TYPE (arg00),
5207 arg2));
5208 return pedantic_non_lvalue_loc (loc,
5209 fold_convert_loc (loc, type, tem));
5210 }
5211 break;
5212
5213 case GT_EXPR:
5214 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5215 MAX_EXPR, to preserve the signedness of the comparison. */
5216 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5217 OEP_ONLY_CONST)
5218 && operand_equal_p (arg01,
5219 const_binop (MINUS_EXPR, arg2,
5220 build_int_cst (type, 1)),
5221 OEP_ONLY_CONST))
5222 {
5223 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5224 fold_convert_loc (loc, TREE_TYPE (arg00),
5225 arg2));
5226 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5227 }
5228 break;
5229
5230 case GE_EXPR:
5231 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5232 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5233 OEP_ONLY_CONST)
5234 && operand_equal_p (arg01,
5235 const_binop (PLUS_EXPR, arg2,
5236 build_int_cst (type, 1)),
5237 OEP_ONLY_CONST))
5238 {
5239 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5240 fold_convert_loc (loc, TREE_TYPE (arg00),
5241 arg2));
5242 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5243 }
5244 break;
5245 case NE_EXPR:
5246 break;
5247 default:
5248 gcc_unreachable ();
5249 }
5250
5251 return NULL_TREE;
5252 }
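/* For illustration (hypothetical signed operands, signed zeros and NaNs
   not honored): "x > 0 ? x : -x" folds to ABS_EXPR <x>,
   "x < y ? x : y" to a MIN_EXPR of the two, and the constant cases
   recover tests that had already been rewritten: "x < 4 ? x : 3" is
   recognized as MIN_EXPR <x, 3> through the C1 == C2 + 1 case above.  */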
5253
5254
5255 \f
5256 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5257 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5258 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5259 false) >= 2)
5260 #endif
5261
5262 /* EXP is some logical combination of boolean tests. See if we can
5263 merge it into some range test. Return the new tree if so. */
5264
5265 static tree
5266 fold_range_test (location_t loc, enum tree_code code, tree type,
5267 tree op0, tree op1)
5268 {
5269 int or_op = (code == TRUTH_ORIF_EXPR
5270 || code == TRUTH_OR_EXPR);
5271 int in0_p, in1_p, in_p;
5272 tree low0, low1, low, high0, high1, high;
5273 bool strict_overflow_p = false;
5274 tree tem, lhs, rhs;
5275 const char * const warnmsg = G_("assuming signed overflow does not occur "
5276 "when simplifying range test");
5277
5278 if (!INTEGRAL_TYPE_P (type))
5279 return 0;
5280
5281 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5282 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5283
5284 /* If this is an OR operation, invert both sides; we will invert
5285 again at the end. */
5286 if (or_op)
5287 in0_p = ! in0_p, in1_p = ! in1_p;
5288
5289 /* If both expressions are the same, if we can merge the ranges, and we
5290 can build the range test, return it or it inverted. If one of the
5291 ranges is always true or always false, consider it to be the same
5292 expression as the other. */
5293 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5294 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5295 in1_p, low1, high1)
5296 && 0 != (tem = (build_range_check (loc, type,
5297 lhs != 0 ? lhs
5298 : rhs != 0 ? rhs : integer_zero_node,
5299 in_p, low, high))))
5300 {
5301 if (strict_overflow_p)
5302 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5303 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5304 }
5305
5306 /* On machines where branches are expensive, if this is a
5307 short-circuited branch and the underlying object on both sides
5308 is the same, make a non-short-circuit operation. */
5309 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5310 && lhs != 0 && rhs != 0
5311 && (code == TRUTH_ANDIF_EXPR
5312 || code == TRUTH_ORIF_EXPR)
5313 && operand_equal_p (lhs, rhs, 0))
5314 {
5315 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5316 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5317 which cases we can't do this. */
5318 if (simple_operand_p (lhs))
5319 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5320 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5321 type, op0, op1);
5322
5323 else if (!lang_hooks.decls.global_bindings_p ()
5324 && !CONTAINS_PLACEHOLDER_P (lhs))
5325 {
5326 tree common = save_expr (lhs);
5327
5328 if (0 != (lhs = build_range_check (loc, type, common,
5329 or_op ? ! in0_p : in0_p,
5330 low0, high0))
5331 && (0 != (rhs = build_range_check (loc, type, common,
5332 or_op ? ! in1_p : in1_p,
5333 low1, high1))))
5334 {
5335 if (strict_overflow_p)
5336 fold_overflow_warning (warnmsg,
5337 WARN_STRICT_OVERFLOW_COMPARISON);
5338 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5339 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5340 type, lhs, rhs);
5341 }
5342 }
5343 }
5344
5345 return 0;
5346 }
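/* For illustration (hypothetical int x): "x >= 2 && x <= 5" merges into
   the single check "(unsigned) (x - 2) <= 3".  The ranges of
   "x == 1 || x == 4" cannot be merged, but on targets where
   LOGICAL_OP_NON_SHORT_CIRCUIT holds the TRUTH_ORIF_EXPR may still be
   rewritten as a non-branching TRUTH_OR_EXPR, x being simple enough to
   evaluate unconditionally.  */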
5347 \f
5348 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5349 bit value. Arrange things so the extra bits will be set to zero if and
5350 only if C is sign-extended to its full width. If MASK is nonzero,
5351 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5352
5353 static tree
5354 unextend (tree c, int p, int unsignedp, tree mask)
5355 {
5356 tree type = TREE_TYPE (c);
5357 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5358 tree temp;
5359
5360 if (p == modesize || unsignedp)
5361 return c;
5362
5363 /* We work by getting just the sign bit into the low-order bit, then
5364 into the high-order bit, then sign-extend. We then XOR that value
5365 with C. */
5366 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5367
5368 /* We must use a signed type in order to get an arithmetic right shift.
5369 However, we must also avoid introducing accidental overflows, so that
5370 a subsequent call to integer_zerop will work. Hence we must
5371 do the type conversion here. At this point, the constant is either
5372 zero or one, and the conversion to a signed type can never overflow.
5373 We could get an overflow if this conversion is done anywhere else. */
5374 if (TYPE_UNSIGNED (type))
5375 temp = fold_convert (signed_type_for (type), temp);
5376
5377 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5378 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5379 if (mask != 0)
5380 temp = const_binop (BIT_AND_EXPR, temp,
5381 fold_convert (TREE_TYPE (c), mask));
5382 /* If necessary, convert the type back to match the type of C. */
5383 if (TYPE_UNSIGNED (type))
5384 temp = fold_convert (type, temp);
5385
5386 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5387 }
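/* Illustrative sketch, not part of the original source: the sign-extension
   idiom unextend builds with shifts and XOR, shown in plain C for a P-bit
   value whose upper bits are already clear (hypothetical helper; assumes
   0 < p < 32). */
static inline int
example_sign_extend (unsigned int c, int p)
{
  unsigned int sign = 1u << (p - 1);     /* isolate the field's sign bit */
  return (int) (c ^ sign) - (int) sign;  /* propagate it through the upper bits */
}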
5388 \f
5389 /* For an expression that has the form
5390 (A && B) || ~B
5391 or
5392 (A || B) && ~B,
5393 we can drop one of the inner expressions and simplify to
5394 A || ~B
5395 or
5396 A && ~B
5397 LOC is the location of the resulting expression. OP is the inner
5398 logical operation; the left-hand side in the examples above, while CMPOP
5399 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5400 removing a condition that guards another, as in
5401 (A != NULL && A->...) || A == NULL
5402 which we must not transform. If RHS_ONLY is true, only eliminate the
5403 right-most operand of the inner logical operation. */
5404
5405 static tree
5406 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5407 bool rhs_only)
5408 {
5409 tree type = TREE_TYPE (cmpop);
5410 enum tree_code code = TREE_CODE (cmpop);
5411 enum tree_code truthop_code = TREE_CODE (op);
5412 tree lhs = TREE_OPERAND (op, 0);
5413 tree rhs = TREE_OPERAND (op, 1);
5414 tree orig_lhs = lhs, orig_rhs = rhs;
5415 enum tree_code rhs_code = TREE_CODE (rhs);
5416 enum tree_code lhs_code = TREE_CODE (lhs);
5417 enum tree_code inv_code;
5418
5419 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5420 return NULL_TREE;
5421
5422 if (TREE_CODE_CLASS (code) != tcc_comparison)
5423 return NULL_TREE;
5424
5425 if (rhs_code == truthop_code)
5426 {
5427 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5428 if (newrhs != NULL_TREE)
5429 {
5430 rhs = newrhs;
5431 rhs_code = TREE_CODE (rhs);
5432 }
5433 }
5434 if (lhs_code == truthop_code && !rhs_only)
5435 {
5436 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5437 if (newlhs != NULL_TREE)
5438 {
5439 lhs = newlhs;
5440 lhs_code = TREE_CODE (lhs);
5441 }
5442 }
5443
5444 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5445 if (inv_code == rhs_code
5446 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5447 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5448 return lhs;
5449 if (!rhs_only && inv_code == lhs_code
5450 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5451 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5452 return rhs;
5453 if (rhs != orig_rhs || lhs != orig_lhs)
5454 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5455 lhs, rhs);
5456 return NULL_TREE;
5457 }
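/* Illustrative sketch, not part of the original source: the shape of the
   rewrite above on plain C operands (hypothetical helper). Since b <= 0 is
   the inverse of the inner b > 0, that inner test is redundant and can be
   dropped. */
static inline int
example_drop_opposite_arm (int a, int b)
{
  /* (a > 0 && b > 0) || b <= 0 simplifies to: */
  return a > 0 || b <= 0;
}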
5458
5459 /* Find ways of folding logical expressions of LHS and RHS:
5460 Try to merge two comparisons to the same innermost item.
5461 Look for range tests like "ch >= '0' && ch <= '9'".
5462 Look for combinations of simple terms on machines with expensive branches
5463 and evaluate the RHS unconditionally.
5464
5465 For example, if we have p->a == 2 && p->b == 4 and we can make an
5466 object large enough to span both A and B, we can do this with a comparison
5467 against the object ANDed with a mask.
5468
5469 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5470 operations to do this with one comparison.
5471
5472 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5473 function and the one above.
5474
5475 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5476 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5477
5478 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
5479 two operands.
5480
5481 We return the simplified tree or 0 if no optimization is possible. */
5482
5483 static tree
5484 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5485 tree lhs, tree rhs)
5486 {
5487 /* If this is the "or" of two comparisons, we can do something if
5488 the comparisons are NE_EXPR. If this is the "and", we can do something
5489 if the comparisons are EQ_EXPR. I.e.,
5490 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5491
5492 WANTED_CODE is this operation code. For single bit fields, we can
5493 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5494 comparison for one-bit fields. */
5495
5496 enum tree_code wanted_code;
5497 enum tree_code lcode, rcode;
5498 tree ll_arg, lr_arg, rl_arg, rr_arg;
5499 tree ll_inner, lr_inner, rl_inner, rr_inner;
5500 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5501 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5502 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5503 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5504 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5505 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5506 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5507 machine_mode lnmode, rnmode;
5508 tree ll_mask, lr_mask, rl_mask, rr_mask;
5509 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5510 tree l_const, r_const;
5511 tree lntype, rntype, result;
5512 HOST_WIDE_INT first_bit, end_bit;
5513 int volatilep;
5514
5515 /* Start by getting the comparison codes. Fail if anything is volatile.
5516 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5517 it were surrounded with a NE_EXPR. */
5518
5519 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5520 return 0;
5521
5522 lcode = TREE_CODE (lhs);
5523 rcode = TREE_CODE (rhs);
5524
5525 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5526 {
5527 lhs = build2 (NE_EXPR, truth_type, lhs,
5528 build_int_cst (TREE_TYPE (lhs), 0));
5529 lcode = NE_EXPR;
5530 }
5531
5532 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5533 {
5534 rhs = build2 (NE_EXPR, truth_type, rhs,
5535 build_int_cst (TREE_TYPE (rhs), 0));
5536 rcode = NE_EXPR;
5537 }
5538
5539 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5540 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5541 return 0;
5542
5543 ll_arg = TREE_OPERAND (lhs, 0);
5544 lr_arg = TREE_OPERAND (lhs, 1);
5545 rl_arg = TREE_OPERAND (rhs, 0);
5546 rr_arg = TREE_OPERAND (rhs, 1);
5547
5548 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5549 if (simple_operand_p (ll_arg)
5550 && simple_operand_p (lr_arg))
5551 {
5552 if (operand_equal_p (ll_arg, rl_arg, 0)
5553 && operand_equal_p (lr_arg, rr_arg, 0))
5554 {
5555 result = combine_comparisons (loc, code, lcode, rcode,
5556 truth_type, ll_arg, lr_arg);
5557 if (result)
5558 return result;
5559 }
5560 else if (operand_equal_p (ll_arg, rr_arg, 0)
5561 && operand_equal_p (lr_arg, rl_arg, 0))
5562 {
5563 result = combine_comparisons (loc, code, lcode,
5564 swap_tree_comparison (rcode),
5565 truth_type, ll_arg, lr_arg);
5566 if (result)
5567 return result;
5568 }
5569 }
5570
5571 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5572 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5573
5574 /* If the RHS can be evaluated unconditionally and its operands are
5575 simple, it wins to evaluate the RHS unconditionally on machines
5576 with expensive branches. In this case, this isn't a comparison
5577 that can be merged. */
5578
5579 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5580 false) >= 2
5581 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5582 && simple_operand_p (rl_arg)
5583 && simple_operand_p (rr_arg))
5584 {
5585 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5586 if (code == TRUTH_OR_EXPR
5587 && lcode == NE_EXPR && integer_zerop (lr_arg)
5588 && rcode == NE_EXPR && integer_zerop (rr_arg)
5589 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5590 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5591 return build2_loc (loc, NE_EXPR, truth_type,
5592 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5593 ll_arg, rl_arg),
5594 build_int_cst (TREE_TYPE (ll_arg), 0));
5595
5596 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5597 if (code == TRUTH_AND_EXPR
5598 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5599 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5600 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5601 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5602 return build2_loc (loc, EQ_EXPR, truth_type,
5603 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5604 ll_arg, rl_arg),
5605 build_int_cst (TREE_TYPE (ll_arg), 0));
5606 }
5607
5608 /* See if the comparisons can be merged. Then get all the parameters for
5609 each side. */
5610
5611 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5612 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5613 return 0;
5614
5615 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5616 volatilep = 0;
5617 ll_inner = decode_field_reference (loc, ll_arg,
5618 &ll_bitsize, &ll_bitpos, &ll_mode,
5619 &ll_unsignedp, &ll_reversep, &volatilep,
5620 &ll_mask, &ll_and_mask);
5621 lr_inner = decode_field_reference (loc, lr_arg,
5622 &lr_bitsize, &lr_bitpos, &lr_mode,
5623 &lr_unsignedp, &lr_reversep, &volatilep,
5624 &lr_mask, &lr_and_mask);
5625 rl_inner = decode_field_reference (loc, rl_arg,
5626 &rl_bitsize, &rl_bitpos, &rl_mode,
5627 &rl_unsignedp, &rl_reversep, &volatilep,
5628 &rl_mask, &rl_and_mask);
5629 rr_inner = decode_field_reference (loc, rr_arg,
5630 &rr_bitsize, &rr_bitpos, &rr_mode,
5631 &rr_unsignedp, &rr_reversep, &volatilep,
5632 &rr_mask, &rr_and_mask);
5633
5634 /* The inner operation on the lhs of each comparison must be the same
5635 if we are to be able to do anything.
5636 Then see if we have constants. If not, the same must be true for
5637 the rhs's. */
5638 if (volatilep
5639 || ll_reversep != rl_reversep
5640 || ll_inner == 0 || rl_inner == 0
5641 || ! operand_equal_p (ll_inner, rl_inner, 0))
5642 return 0;
5643
5644 if (TREE_CODE (lr_arg) == INTEGER_CST
5645 && TREE_CODE (rr_arg) == INTEGER_CST)
5646 {
5647 l_const = lr_arg, r_const = rr_arg;
5648 lr_reversep = ll_reversep;
5649 }
5650 else if (lr_reversep != rr_reversep
5651 || lr_inner == 0 || rr_inner == 0
5652 || ! operand_equal_p (lr_inner, rr_inner, 0))
5653 return 0;
5654 else
5655 l_const = r_const = 0;
5656
5657 /* If either comparison code is not correct for our logical operation,
5658 fail. However, we can convert a one-bit comparison against zero into
5659 the opposite comparison against that bit being set in the field. */
5660
5661 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5662 if (lcode != wanted_code)
5663 {
5664 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5665 {
5666 /* Make the left operand unsigned, since we are only interested
5667 in the value of one bit. Otherwise we are doing the wrong
5668 thing below. */
5669 ll_unsignedp = 1;
5670 l_const = ll_mask;
5671 }
5672 else
5673 return 0;
5674 }
5675
5676 /* This is analogous to the code for l_const above. */
5677 if (rcode != wanted_code)
5678 {
5679 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5680 {
5681 rl_unsignedp = 1;
5682 r_const = rl_mask;
5683 }
5684 else
5685 return 0;
5686 }
5687
5688 /* See if we can find a mode that contains both fields being compared on
5689 the left. If we can't, fail. Otherwise, update all constants and masks
5690 to be relative to a field of that size. */
5691 first_bit = MIN (ll_bitpos, rl_bitpos);
5692 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5693 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5694 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5695 volatilep);
5696 if (lnmode == VOIDmode)
5697 return 0;
5698
5699 lnbitsize = GET_MODE_BITSIZE (lnmode);
5700 lnbitpos = first_bit & ~ (lnbitsize - 1);
5701 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5702 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5703
5704 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5705 {
5706 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5707 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5708 }
5709
5710 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5711 size_int (xll_bitpos));
5712 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5713 size_int (xrl_bitpos));
5714
5715 if (l_const)
5716 {
5717 l_const = fold_convert_loc (loc, lntype, l_const);
5718 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5719 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5720 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5721 fold_build1_loc (loc, BIT_NOT_EXPR,
5722 lntype, ll_mask))))
5723 {
5724 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5725
5726 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5727 }
5728 }
5729 if (r_const)
5730 {
5731 r_const = fold_convert_loc (loc, lntype, r_const);
5732 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5733 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5734 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5735 fold_build1_loc (loc, BIT_NOT_EXPR,
5736 lntype, rl_mask))))
5737 {
5738 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5739
5740 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5741 }
5742 }
5743
5744 /* If the right sides are not constant, do the same for them. Also,
5745 disallow this optimization if a size or signedness mismatch occurs
5746 between the left and right sides. */
5747 if (l_const == 0)
5748 {
5749 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5750 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5751 /* Make sure the two fields on the right
5752 correspond to the left without being swapped. */
5753 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5754 return 0;
5755
5756 first_bit = MIN (lr_bitpos, rr_bitpos);
5757 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5758 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5759 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5760 volatilep);
5761 if (rnmode == VOIDmode)
5762 return 0;
5763
5764 rnbitsize = GET_MODE_BITSIZE (rnmode);
5765 rnbitpos = first_bit & ~ (rnbitsize - 1);
5766 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5767 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5768
5769 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5770 {
5771 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5772 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5773 }
5774
5775 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5776 rntype, lr_mask),
5777 size_int (xlr_bitpos));
5778 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5779 rntype, rr_mask),
5780 size_int (xrr_bitpos));
5781
5782 /* Make a mask that corresponds to both fields being compared.
5783 Do this for both items being compared. If the operands are the
5784 same size and the bits being compared are in the same position
5785 then we can do this by masking both and comparing the masked
5786 results. */
5787 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5788 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5789 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5790 {
5791 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5792 ll_unsignedp || rl_unsignedp, ll_reversep);
5793 if (! all_ones_mask_p (ll_mask, lnbitsize))
5794 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5795
5796 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5797 lr_unsignedp || rr_unsignedp, lr_reversep);
5798 if (! all_ones_mask_p (lr_mask, rnbitsize))
5799 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5800
5801 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5802 }
5803
5804 /* There is still another way we can do something: If both pairs of
5805 fields being compared are adjacent, we may be able to make a wider
5806 field containing them both.
5807
5808 Note that we still must mask the lhs/rhs expressions. Furthermore,
5809 the mask must be shifted to account for the shift done by
5810 make_bit_field_ref. */
5811 if ((ll_bitsize + ll_bitpos == rl_bitpos
5812 && lr_bitsize + lr_bitpos == rr_bitpos)
5813 || (ll_bitpos == rl_bitpos + rl_bitsize
5814 && lr_bitpos == rr_bitpos + rr_bitsize))
5815 {
5816 tree type;
5817
5818 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5819 ll_bitsize + rl_bitsize,
5820 MIN (ll_bitpos, rl_bitpos),
5821 ll_unsignedp, ll_reversep);
5822 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5823 lr_bitsize + rr_bitsize,
5824 MIN (lr_bitpos, rr_bitpos),
5825 lr_unsignedp, lr_reversep);
5826
5827 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5828 size_int (MIN (xll_bitpos, xrl_bitpos)));
5829 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5830 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5831
5832 /* Convert to the smaller type before masking out unwanted bits. */
5833 type = lntype;
5834 if (lntype != rntype)
5835 {
5836 if (lnbitsize > rnbitsize)
5837 {
5838 lhs = fold_convert_loc (loc, rntype, lhs);
5839 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5840 type = rntype;
5841 }
5842 else if (lnbitsize < rnbitsize)
5843 {
5844 rhs = fold_convert_loc (loc, lntype, rhs);
5845 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5846 type = lntype;
5847 }
5848 }
5849
5850 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5851 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5852
5853 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5854 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5855
5856 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5857 }
5858
5859 return 0;
5860 }
5861
5862 /* Handle the case of comparisons with constants. If there is something in
5863 common between the masks, those bits of the constants must be the same.
5864 If not, the condition is always false. Test for this to avoid generating
5865 incorrect code below. */
5866 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5867 if (! integer_zerop (result)
5868 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5869 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5870 {
5871 if (wanted_code == NE_EXPR)
5872 {
5873 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5874 return constant_boolean_node (true, truth_type);
5875 }
5876 else
5877 {
5878 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5879 return constant_boolean_node (false, truth_type);
5880 }
5881 }
5882
5883 /* Construct the expression we will return. First get the component
5884 reference we will make. Unless the mask is all ones for the width of
5885 that field, perform the mask operation. Then compare with the
5886 merged constant. */
5887 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5888 ll_unsignedp || rl_unsignedp, ll_reversep);
5889
5890 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5891 if (! all_ones_mask_p (ll_mask, lnbitsize))
5892 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5893
5894 return build2_loc (loc, wanted_code, truth_type, result,
5895 const_binop (BIT_IOR_EXPR, l_const, r_const));
5896 }
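/* Illustrative sketch, not part of the original source: two of the rewrites
   above on plain C operands (hypothetical helper names). Each replaces a
   pair of tests, and a possible extra branch, with one test of a bitwise
   IOR. */
static inline int
example_or_of_ne (int a, int b)
{
  /* (a != 0) || (b != 0) becomes: */
  return (a | b) != 0;
}

static inline int
example_and_of_eq (int a, int b)
{
  /* (a == 0) && (b == 0) becomes: */
  return (a | b) == 0;
}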
5897 \f
5898 /* Optimize a comparison (CODE) of a MIN_EXPR or MAX_EXPR OP0 against
5899 the constant OP1. */
5900
5901 static tree
5902 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5903 tree op0, tree op1)
5904 {
5905 tree arg0 = op0;
5906 enum tree_code op_code;
5907 tree comp_const;
5908 tree minmax_const;
5909 int consts_equal, consts_lt;
5910 tree inner;
5911
5912 STRIP_SIGN_NOPS (arg0);
5913
5914 op_code = TREE_CODE (arg0);
5915 minmax_const = TREE_OPERAND (arg0, 1);
5916 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5917 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5918 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5919 inner = TREE_OPERAND (arg0, 0);
5920
5921 /* If something does not permit us to optimize, return the original tree. */
5922 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5923 || TREE_CODE (comp_const) != INTEGER_CST
5924 || TREE_OVERFLOW (comp_const)
5925 || TREE_CODE (minmax_const) != INTEGER_CST
5926 || TREE_OVERFLOW (minmax_const))
5927 return NULL_TREE;
5928
5929 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5930 and GT_EXPR, doing the rest with recursive calls using logical
5931 simplifications. */
5932 switch (code)
5933 {
5934 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5935 {
5936 tree tem
5937 = optimize_minmax_comparison (loc,
5938 invert_tree_comparison (code, false),
5939 type, op0, op1);
5940 if (tem)
5941 return invert_truthvalue_loc (loc, tem);
5942 return NULL_TREE;
5943 }
5944
5945 case GE_EXPR:
5946 return
5947 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5948 optimize_minmax_comparison
5949 (loc, EQ_EXPR, type, arg0, comp_const),
5950 optimize_minmax_comparison
5951 (loc, GT_EXPR, type, arg0, comp_const));
5952
5953 case EQ_EXPR:
5954 if (op_code == MAX_EXPR && consts_equal)
5955 /* MAX (X, 0) == 0 -> X <= 0 */
5956 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5957
5958 else if (op_code == MAX_EXPR && consts_lt)
5959 /* MAX (X, 0) == 5 -> X == 5 */
5960 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5961
5962 else if (op_code == MAX_EXPR)
5963 /* MAX (X, 0) == -1 -> false */
5964 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5965
5966 else if (consts_equal)
5967 /* MIN (X, 0) == 0 -> X >= 0 */
5968 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5969
5970 else if (consts_lt)
5971 /* MIN (X, 0) == 5 -> false */
5972 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5973
5974 else
5975 /* MIN (X, 0) == -1 -> X == -1 */
5976 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5977
5978 case GT_EXPR:
5979 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5980 /* MAX (X, 0) > 0 -> X > 0
5981 MAX (X, 0) > 5 -> X > 5 */
5982 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5983
5984 else if (op_code == MAX_EXPR)
5985 /* MAX (X, 0) > -1 -> true */
5986 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5987
5988 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5989 /* MIN (X, 0) > 0 -> false
5990 MIN (X, 0) > 5 -> false */
5991 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5992
5993 else
5994 /* MIN (X, 0) > -1 -> X > -1 */
5995 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5996
5997 default:
5998 return NULL_TREE;
5999 }
6000 }
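/* Illustrative sketch, not part of the original source: two of the MIN/MAX
   folds above on plain C operands (hypothetical helpers). The MAX operand
   disappears because comparing it against the constant has a known
   result. */
static inline int
example_max_gt (int x)
{
  /* MAX (x, 0) > 5 folds to: */
  return x > 5;
}

static inline int
example_max_eq_zero (int x)
{
  /* MAX (x, 0) == 0 folds to: */
  return x <= 0;
}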
6001 \f
6002 /* T is an integer expression that is being multiplied, divided, or taken a
6003 modulus (CODE says which and what kind of divide or modulus) by a
6004 constant C. See if we can eliminate that operation by folding it with
6005 other operations already in T. WIDE_TYPE, if non-null, is a type that
6006 should be used for the computation if wider than our type.
6007
6008 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6009 (X * 2) + (Y * 4). We must, however, be assured that either the original
6010 expression would not overflow or that overflow is undefined for the type
6011 in the language in question.
6012
6013 If we return a non-null expression, it is an equivalent form of the
6014 original computation, but need not be in the original type.
6015
6016 We set *STRICT_OVERFLOW_P to true if the return value depends on
6017 signed overflow being undefined. Otherwise we do not change
6018 *STRICT_OVERFLOW_P. */
6019
6020 static tree
6021 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6022 bool *strict_overflow_p)
6023 {
6024 /* To avoid exponential search depth, refuse to allow recursion past
6025 three levels. Beyond that (1) it's highly unlikely that we'll find
6026 something interesting and (2) we've probably processed it before
6027 when we built the inner expression. */
6028
6029 static int depth;
6030 tree ret;
6031
6032 if (depth > 3)
6033 return NULL;
6034
6035 depth++;
6036 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6037 depth--;
6038
6039 return ret;
6040 }
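/* Illustrative sketch, not part of the original source: the example from
   the comment above in plain C (hypothetical helper). The fold is valid
   only because signed overflow is undefined here, or because the original
   expression is known not to overflow. */
static inline int
example_extract_muldiv (int x, int y)
{
  /* ((x * 8) + (y * 16)) / 4 simplifies to: */
  return x * 2 + y * 4;
}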
6041
6042 static tree
6043 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6044 bool *strict_overflow_p)
6045 {
6046 tree type = TREE_TYPE (t);
6047 enum tree_code tcode = TREE_CODE (t);
6048 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6049 > GET_MODE_SIZE (TYPE_MODE (type)))
6050 ? wide_type : type);
6051 tree t1, t2;
6052 int same_p = tcode == code;
6053 tree op0 = NULL_TREE, op1 = NULL_TREE;
6054 bool sub_strict_overflow_p;
6055
6056 /* Don't deal with constants of zero here; they confuse the code below. */
6057 if (integer_zerop (c))
6058 return NULL_TREE;
6059
6060 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6061 op0 = TREE_OPERAND (t, 0);
6062
6063 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6064 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6065
6066 /* Note that we need not handle conditional operations here since fold
6067 already handles those cases. So just do arithmetic here. */
6068 switch (tcode)
6069 {
6070 case INTEGER_CST:
6071 /* For a constant, we can always simplify if we are a multiply
6072 or (for divide and modulus) if it is a multiple of our constant. */
6073 if (code == MULT_EXPR
6074 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6075 {
6076 tree tem = const_binop (code, fold_convert (ctype, t),
6077 fold_convert (ctype, c));
6078 /* If the multiplication overflowed to INT_MIN then we lost sign
6079 information on it and a subsequent multiplication might
6080 spuriously overflow. See PR68142. */
6081 if (TREE_OVERFLOW (tem)
6082 && wi::eq_p (tem, wi::min_value (TYPE_PRECISION (ctype), SIGNED)))
6083 return NULL_TREE;
6084 return tem;
6085 }
6086 break;
6087
6088 CASE_CONVERT: case NON_LVALUE_EXPR:
6089 /* If op0 is an expression ... */
6090 if ((COMPARISON_CLASS_P (op0)
6091 || UNARY_CLASS_P (op0)
6092 || BINARY_CLASS_P (op0)
6093 || VL_EXP_CLASS_P (op0)
6094 || EXPRESSION_CLASS_P (op0))
6095 /* ... and has wrapping overflow, and its type is smaller
6096 than ctype, then we cannot pass through as widening. */
6097 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6098 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6099 && (TYPE_PRECISION (ctype)
6100 > TYPE_PRECISION (TREE_TYPE (op0))))
6101 /* ... or this is a truncation (t is narrower than op0),
6102 then we cannot pass through this narrowing. */
6103 || (TYPE_PRECISION (type)
6104 < TYPE_PRECISION (TREE_TYPE (op0)))
6105 /* ... or signedness changes for division or modulus,
6106 then we cannot pass through this conversion. */
6107 || (code != MULT_EXPR
6108 && (TYPE_UNSIGNED (ctype)
6109 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6110 /* ... or has undefined overflow while the converted to
6111 type has not, we cannot do the operation in the inner type
6112 as that would introduce undefined overflow. */
6113 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6114 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6115 && !TYPE_OVERFLOW_UNDEFINED (type))))
6116 break;
6117
6118 /* Pass the constant down and see if we can make a simplification. If
6119 we can, replace this expression with the inner simplification for
6120 possible later conversion to our or some other type. */
6121 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6122 && TREE_CODE (t2) == INTEGER_CST
6123 && !TREE_OVERFLOW (t2)
6124 && (0 != (t1 = extract_muldiv (op0, t2, code,
6125 code == MULT_EXPR
6126 ? ctype : NULL_TREE,
6127 strict_overflow_p))))
6128 return t1;
6129 break;
6130
6131 case ABS_EXPR:
6132 /* If widening the type changes it from signed to unsigned, then we
6133 must avoid building ABS_EXPR itself as unsigned. */
6134 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6135 {
6136 tree cstype = (*signed_type_for) (ctype);
6137 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6138 != 0)
6139 {
6140 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6141 return fold_convert (ctype, t1);
6142 }
6143 break;
6144 }
6145 /* If the constant is negative, we cannot simplify this. */
6146 if (tree_int_cst_sgn (c) == -1)
6147 break;
6148 /* FALLTHROUGH */
6149 case NEGATE_EXPR:
6150 /* For division and modulus, type can't be unsigned, as e.g.
6151 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6152 For signed types, even with wrapping overflow, this is fine. */
6153 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6154 break;
6155 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6156 != 0)
6157 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6158 break;
6159
6160 case MIN_EXPR: case MAX_EXPR:
6161 /* If widening the type changes the signedness, then we can't perform
6162 this optimization as that changes the result. */
6163 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6164 break;
6165
6166 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6167 sub_strict_overflow_p = false;
6168 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6169 &sub_strict_overflow_p)) != 0
6170 && (t2 = extract_muldiv (op1, c, code, wide_type,
6171 &sub_strict_overflow_p)) != 0)
6172 {
6173 if (tree_int_cst_sgn (c) < 0)
6174 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6175 if (sub_strict_overflow_p)
6176 *strict_overflow_p = true;
6177 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6178 fold_convert (ctype, t2));
6179 }
6180 break;
6181
6182 case LSHIFT_EXPR: case RSHIFT_EXPR:
6183 /* If the second operand is constant, this is a multiplication
6184 or floor division by a power of two, so we can treat it that
6185 way unless the multiplier or divisor overflows. Signed
6186 left-shift overflow is implementation-defined rather than
6187 undefined in C90, so do not convert signed left shift into
6188 multiplication. */
6189 if (TREE_CODE (op1) == INTEGER_CST
6190 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6191 /* const_binop may not detect overflow correctly,
6192 so check for it explicitly here. */
6193 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6194 && 0 != (t1 = fold_convert (ctype,
6195 const_binop (LSHIFT_EXPR,
6196 size_one_node,
6197 op1)))
6198 && !TREE_OVERFLOW (t1))
6199 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6200 ? MULT_EXPR : FLOOR_DIV_EXPR,
6201 ctype,
6202 fold_convert (ctype, op0),
6203 t1),
6204 c, code, wide_type, strict_overflow_p);
6205 break;
6206
6207 case PLUS_EXPR: case MINUS_EXPR:
6208 /* See if we can eliminate the operation on both sides. If we can, we
6209 can return a new PLUS or MINUS. If we can't, the only remaining
6210 cases where we can do anything are if the second operand is a
6211 constant. */
6212 sub_strict_overflow_p = false;
6213 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6214 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6215 if (t1 != 0 && t2 != 0
6216 && (code == MULT_EXPR
6217 /* If not multiplication, we can only do this if both operands
6218 are divisible by c. */
6219 || (multiple_of_p (ctype, op0, c)
6220 && multiple_of_p (ctype, op1, c))))
6221 {
6222 if (sub_strict_overflow_p)
6223 *strict_overflow_p = true;
6224 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6225 fold_convert (ctype, t2));
6226 }
6227
6228 /* If this was a subtraction, negate OP1 and set it to be an addition.
6229 This simplifies the logic below. */
6230 if (tcode == MINUS_EXPR)
6231 {
6232 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6233 /* If OP1 was not easily negatable, the constant may be OP0. */
6234 if (TREE_CODE (op0) == INTEGER_CST)
6235 {
6236 std::swap (op0, op1);
6237 std::swap (t1, t2);
6238 }
6239 }
6240
6241 if (TREE_CODE (op1) != INTEGER_CST)
6242 break;
6243
6244 /* If either OP1 or C are negative, this optimization is not safe for
6245 some of the division and remainder types while for others we need
6246 to change the code. */
6247 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6248 {
6249 if (code == CEIL_DIV_EXPR)
6250 code = FLOOR_DIV_EXPR;
6251 else if (code == FLOOR_DIV_EXPR)
6252 code = CEIL_DIV_EXPR;
6253 else if (code != MULT_EXPR
6254 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6255 break;
6256 }
6257
6258 /* If it's a multiply, or a division/modulus whose second operand is a
6259 multiple of our constant, do the operation and verify it doesn't overflow. */
6260 if (code == MULT_EXPR
6261 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6262 {
6263 op1 = const_binop (code, fold_convert (ctype, op1),
6264 fold_convert (ctype, c));
6265 /* We allow the constant to overflow with wrapping semantics. */
6266 if (op1 == 0
6267 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6268 break;
6269 }
6270 else
6271 break;
6272
6273 /* If we have an unsigned type, we cannot widen the operation since it
6274 will change the result if the original computation overflowed. */
6275 if (TYPE_UNSIGNED (ctype) && ctype != type)
6276 break;
6277
6278 /* If we were able to eliminate our operation from the first side,
6279 apply our operation to the second side and reform the PLUS. */
6280 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6281 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6282
6283 /* The last case is if we are a multiply. In that case, we can
6284 apply the distributive law to commute the multiply and addition
6285 if the multiplication of the constants doesn't overflow
6286 and overflow is defined. With undefined overflow
6287 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6288 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6289 return fold_build2 (tcode, ctype,
6290 fold_build2 (code, ctype,
6291 fold_convert (ctype, op0),
6292 fold_convert (ctype, c)),
6293 op1);
6294
6295 break;
6296
6297 case MULT_EXPR:
6298 /* We have a special case here if we are doing something like
6299 (C * 8) % 4 since we know that's zero. */
6300 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6301 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6302 /* If the multiplication can overflow we cannot optimize this. */
6303 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6304 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6305 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6306 {
6307 *strict_overflow_p = true;
6308 return omit_one_operand (type, integer_zero_node, op0);
6309 }
6310
6311 /* ... fall through ... */
6312
6313 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6314 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6315 /* If we can extract our operation from the LHS, do so and return a
6316 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6317 do something only if the second operand is a constant. */
6318 if (same_p
6319 && (t1 = extract_muldiv (op0, c, code, wide_type,
6320 strict_overflow_p)) != 0)
6321 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6322 fold_convert (ctype, op1));
6323 else if (tcode == MULT_EXPR && code == MULT_EXPR
6324 && (t1 = extract_muldiv (op1, c, code, wide_type,
6325 strict_overflow_p)) != 0)
6326 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6327 fold_convert (ctype, t1));
6328 else if (TREE_CODE (op1) != INTEGER_CST)
6329 return 0;
6330
6331 /* If these are the same operation types, we can associate them
6332 assuming no overflow. */
6333 if (tcode == code)
6334 {
6335 bool overflow_p = false;
6336 bool overflow_mul_p;
6337 signop sign = TYPE_SIGN (ctype);
6338 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6339 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6340 if (overflow_mul_p
6341 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6342 overflow_p = true;
6343 if (!overflow_p)
6344 {
6345 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6346 TYPE_SIGN (TREE_TYPE (op1)));
6347 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6348 wide_int_to_tree (ctype, mul));
6349 }
6350 }
6351
6352 /* If these operations "cancel" each other, we have the main
6353 optimizations of this pass, which occur when either constant is a
6354 multiple of the other, in which case we replace this with an
6355 operation of either CODE or TCODE.
6356
6357 If we have an unsigned type, we cannot do this since it will change
6358 the result if the original computation overflowed. */
6359 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6360 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6361 || (tcode == MULT_EXPR
6362 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6363 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6364 && code != MULT_EXPR)))
6365 {
6366 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6367 {
6368 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6369 *strict_overflow_p = true;
6370 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6371 fold_convert (ctype,
6372 const_binop (TRUNC_DIV_EXPR,
6373 op1, c)));
6374 }
6375 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6376 {
6377 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6378 *strict_overflow_p = true;
6379 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6380 fold_convert (ctype,
6381 const_binop (TRUNC_DIV_EXPR,
6382 c, op1)));
6383 }
6384 }
6385 break;
6386
6387 default:
6388 break;
6389 }
6390
6391 return 0;
6392 }
6393 \f
6394 /* Return a node which has the indicated constant VALUE (either 0 or
6395 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6396 and is of the indicated TYPE. */
6397
6398 tree
6399 constant_boolean_node (bool value, tree type)
6400 {
6401 if (type == integer_type_node)
6402 return value ? integer_one_node : integer_zero_node;
6403 else if (type == boolean_type_node)
6404 return value ? boolean_true_node : boolean_false_node;
6405 else if (TREE_CODE (type) == VECTOR_TYPE)
6406 return build_vector_from_val (type,
6407 build_int_cst (TREE_TYPE (type),
6408 value ? -1 : 0));
6409 else
6410 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6411 }
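/* Illustrative sketch, not part of the original source: what the vector
   case above builds, shown with a GNU C vector extension (the typedef and
   names are hypothetical). Vector "true" is all-ones per element, not 1. */
typedef int example_v4si __attribute__ ((vector_size (16)));
static const example_v4si example_vector_true = { -1, -1, -1, -1 };
static const example_v4si example_vector_false = { 0, 0, 0, 0 };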
6412
6413
6414 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6415 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6416 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6417 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6418 COND is the first argument to CODE; otherwise (as in the example
6419 given here), it is the second argument. TYPE is the type of the
6420 original expression. Return NULL_TREE if no simplification is
6421 possible. */
6422
6423 static tree
6424 fold_binary_op_with_conditional_arg (location_t loc,
6425 enum tree_code code,
6426 tree type, tree op0, tree op1,
6427 tree cond, tree arg, int cond_first_p)
6428 {
6429 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6430 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6431 tree test, true_value, false_value;
6432 tree lhs = NULL_TREE;
6433 tree rhs = NULL_TREE;
6434 enum tree_code cond_code = COND_EXPR;
6435
6436 if (TREE_CODE (cond) == COND_EXPR
6437 || TREE_CODE (cond) == VEC_COND_EXPR)
6438 {
6439 test = TREE_OPERAND (cond, 0);
6440 true_value = TREE_OPERAND (cond, 1);
6441 false_value = TREE_OPERAND (cond, 2);
6442 /* If this operand throws an exception (its type is void), then it
6443 does not make sense to try to perform a logical or arithmetic
6444 operation involving it. */
6445 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6446 lhs = true_value;
6447 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6448 rhs = false_value;
6449 }
6450 else
6451 {
6452 tree testtype = TREE_TYPE (cond);
6453 test = cond;
6454 true_value = constant_boolean_node (true, testtype);
6455 false_value = constant_boolean_node (false, testtype);
6456 }
6457
6458 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6459 cond_code = VEC_COND_EXPR;
6460
6461 /* This transformation is only worthwhile if we don't have to wrap ARG
6462 in a SAVE_EXPR and the operation can be simplified without recursing
6463 on at least one of the branches once it's pushed inside the COND_EXPR. */
6464 if (!TREE_CONSTANT (arg)
6465 && (TREE_SIDE_EFFECTS (arg)
6466 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6467 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6468 return NULL_TREE;
6469
6470 arg = fold_convert_loc (loc, arg_type, arg);
6471 if (lhs == 0)
6472 {
6473 true_value = fold_convert_loc (loc, cond_type, true_value);
6474 if (cond_first_p)
6475 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6476 else
6477 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6478 }
6479 if (rhs == 0)
6480 {
6481 false_value = fold_convert_loc (loc, cond_type, false_value);
6482 if (cond_first_p)
6483 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6484 else
6485 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6486 }
6487
6488 /* Check that we have simplified at least one of the branches. */
6489 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6490 return NULL_TREE;
6491
6492 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6493 }
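/* Illustrative sketch, not part of the original source: the transformation
   above on plain C operands (hypothetical helper). Pushing the addition
   into both arms pays off because each arm then folds to a constant
   offset. */
static inline int
example_push_into_cond (int a, int x, int y)
{
  /* a + (x < y) becomes (x < y) ? (a + 1) : (a + 0). */
  return x < y ? a + 1 : a;
}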
6494
6495 \f
6496 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6497
6498 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6499 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6500 ADDEND is the same as X.
6501
6502 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6503 and finite. The problematic cases are when X is zero, and its mode
6504 has signed zeros. In the case of rounding towards -infinity,
6505 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6506 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6507
6508 bool
6509 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6510 {
6511 if (!real_zerop (addend))
6512 return false;
6513
6514 /* Don't allow the fold with -fsignaling-nans. */
6515 if (HONOR_SNANS (element_mode (type)))
6516 return false;
6517
6518 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6519 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6520 return true;
6521
6522 /* In a vector or complex, we would need to check the sign of all zeros. */
6523 if (TREE_CODE (addend) != REAL_CST)
6524 return false;
6525
6526 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6527 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6528 negate = !negate;
6529
6530 /* The mode has signed zeros, and we have to honor their sign.
6531 In this situation, there is only one case we can return true for.
6532 X - 0 is the same as X unless rounding towards -infinity is
6533 supported. */
6534 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6535 }
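/* Illustrative sketch, not part of the original source: why signed zeros
   block the fold (hypothetical helper). Under the default rounding mode,
   adding +0.0 maps -0.0 to +0.0, so X + 0.0 is not an identity on X. */
static inline double
example_add_zero (double x)
{
  return x + 0.0;  /* example_add_zero (-0.0) yields +0.0, not -0.0 */
}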
6536
6537 /* Subroutine of fold() that optimizes comparisons of a division by
6538 a nonzero integer constant against an integer constant, i.e.
6539 X/C1 op C2.
6540
6541 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6542 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6543 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6544
6545 The function returns the constant folded tree if a simplification
6546 can be made, and NULL_TREE otherwise. */
6547
6548 static tree
6549 fold_div_compare (location_t loc,
6550 enum tree_code code, tree type, tree arg0, tree arg1)
6551 {
6552 tree prod, tmp, hi, lo;
6553 tree arg00 = TREE_OPERAND (arg0, 0);
6554 tree arg01 = TREE_OPERAND (arg0, 1);
6555 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6556 bool neg_overflow = false;
6557 bool overflow;
6558
6559 /* We have to do this the hard way to detect unsigned overflow.
6560 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6561 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6562 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6563 neg_overflow = false;
6564
6565 if (sign == UNSIGNED)
6566 {
6567 tmp = int_const_binop (MINUS_EXPR, arg01,
6568 build_int_cst (TREE_TYPE (arg01), 1));
6569 lo = prod;
6570
6571 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6572 val = wi::add (prod, tmp, sign, &overflow);
6573 hi = force_fit_type (TREE_TYPE (arg00), val,
6574 -1, overflow | TREE_OVERFLOW (prod));
6575 }
6576 else if (tree_int_cst_sgn (arg01) >= 0)
6577 {
6578 tmp = int_const_binop (MINUS_EXPR, arg01,
6579 build_int_cst (TREE_TYPE (arg01), 1));
6580 switch (tree_int_cst_sgn (arg1))
6581 {
6582 case -1:
6583 neg_overflow = true;
6584 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6585 hi = prod;
6586 break;
6587
6588 case 0:
6589 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6590 hi = tmp;
6591 break;
6592
6593 case 1:
6594 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6595 lo = prod;
6596 break;
6597
6598 default:
6599 gcc_unreachable ();
6600 }
6601 }
6602 else
6603 {
6604 /* A negative divisor reverses the relational operators. */
6605 code = swap_tree_comparison (code);
6606
6607 tmp = int_const_binop (PLUS_EXPR, arg01,
6608 build_int_cst (TREE_TYPE (arg01), 1));
6609 switch (tree_int_cst_sgn (arg1))
6610 {
6611 case -1:
6612 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6613 lo = prod;
6614 break;
6615
6616 case 0:
6617 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6618 lo = tmp;
6619 break;
6620
6621 case 1:
6622 neg_overflow = true;
6623 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6624 hi = prod;
6625 break;
6626
6627 default:
6628 gcc_unreachable ();
6629 }
6630 }
6631
6632 switch (code)
6633 {
6634 case EQ_EXPR:
6635 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6636 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6637 if (TREE_OVERFLOW (hi))
6638 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6639 if (TREE_OVERFLOW (lo))
6640 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6641 return build_range_check (loc, type, arg00, 1, lo, hi);
6642
6643 case NE_EXPR:
6644 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6645 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6646 if (TREE_OVERFLOW (hi))
6647 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6648 if (TREE_OVERFLOW (lo))
6649 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6650 return build_range_check (loc, type, arg00, 0, lo, hi);
6651
6652 case LT_EXPR:
6653 if (TREE_OVERFLOW (lo))
6654 {
6655 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6656 return omit_one_operand_loc (loc, type, tmp, arg00);
6657 }
6658 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6659
6660 case LE_EXPR:
6661 if (TREE_OVERFLOW (hi))
6662 {
6663 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6664 return omit_one_operand_loc (loc, type, tmp, arg00);
6665 }
6666 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6667
6668 case GT_EXPR:
6669 if (TREE_OVERFLOW (hi))
6670 {
6671 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6672 return omit_one_operand_loc (loc, type, tmp, arg00);
6673 }
6674 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6675
6676 case GE_EXPR:
6677 if (TREE_OVERFLOW (lo))
6678 {
6679 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6680 return omit_one_operand_loc (loc, type, tmp, arg00);
6681 }
6682 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6683
6684 default:
6685 break;
6686 }
6687
6688 return NULL_TREE;
6689 }
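/* Illustrative sketch, not part of the original source: the division
   compare fold above on a plain C operand (hypothetical helper). The
   quotient test becomes a range check, done here with one unsigned
   subtraction and comparison. */
static inline int
example_div_compare (unsigned int x)
{
  /* x / 4 == 3 holds exactly for 12 <= x <= 15: */
  return x - 12u <= 3u;
}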
6690
6691
6692 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6693 equality/inequality test, then return a simplified form of the test
6694 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
6695 result type. */
6696
6697 static tree
6698 fold_single_bit_test_into_sign_test (location_t loc,
6699 enum tree_code code, tree arg0, tree arg1,
6700 tree result_type)
6701 {
6702 /* If this is testing a single bit, we can optimize the test. */
6703 if ((code == NE_EXPR || code == EQ_EXPR)
6704 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6705 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6706 {
6707 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6708 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6709 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6710
6711 if (arg00 != NULL_TREE
6712 /* This is only a win if casting to a signed type is cheap,
6713 i.e. when arg00's type is not a partial mode. */
6714 && TYPE_PRECISION (TREE_TYPE (arg00))
6715 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6716 {
6717 tree stype = signed_type_for (TREE_TYPE (arg00));
6718 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6719 result_type,
6720 fold_convert_loc (loc, stype, arg00),
6721 build_int_cst (stype, 0));
6722 }
6723 }
6724
6725 return NULL_TREE;
6726 }
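/* Illustrative sketch, not part of the original source: the sign-bit form
   of the single-bit test (hypothetical helper; assumes a 32-bit int and
   the wraparound unsigned-to-signed conversion GCC provides). */
static inline int
example_sign_bit_test (unsigned int a)
{
  /* (a & 0x80000000) != 0 tests the sign bit, so it folds to a signed
     comparison against zero. */
  return (int) a < 0;
}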
6727
6728 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6729 equality/inequality test, then return a simplified form of
6730 the test using shifts and logical operations. Otherwise return
6731 NULL. RESULT_TYPE is the desired result type. */
6732
6733 tree
6734 fold_single_bit_test (location_t loc, enum tree_code code,
6735 tree arg0, tree arg1, tree result_type)
6736 {
6737 /* If this is testing a single bit, we can optimize the test. */
6738 if ((code == NE_EXPR || code == EQ_EXPR)
6739 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6740 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6741 {
6742 tree inner = TREE_OPERAND (arg0, 0);
6743 tree type = TREE_TYPE (arg0);
6744 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6745 machine_mode operand_mode = TYPE_MODE (type);
6746 int ops_unsigned;
6747 tree signed_type, unsigned_type, intermediate_type;
6748 tree tem, one;
6749
6750 /* First, see if we can fold the single bit test into a sign-bit
6751 test. */
6752 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6753 result_type);
6754 if (tem)
6755 return tem;
6756
6757 /* Otherwise we have (A & C) != 0 where C is a single bit,
6758 convert that into ((A >> C2) & 1), where C2 = log2(C).
6759 Similarly for (A & C) == 0. */
6760
6761 /* If INNER is a right shift by a constant and it plus BITNUM does
6762 not overflow, adjust BITNUM and INNER. */
6763 if (TREE_CODE (inner) == RSHIFT_EXPR
6764 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6765 && bitnum < TYPE_PRECISION (type)
6766 && wi::ltu_p (TREE_OPERAND (inner, 1),
6767 TYPE_PRECISION (type) - bitnum))
6768 {
6769 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6770 inner = TREE_OPERAND (inner, 0);
6771 }
6772
6773 /* If we are going to be able to omit the AND below, we must do our
6774 operations as unsigned. If we must use the AND, we have a choice.
6775 Normally unsigned is faster, but for some machines signed is. */
6776 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6777 && !flag_syntax_only) ? 0 : 1;
6778
6779 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6780 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6781 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6782 inner = fold_convert_loc (loc, intermediate_type, inner);
6783
6784 if (bitnum != 0)
6785 inner = build2 (RSHIFT_EXPR, intermediate_type,
6786 inner, size_int (bitnum));
6787
6788 one = build_int_cst (intermediate_type, 1);
6789
6790 if (code == EQ_EXPR)
6791 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6792
6793 /* Put the AND last so it can combine with more things. */
6794 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6795
6796 /* Make sure to return the proper type. */
6797 inner = fold_convert_loc (loc, result_type, inner);
6798
6799 return inner;
6800 }
6801 return NULL_TREE;
6802 }
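/* Illustrative sketch, not part of the original source: the shift-and-mask
   form of the single-bit test (hypothetical helper). */
static inline int
example_single_bit_test (unsigned int a)
{
  /* (a & 8) != 0 becomes ((a >> 3) & 1); the AND is applied last so it
     can combine with surrounding code. */
  return (a >> 3) & 1;
}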
6803
6804 /* Check whether we are allowed to reorder operands ARG0 and ARG1,
6805 such that the evaluation of ARG1 occurs before ARG0. */
6806
6807 static bool
6808 reorder_operands_p (const_tree arg0, const_tree arg1)
6809 {
6810 if (! flag_evaluation_order)
6811 return true;
6812 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6813 return true;
6814 return ! TREE_SIDE_EFFECTS (arg0)
6815 && ! TREE_SIDE_EFFECTS (arg1);
6816 }
6817
6818 /* Test whether it is preferable to swap two operands, ARG0 and
6819 ARG1, for example because ARG0 is an integer constant and ARG1
6820 isn't. If REORDER is true, only recommend swapping if we can
6821 evaluate the operands in reverse order. */
6822
6823 bool
6824 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6825 {
6826 if (CONSTANT_CLASS_P (arg1))
6827 return 0;
6828 if (CONSTANT_CLASS_P (arg0))
6829 return 1;
6830
6831 STRIP_NOPS (arg0);
6832 STRIP_NOPS (arg1);
6833
6834 if (TREE_CONSTANT (arg1))
6835 return 0;
6836 if (TREE_CONSTANT (arg0))
6837 return 1;
6838
6839 if (reorder && flag_evaluation_order
6840 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6841 return 0;
6842
6843 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6844 for commutative and comparison operators. Ensuring a canonical
6845 form allows the optimizers to find additional redundancies without
6846 having to explicitly check for both orderings. */
6847 if (TREE_CODE (arg0) == SSA_NAME
6848 && TREE_CODE (arg1) == SSA_NAME
6849 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6850 return 1;
6851
6852 /* Put SSA_NAMEs last. */
6853 if (TREE_CODE (arg1) == SSA_NAME)
6854 return 0;
6855 if (TREE_CODE (arg0) == SSA_NAME)
6856 return 1;
6857
6858 /* Put variables last. */
6859 if (DECL_P (arg1))
6860 return 0;
6861 if (DECL_P (arg0))
6862 return 1;
6863
6864 return 0;
6865 }
6866
6867
6868 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6869 means A >= Y && A != MAX, but in this case we know that
6870 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6871
6872 static tree
6873 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6874 {
6875 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6876
6877 if (TREE_CODE (bound) == LT_EXPR)
6878 a = TREE_OPERAND (bound, 0);
6879 else if (TREE_CODE (bound) == GT_EXPR)
6880 a = TREE_OPERAND (bound, 1);
6881 else
6882 return NULL_TREE;
6883
6884 typea = TREE_TYPE (a);
6885 if (!INTEGRAL_TYPE_P (typea)
6886 && !POINTER_TYPE_P (typea))
6887 return NULL_TREE;
6888
6889 if (TREE_CODE (ineq) == LT_EXPR)
6890 {
6891 a1 = TREE_OPERAND (ineq, 1);
6892 y = TREE_OPERAND (ineq, 0);
6893 }
6894 else if (TREE_CODE (ineq) == GT_EXPR)
6895 {
6896 a1 = TREE_OPERAND (ineq, 0);
6897 y = TREE_OPERAND (ineq, 1);
6898 }
6899 else
6900 return NULL_TREE;
6901
6902 if (TREE_TYPE (a1) != typea)
6903 return NULL_TREE;
6904
6905 if (POINTER_TYPE_P (typea))
6906 {
6907 /* Convert the pointer types into integers before taking the difference. */
6908 tree ta = fold_convert_loc (loc, ssizetype, a);
6909 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6910 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6911 }
6912 else
6913 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6914
6915 if (!diff || !integer_onep (diff))
6916 return NULL_TREE;
6917
6918 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6919 }
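/* Illustrative sketch, not part of the original source: the fold above on
   plain C operands (hypothetical helper). Because a < x rules out
   a == INT_MAX, the increment in a + 1 > y cannot overflow and the test
   weakens safely to a >= y. */
static inline int
example_nonsharp_ineq (int a, int x, int y)
{
  /* a < x && a + 1 > y becomes: */
  return a < x && a >= y;
}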
6920
6921 /* Fold a sum or difference of at least one multiplication.
6922 Returns the folded tree or NULL if no simplification could be made. */
6923
6924 static tree
6925 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6926 tree arg0, tree arg1)
6927 {
6928 tree arg00, arg01, arg10, arg11;
6929 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6930
6931 /* (A * C) +- (B * C) -> (A+-B) * C.
6932 (A * C) +- A -> A * (C+-1).
6933 We are most concerned about the case where C is a constant,
6934 but other combinations show up during loop reduction. Since
6935 it is not difficult, try all four possibilities. */
6936
6937 if (TREE_CODE (arg0) == MULT_EXPR)
6938 {
6939 arg00 = TREE_OPERAND (arg0, 0);
6940 arg01 = TREE_OPERAND (arg0, 1);
6941 }
6942 else if (TREE_CODE (arg0) == INTEGER_CST)
6943 {
6944 arg00 = build_one_cst (type);
6945 arg01 = arg0;
6946 }
6947 else
6948 {
6949 /* We cannot generate constant 1 for fract. */
6950 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6951 return NULL_TREE;
6952 arg00 = arg0;
6953 arg01 = build_one_cst (type);
6954 }
6955 if (TREE_CODE (arg1) == MULT_EXPR)
6956 {
6957 arg10 = TREE_OPERAND (arg1, 0);
6958 arg11 = TREE_OPERAND (arg1, 1);
6959 }
6960 else if (TREE_CODE (arg1) == INTEGER_CST)
6961 {
6962 arg10 = build_one_cst (type);
6963 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6964 the purpose of this canonicalization. */
6965 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6966 && negate_expr_p (arg1)
6967 && code == PLUS_EXPR)
6968 {
6969 arg11 = negate_expr (arg1);
6970 code = MINUS_EXPR;
6971 }
6972 else
6973 arg11 = arg1;
6974 }
6975 else
6976 {
6977 /* We cannot generate constant 1 for fract. */
6978 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6979 return NULL_TREE;
6980 arg10 = arg1;
6981 arg11 = build_one_cst (type);
6982 }
6983 same = NULL_TREE;
6984
6985 if (operand_equal_p (arg01, arg11, 0))
6986 same = arg01, alt0 = arg00, alt1 = arg10;
6987 else if (operand_equal_p (arg00, arg10, 0))
6988 same = arg00, alt0 = arg01, alt1 = arg11;
6989 else if (operand_equal_p (arg00, arg11, 0))
6990 same = arg00, alt0 = arg01, alt1 = arg10;
6991 else if (operand_equal_p (arg01, arg10, 0))
6992 same = arg01, alt0 = arg00, alt1 = arg11;
6993
6994 /* No identical multiplicands; see if we can find a common
6995 power-of-two factor in non-power-of-two multiplies. This
6996 can help in multi-dimensional array access. */
6997 else if (tree_fits_shwi_p (arg01)
6998 && tree_fits_shwi_p (arg11))
6999 {
7000 HOST_WIDE_INT int01, int11, tmp;
7001 bool swap = false;
7002 tree maybe_same;
7003 int01 = tree_to_shwi (arg01);
7004 int11 = tree_to_shwi (arg11);
7005
7006 /* Move min of absolute values to int11. */
7007 if (absu_hwi (int01) < absu_hwi (int11))
7008 {
7009 tmp = int01, int01 = int11, int11 = tmp;
7010 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7011 maybe_same = arg01;
7012 swap = true;
7013 }
7014 else
7015 maybe_same = arg11;
7016
7017 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7018 /* The remainder should not be a constant, otherwise we
7019 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7020 would increase the number of multiplications necessary. */
7021 && TREE_CODE (arg10) != INTEGER_CST)
7022 {
7023 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7024 build_int_cst (TREE_TYPE (arg00),
7025 int01 / int11));
7026 alt1 = arg10;
7027 same = maybe_same;
7028 if (swap)
7029 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7030 }
7031 }
7032
7033 if (same)
7034 return fold_build2_loc (loc, MULT_EXPR, type,
7035 fold_build2_loc (loc, code, type,
7036 fold_convert_loc (loc, type, alt0),
7037 fold_convert_loc (loc, type, alt1)),
7038 fold_convert_loc (loc, type, same));
7039
7040 return NULL_TREE;
7041 }
7042
7043 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7044 specified by EXPR into the buffer PTR of length LEN bytes.
7045 Return the number of bytes placed in the buffer, or zero
7046 upon failure. */
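/* Illustrative example (assuming a 32-bit little-endian target with
   8-bit units): encoding the INTEGER_CST 0x01020304 with OFF == -1
   stores the bytes { 0x04, 0x03, 0x02, 0x01 } into PTR and returns 4;
   a big-endian target would store { 0x01, 0x02, 0x03, 0x04 }.  */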
7047
7048 static int
7049 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7050 {
7051 tree type = TREE_TYPE (expr);
7052 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7053 int byte, offset, word, words;
7054 unsigned char value;
7055
7056 if ((off == -1 && total_bytes > len)
7057 || off >= total_bytes)
7058 return 0;
7059 if (off == -1)
7060 off = 0;
7061 words = total_bytes / UNITS_PER_WORD;
7062
7063 for (byte = 0; byte < total_bytes; byte++)
7064 {
7065 int bitpos = byte * BITS_PER_UNIT;
7066 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7067 number of bytes. */
7068 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7069
7070 if (total_bytes > UNITS_PER_WORD)
7071 {
7072 word = byte / UNITS_PER_WORD;
7073 if (WORDS_BIG_ENDIAN)
7074 word = (words - 1) - word;
7075 offset = word * UNITS_PER_WORD;
7076 if (BYTES_BIG_ENDIAN)
7077 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7078 else
7079 offset += byte % UNITS_PER_WORD;
7080 }
7081 else
7082 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7083 if (offset >= off
7084 && offset - off < len)
7085 ptr[offset - off] = value;
7086 }
7087 return MIN (len, total_bytes - off);
7088 }
7089
7090
7091 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7092 specified by EXPR into the buffer PTR of length LEN bytes.
7093 Return the number of bytes placed in the buffer, or zero
7094 upon failure. */
7095
7096 static int
7097 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7098 {
7099 tree type = TREE_TYPE (expr);
7100 machine_mode mode = TYPE_MODE (type);
7101 int total_bytes = GET_MODE_SIZE (mode);
7102 FIXED_VALUE_TYPE value;
7103 tree i_value, i_type;
7104
7105 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7106 return 0;
7107
7108 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7109
7110 if (NULL_TREE == i_type
7111 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7112 return 0;
7113
7114 value = TREE_FIXED_CST (expr);
7115 i_value = double_int_to_tree (i_type, value.data);
7116
7117 return native_encode_int (i_value, ptr, len, off);
7118 }
7119
7120
7121 /* Subroutine of native_encode_expr. Encode the REAL_CST
7122 specified by EXPR into the buffer PTR of length LEN bytes.
7123 Return the number of bytes placed in the buffer, or zero
7124 upon failure. */
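/* Illustrative example (assuming IEEE single precision on a
   little-endian target with words of at least 4 bytes): the REAL_CST
   1.0f has the image 0x3f800000, so the bytes stored are
   { 0x00, 0x00, 0x80, 0x3f }.  */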
7125
7126 static int
7127 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7128 {
7129 tree type = TREE_TYPE (expr);
7130 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7131 int byte, offset, word, words, bitpos;
7132 unsigned char value;
7133
7134 /* There are always 32 bits in each long, no matter the size of
7135 the host's long. We handle floating point representations with
7136 up to 192 bits. */
7137 long tmp[6];
7138
7139 if ((off == -1 && total_bytes > len)
7140 || off >= total_bytes)
7141 return 0;
7142 if (off == -1)
7143 off = 0;
7144 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7145
7146 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7147
7148 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7149 bitpos += BITS_PER_UNIT)
7150 {
7151 byte = (bitpos / BITS_PER_UNIT) & 3;
7152 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7153
7154 if (UNITS_PER_WORD < 4)
7155 {
7156 word = byte / UNITS_PER_WORD;
7157 if (WORDS_BIG_ENDIAN)
7158 word = (words - 1) - word;
7159 offset = word * UNITS_PER_WORD;
7160 if (BYTES_BIG_ENDIAN)
7161 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7162 else
7163 offset += byte % UNITS_PER_WORD;
7164 }
7165 else
7166 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7167 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7168 if (offset >= off
7169 && offset - off < len)
7170 ptr[offset - off] = value;
7171 }
7172 return MIN (len, total_bytes - off);
7173 }
7174
7175 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7176 specified by EXPR into the buffer PTR of length LEN bytes.
7177 Return the number of bytes placed in the buffer, or zero
7178 upon failure. */
7179
7180 static int
7181 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7182 {
7183 int rsize, isize;
7184 tree part;
7185
7186 part = TREE_REALPART (expr);
7187 rsize = native_encode_expr (part, ptr, len, off);
7188 if (off == -1
7189 && rsize == 0)
7190 return 0;
7191 part = TREE_IMAGPART (expr);
7192 if (off != -1)
7193 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7194 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7195 if (off == -1
7196 && isize != rsize)
7197 return 0;
7198 return rsize + isize;
7199 }
7200
7201
7202 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7203 specified by EXPR into the buffer PTR of length LEN bytes.
7204 Return the number of bytes placed in the buffer, or zero
7205 upon failure. */
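/* Illustrative example (assuming a vector of four 32-bit ints): the
   elements are encoded back to back, so { 1, 2, 3, 4 } fills 16
   bytes, each lane laid out exactly as native_encode_int would lay
   it out.  */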
7206
7207 static int
7208 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7209 {
7210 unsigned i, count;
7211 int size, offset;
7212 tree itype, elem;
7213
7214 offset = 0;
7215 count = VECTOR_CST_NELTS (expr);
7216 itype = TREE_TYPE (TREE_TYPE (expr));
7217 size = GET_MODE_SIZE (TYPE_MODE (itype));
7218 for (i = 0; i < count; i++)
7219 {
7220 if (off >= size)
7221 {
7222 off -= size;
7223 continue;
7224 }
7225 elem = VECTOR_CST_ELT (expr, i);
7226 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7227 if ((off == -1 && res != size)
7228 || res == 0)
7229 return 0;
7230 offset += res;
7231 if (offset >= len)
7232 return offset;
7233 if (off != -1)
7234 off = 0;
7235 }
7236 return offset;
7237 }
7238
7239
7240 /* Subroutine of native_encode_expr. Encode the STRING_CST
7241 specified by EXPR into the buffer PTR of length LEN bytes.
7242 Return the number of bytes placed in the buffer, or zero
7243 upon failure. */
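/* Illustrative example (hypothetical declaration, assuming the front
   end gives the STRING_CST the full array type): for
   char buf[8] = "hi", the string has length 3 counting the NUL and
   the array type has size 8, so encoding with OFF == -1 copies
   'h', 'i', '\0' and zero-fills the remaining five bytes.  */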
7244
7245 static int
7246 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7247 {
7248 tree type = TREE_TYPE (expr);
7249 HOST_WIDE_INT total_bytes;
7250
7251 if (TREE_CODE (type) != ARRAY_TYPE
7252 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7253 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7254 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7255 return 0;
7256 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7257 if ((off == -1 && total_bytes > len)
7258 || off >= total_bytes)
7259 return 0;
7260 if (off == -1)
7261 off = 0;
7262 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7263 {
7264 int written = 0;
7265 if (off < TREE_STRING_LENGTH (expr))
7266 {
7267 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7268 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7269 }
7270 memset (ptr + written, 0,
7271 MIN (total_bytes - written, len - written));
7272 }
7273 else
7274 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7275 return MIN (total_bytes - off, len);
7276 }
7277
7278
7279 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7280 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7281 buffer PTR of length LEN bytes. If OFF is not -1 then start
7282 the encoding at byte offset OFF and encode at most LEN bytes.
7283 Return the number of bytes placed in the buffer, or zero upon failure. */
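/* A minimal usage sketch (hypothetical caller):

     unsigned char buf[16];
     int len = native_encode_expr (cst, buf, sizeof (buf), -1);
     if (len == 0)
       return NULL_TREE;

   A zero return means the constant could not be encoded in LEN
   bytes.  Passing a non-negative OFF instead requests a partial
   encoding starting at that byte offset.  */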
7284
7285 int
7286 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7287 {
7288 /* We don't support starting at a negative offset, and -1 is special. */
7289 if (off < -1)
7290 return 0;
7291
7292 switch (TREE_CODE (expr))
7293 {
7294 case INTEGER_CST:
7295 return native_encode_int (expr, ptr, len, off);
7296
7297 case REAL_CST:
7298 return native_encode_real (expr, ptr, len, off);
7299
7300 case FIXED_CST:
7301 return native_encode_fixed (expr, ptr, len, off);
7302
7303 case COMPLEX_CST:
7304 return native_encode_complex (expr, ptr, len, off);
7305
7306 case VECTOR_CST:
7307 return native_encode_vector (expr, ptr, len, off);
7308
7309 case STRING_CST:
7310 return native_encode_string (expr, ptr, len, off);
7311
7312 default:
7313 return 0;
7314 }
7315 }
7316
7317
7318 /* Subroutine of native_interpret_expr. Interpret the contents of
7319 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7320 If the buffer cannot be interpreted, return NULL_TREE. */
7321
7322 static tree
7323 native_interpret_int (tree type, const unsigned char *ptr, int len)
7324 {
7325 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7326
7327 if (total_bytes > len
7328 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7329 return NULL_TREE;
7330
7331 wide_int result = wi::from_buffer (ptr, total_bytes);
7332
7333 return wide_int_to_tree (type, result);
7334 }
7335
7336
7337 /* Subroutine of native_interpret_expr. Interpret the contents of
7338 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7339 If the buffer cannot be interpreted, return NULL_TREE. */
7340
7341 static tree
7342 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7343 {
7344 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7345 double_int result;
7346 FIXED_VALUE_TYPE fixed_value;
7347
7348 if (total_bytes > len
7349 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7350 return NULL_TREE;
7351
7352 result = double_int::from_buffer (ptr, total_bytes);
7353 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7354
7355 return build_fixed (type, fixed_value);
7356 }
7357
7358
7359 /* Subroutine of native_interpret_expr. Interpret the contents of
7360 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7361 If the buffer cannot be interpreted, return NULL_TREE. */
7362
7363 static tree
7364 native_interpret_real (tree type, const unsigned char *ptr, int len)
7365 {
7366 machine_mode mode = TYPE_MODE (type);
7367 int total_bytes = GET_MODE_SIZE (mode);
7368 unsigned char value;
7369 /* There are always 32 bits in each long, no matter the size of
7370 the host's long. We handle floating point representations with
7371 up to 192 bits. */
7372 REAL_VALUE_TYPE r;
7373 long tmp[6];
7374
7375 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7376 if (total_bytes > len || total_bytes > 24)
7377 return NULL_TREE;
7378 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7379
7380 memset (tmp, 0, sizeof (tmp));
7381 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7382 bitpos += BITS_PER_UNIT)
7383 {
7384 /* Both OFFSET and BYTE index within a long;
7385 bitpos indexes the whole float. */
7386 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7387 if (UNITS_PER_WORD < 4)
7388 {
7389 int word = byte / UNITS_PER_WORD;
7390 if (WORDS_BIG_ENDIAN)
7391 word = (words - 1) - word;
7392 offset = word * UNITS_PER_WORD;
7393 if (BYTES_BIG_ENDIAN)
7394 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7395 else
7396 offset += byte % UNITS_PER_WORD;
7397 }
7398 else
7399 {
7400 offset = byte;
7401 if (BYTES_BIG_ENDIAN)
7402 {
7403 /* Reverse bytes within each long, or within the entire float
7404 if it's smaller than a long (for HFmode). */
7405 offset = MIN (3, total_bytes - 1) - offset;
7406 gcc_assert (offset >= 0);
7407 }
7408 }
7409 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7410
7411 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7412 }
7413
7414 real_from_target (&r, tmp, mode);
7415 return build_real (type, r);
7416 }
7417
7418
7419 /* Subroutine of native_interpret_expr. Interpret the contents of
7420 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7421 If the buffer cannot be interpreted, return NULL_TREE. */
7422
7423 static tree
7424 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7425 {
7426 tree etype, rpart, ipart;
7427 int size;
7428
7429 etype = TREE_TYPE (type);
7430 size = GET_MODE_SIZE (TYPE_MODE (etype));
7431 if (size * 2 > len)
7432 return NULL_TREE;
7433 rpart = native_interpret_expr (etype, ptr, size);
7434 if (!rpart)
7435 return NULL_TREE;
7436 ipart = native_interpret_expr (etype, ptr+size, size);
7437 if (!ipart)
7438 return NULL_TREE;
7439 return build_complex (type, rpart, ipart);
7440 }
7441
7442
7443 /* Subroutine of native_interpret_expr. Interpret the contents of
7444 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7445 If the buffer cannot be interpreted, return NULL_TREE. */
7446
7447 static tree
7448 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7449 {
7450 tree etype, elem;
7451 int i, size, count;
7452 tree *elements;
7453
7454 etype = TREE_TYPE (type);
7455 size = GET_MODE_SIZE (TYPE_MODE (etype));
7456 count = TYPE_VECTOR_SUBPARTS (type);
7457 if (size * count > len)
7458 return NULL_TREE;
7459
7460 elements = XALLOCAVEC (tree, count);
7461 for (i = count - 1; i >= 0; i--)
7462 {
7463 elem = native_interpret_expr (etype, ptr+(i*size), size);
7464 if (!elem)
7465 return NULL_TREE;
7466 elements[i] = elem;
7467 }
7468 return build_vector (type, elements);
7469 }
7470
7471
7472 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7473 the buffer PTR of length LEN as a constant of type TYPE. For
7474 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7475 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7476 return NULL_TREE. */
7477
7478 tree
7479 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7480 {
7481 switch (TREE_CODE (type))
7482 {
7483 case INTEGER_TYPE:
7484 case ENUMERAL_TYPE:
7485 case BOOLEAN_TYPE:
7486 case POINTER_TYPE:
7487 case REFERENCE_TYPE:
7488 return native_interpret_int (type, ptr, len);
7489
7490 case REAL_TYPE:
7491 return native_interpret_real (type, ptr, len);
7492
7493 case FIXED_POINT_TYPE:
7494 return native_interpret_fixed (type, ptr, len);
7495
7496 case COMPLEX_TYPE:
7497 return native_interpret_complex (type, ptr, len);
7498
7499 case VECTOR_TYPE:
7500 return native_interpret_vector (type, ptr, len);
7501
7502 default:
7503 return NULL_TREE;
7504 }
7505 }
7506
7507 /* Returns true if we can interpret the contents of a native encoding
7508 as TYPE. */
7509
7510 static bool
7511 can_native_interpret_type_p (tree type)
7512 {
7513 switch (TREE_CODE (type))
7514 {
7515 case INTEGER_TYPE:
7516 case ENUMERAL_TYPE:
7517 case BOOLEAN_TYPE:
7518 case POINTER_TYPE:
7519 case REFERENCE_TYPE:
7520 case FIXED_POINT_TYPE:
7521 case REAL_TYPE:
7522 case COMPLEX_TYPE:
7523 case VECTOR_TYPE:
7524 return true;
7525 default:
7526 return false;
7527 }
7528 }
7529
7530 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7531 TYPE at compile-time. If we're unable to perform the conversion
7532 return NULL_TREE. */
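/* Illustrative example (assuming IEEE single precision and 32-bit
   int): VIEW_CONVERT_EXPR<int>(1.0f) folds to the INTEGER_CST
   0x3f800000 by encoding the REAL_CST into BUFFER and
   re-interpreting the bytes as an int.  */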
7533
7534 static tree
7535 fold_view_convert_expr (tree type, tree expr)
7536 {
7537 /* We support up to 512-bit values (for V8DFmode). */
7538 unsigned char buffer[64];
7539 int len;
7540
7541 /* Check that the host and target are sane. */
7542 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7543 return NULL_TREE;
7544
7545 len = native_encode_expr (expr, buffer, sizeof (buffer));
7546 if (len == 0)
7547 return NULL_TREE;
7548
7549 return native_interpret_expr (type, buffer, len);
7550 }
7551
7552 /* Build an expression for the address of T. Folds away INDIRECT_REF
7553 to avoid confusing the gimplify process. */
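/* Illustrative instances: &*p simplifies to p (with a cast to
   PTRTYPE if needed), and the address of MEM_REF (p, 0) simplifies
   back to p.  */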
7554
7555 tree
7556 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7557 {
7558 /* The size of the object is not relevant when talking about its address. */
7559 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7560 t = TREE_OPERAND (t, 0);
7561
7562 if (TREE_CODE (t) == INDIRECT_REF)
7563 {
7564 t = TREE_OPERAND (t, 0);
7565
7566 if (TREE_TYPE (t) != ptrtype)
7567 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7568 }
7569 else if (TREE_CODE (t) == MEM_REF
7570 && integer_zerop (TREE_OPERAND (t, 1)))
7571 return TREE_OPERAND (t, 0);
7572 else if (TREE_CODE (t) == MEM_REF
7573 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7574 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7575 TREE_OPERAND (t, 0),
7576 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7577 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7578 {
7579 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7580
7581 if (TREE_TYPE (t) != ptrtype)
7582 t = fold_convert_loc (loc, ptrtype, t);
7583 }
7584 else
7585 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7586
7587 return t;
7588 }
7589
7590 /* Build an expression for the address of T. */
7591
7592 tree
7593 build_fold_addr_expr_loc (location_t loc, tree t)
7594 {
7595 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7596
7597 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7598 }
7599
7600 /* Fold a unary expression of code CODE and type TYPE with operand
7601 OP0. Return the folded expression if folding is successful.
7602 Otherwise, return NULL_TREE. */
7603
7604 tree
7605 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7606 {
7607 tree tem;
7608 tree arg0;
7609 enum tree_code_class kind = TREE_CODE_CLASS (code);
7610
7611 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7612 && TREE_CODE_LENGTH (code) == 1);
7613
7614 arg0 = op0;
7615 if (arg0)
7616 {
7617 if (CONVERT_EXPR_CODE_P (code)
7618 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7619 {
7620 /* Don't use STRIP_NOPS, because signedness of argument type
7621 matters. */
7622 STRIP_SIGN_NOPS (arg0);
7623 }
7624 else
7625 {
7626 /* Strip any conversions that don't change the mode. This
7627 is safe for every expression, except for a comparison
7628 expression because its signedness is derived from its
7629 operands.
7630
7631 Note that this is done as an internal manipulation within
7632 the constant folder, in order to find the simplest
7633 representation of the arguments so that their form can be
7634 studied. In any case, the appropriate type conversions
7635 should be put back in the tree that will get out of the
7636 constant folder. */
7637 STRIP_NOPS (arg0);
7638 }
7639
7640 if (CONSTANT_CLASS_P (arg0))
7641 {
7642 tree tem = const_unop (code, type, arg0);
7643 if (tem)
7644 {
7645 if (TREE_TYPE (tem) != type)
7646 tem = fold_convert_loc (loc, type, tem);
7647 return tem;
7648 }
7649 }
7650 }
7651
7652 tem = generic_simplify (loc, code, type, op0);
7653 if (tem)
7654 return tem;
7655
7656 if (TREE_CODE_CLASS (code) == tcc_unary)
7657 {
7658 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7659 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7660 fold_build1_loc (loc, code, type,
7661 fold_convert_loc (loc, TREE_TYPE (op0),
7662 TREE_OPERAND (arg0, 1))));
7663 else if (TREE_CODE (arg0) == COND_EXPR)
7664 {
7665 tree arg01 = TREE_OPERAND (arg0, 1);
7666 tree arg02 = TREE_OPERAND (arg0, 2);
7667 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7668 arg01 = fold_build1_loc (loc, code, type,
7669 fold_convert_loc (loc,
7670 TREE_TYPE (op0), arg01));
7671 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7672 arg02 = fold_build1_loc (loc, code, type,
7673 fold_convert_loc (loc,
7674 TREE_TYPE (op0), arg02));
7675 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7676 arg01, arg02);
7677
7678 /* If this was a conversion, and all we did was to move it
7679 inside the COND_EXPR, bring it back out. But leave it if
7680 it is a conversion from integer to integer and the
7681 result precision is no wider than a word since such a
7682 conversion is cheap and may be optimized away by combine,
7683 while it couldn't if it were outside the COND_EXPR. Then return
7684 so we don't get into an infinite recursion loop taking the
7685 conversion out and then back in. */
7686
7687 if ((CONVERT_EXPR_CODE_P (code)
7688 || code == NON_LVALUE_EXPR)
7689 && TREE_CODE (tem) == COND_EXPR
7690 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7691 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7692 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7693 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7694 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7695 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7696 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7697 && (INTEGRAL_TYPE_P
7698 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7699 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7700 || flag_syntax_only))
7701 tem = build1_loc (loc, code, type,
7702 build3 (COND_EXPR,
7703 TREE_TYPE (TREE_OPERAND
7704 (TREE_OPERAND (tem, 1), 0)),
7705 TREE_OPERAND (tem, 0),
7706 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7707 TREE_OPERAND (TREE_OPERAND (tem, 2),
7708 0)));
7709 return tem;
7710 }
7711 }
7712
7713 switch (code)
7714 {
7715 case NON_LVALUE_EXPR:
7716 if (!maybe_lvalue_p (op0))
7717 return fold_convert_loc (loc, type, op0);
7718 return NULL_TREE;
7719
7720 CASE_CONVERT:
7721 case FLOAT_EXPR:
7722 case FIX_TRUNC_EXPR:
7723 if (COMPARISON_CLASS_P (op0))
7724 {
7725 /* If we have (type) (a CMP b) and type is an integral type, return
7726 new expression involving the new type. Canonicalize
7727 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7728 non-integral type.
7729 Do not fold the result as that would not simplify further, also
7730 folding again results in recursions. */
7731 if (TREE_CODE (type) == BOOLEAN_TYPE)
7732 return build2_loc (loc, TREE_CODE (op0), type,
7733 TREE_OPERAND (op0, 0),
7734 TREE_OPERAND (op0, 1));
7735 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7736 && TREE_CODE (type) != VECTOR_TYPE)
7737 return build3_loc (loc, COND_EXPR, type, op0,
7738 constant_boolean_node (true, type),
7739 constant_boolean_node (false, type));
7740 }
7741
7742 /* Handle (T *)&A.B.C for A being of type T and B and C
7743 living at offset zero. This occurs frequently in
7744 C++ upcasting and then accessing the base. */
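/* Illustrative instance (hypothetical types): for
   struct T { struct S s; } a; the cast (struct T *)&a.s refers to
   offset zero of a, so it folds to &a.  */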
7745 if (TREE_CODE (op0) == ADDR_EXPR
7746 && POINTER_TYPE_P (type)
7747 && handled_component_p (TREE_OPERAND (op0, 0)))
7748 {
7749 HOST_WIDE_INT bitsize, bitpos;
7750 tree offset;
7751 machine_mode mode;
7752 int unsignedp, reversep, volatilep;
7753 tree base
7754 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7755 &offset, &mode, &unsignedp, &reversep,
7756 &volatilep, false);
7757 /* If the reference was to a (constant) zero offset, we can use
7758 the address of the base if it has the same base type
7759 as the result type and the pointer type is unqualified. */
7760 if (! offset && bitpos == 0
7761 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7762 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7763 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7764 return fold_convert_loc (loc, type,
7765 build_fold_addr_expr_loc (loc, base));
7766 }
7767
7768 if (TREE_CODE (op0) == MODIFY_EXPR
7769 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7770 /* Detect assigning a bitfield. */
7771 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7772 && DECL_BIT_FIELD
7773 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7774 {
7775 /* Don't leave an assignment inside a conversion
7776 unless assigning a bitfield. */
7777 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7778 /* First do the assignment, then return converted constant. */
7779 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7780 TREE_NO_WARNING (tem) = 1;
7781 TREE_USED (tem) = 1;
7782 return tem;
7783 }
7784
7785 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7786 constants (if x has signed type, the sign bit cannot be set
7787 in c). This folds extension into the BIT_AND_EXPR.
7788 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7789 very likely don't have maximal range for their precision and this
7790 transformation effectively doesn't preserve non-maximal ranges. */
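/* Illustrative instance (assuming 16-bit unsigned short and 32-bit
   int): for unsigned short x, (int) (x & 0xff) becomes
   (int) x & 0xff; the mask guarantees the sign bit cannot be set,
   so the extension can be folded into the BIT_AND_EXPR.  */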
7791 if (TREE_CODE (type) == INTEGER_TYPE
7792 && TREE_CODE (op0) == BIT_AND_EXPR
7793 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7794 {
7795 tree and_expr = op0;
7796 tree and0 = TREE_OPERAND (and_expr, 0);
7797 tree and1 = TREE_OPERAND (and_expr, 1);
7798 int change = 0;
7799
7800 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7801 || (TYPE_PRECISION (type)
7802 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7803 change = 1;
7804 else if (TYPE_PRECISION (TREE_TYPE (and1))
7805 <= HOST_BITS_PER_WIDE_INT
7806 && tree_fits_uhwi_p (and1))
7807 {
7808 unsigned HOST_WIDE_INT cst;
7809
7810 cst = tree_to_uhwi (and1);
7811 cst &= HOST_WIDE_INT_M1U
7812 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7813 change = (cst == 0);
7814 if (change
7815 && !flag_syntax_only
7816 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7817 == ZERO_EXTEND))
7818 {
7819 tree uns = unsigned_type_for (TREE_TYPE (and0));
7820 and0 = fold_convert_loc (loc, uns, and0);
7821 and1 = fold_convert_loc (loc, uns, and1);
7822 }
7823 }
7824 if (change)
7825 {
7826 tem = force_fit_type (type, wi::to_widest (and1), 0,
7827 TREE_OVERFLOW (and1));
7828 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7829 fold_convert_loc (loc, type, and0), tem);
7830 }
7831 }
7832
7833 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7834 cast (T1)X will fold away. We assume that this happens when X itself
7835 is a cast. */
7836 if (POINTER_TYPE_P (type)
7837 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7838 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7839 {
7840 tree arg00 = TREE_OPERAND (arg0, 0);
7841 tree arg01 = TREE_OPERAND (arg0, 1);
7842
7843 return fold_build_pointer_plus_loc
7844 (loc, fold_convert_loc (loc, type, arg00), arg01);
7845 }
7846
7847 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7848 of the same precision, and X is an integer type not narrower than
7849 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
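/* Illustrative instance (assuming 32-bit int): for int x,
   (int) ~(unsigned) x becomes ~x, since the casts change only
   signedness, not precision.  */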
7850 if (INTEGRAL_TYPE_P (type)
7851 && TREE_CODE (op0) == BIT_NOT_EXPR
7852 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7853 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7854 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7855 {
7856 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7857 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7858 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7859 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7860 fold_convert_loc (loc, type, tem));
7861 }
7862
7863 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7864 type of X and Y (integer types only). */
7865 if (INTEGRAL_TYPE_P (type)
7866 && TREE_CODE (op0) == MULT_EXPR
7867 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7868 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7869 {
7870 /* Be careful not to introduce new overflows. */
7871 tree mult_type;
7872 if (TYPE_OVERFLOW_WRAPS (type))
7873 mult_type = type;
7874 else
7875 mult_type = unsigned_type_for (type);
7876
7877 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7878 {
7879 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7880 fold_convert_loc (loc, mult_type,
7881 TREE_OPERAND (op0, 0)),
7882 fold_convert_loc (loc, mult_type,
7883 TREE_OPERAND (op0, 1)));
7884 return fold_convert_loc (loc, type, tem);
7885 }
7886 }
7887
7888 return NULL_TREE;
7889
7890 case VIEW_CONVERT_EXPR:
7891 if (TREE_CODE (op0) == MEM_REF)
7892 {
7893 tem = fold_build2_loc (loc, MEM_REF, type,
7894 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7895 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7896 return tem;
7897 }
7898
7899 return NULL_TREE;
7900
7901 case NEGATE_EXPR:
7902 tem = fold_negate_expr (loc, arg0);
7903 if (tem)
7904 return fold_convert_loc (loc, type, tem);
7905 return NULL_TREE;
7906
7907 case ABS_EXPR:
7908 /* Convert fabs((double)float) into (double)fabsf(float). */
7909 if (TREE_CODE (arg0) == NOP_EXPR
7910 && TREE_CODE (type) == REAL_TYPE)
7911 {
7912 tree targ0 = strip_float_extensions (arg0);
7913 if (targ0 != arg0)
7914 return fold_convert_loc (loc, type,
7915 fold_build1_loc (loc, ABS_EXPR,
7916 TREE_TYPE (targ0),
7917 targ0));
7918 }
7919 return NULL_TREE;
7920
7921 case BIT_NOT_EXPR:
7922 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7923 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7924 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7925 fold_convert_loc (loc, type,
7926 TREE_OPERAND (arg0, 0)))))
7927 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7928 fold_convert_loc (loc, type,
7929 TREE_OPERAND (arg0, 1)));
7930 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7931 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7932 fold_convert_loc (loc, type,
7933 TREE_OPERAND (arg0, 1)))))
7934 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7935 fold_convert_loc (loc, type,
7936 TREE_OPERAND (arg0, 0)), tem);
7937
7938 return NULL_TREE;
7939
7940 case TRUTH_NOT_EXPR:
7941 /* Note that the operand of this must be an int
7942 and its values must be 0 or 1.
7943 ("true" is a fixed value perhaps depending on the language,
7944 but we don't handle values other than 1 correctly yet.) */
7945 tem = fold_truth_not_expr (loc, arg0);
7946 if (!tem)
7947 return NULL_TREE;
7948 return fold_convert_loc (loc, type, tem);
7949
7950 case INDIRECT_REF:
7951 /* Fold *&X to X if X is an lvalue. */
7952 if (TREE_CODE (op0) == ADDR_EXPR)
7953 {
7954 tree op00 = TREE_OPERAND (op0, 0);
7955 if ((TREE_CODE (op00) == VAR_DECL
7956 || TREE_CODE (op00) == PARM_DECL
7957 || TREE_CODE (op00) == RESULT_DECL)
7958 && !TREE_READONLY (op00))
7959 return op00;
7960 }
7961 return NULL_TREE;
7962
7963 default:
7964 return NULL_TREE;
7965 } /* switch (code) */
7966 }
7967
7968
7969 /* If the operation was a conversion do _not_ mark a resulting constant
7970 with TREE_OVERFLOW if the original constant was not. These conversions
7971 have implementation defined behavior and retaining the TREE_OVERFLOW
7972 flag here would confuse later passes such as VRP. */
7973 tree
7974 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7975 tree type, tree op0)
7976 {
7977 tree res = fold_unary_loc (loc, code, type, op0);
7978 if (res
7979 && TREE_CODE (res) == INTEGER_CST
7980 && TREE_CODE (op0) == INTEGER_CST
7981 && CONVERT_EXPR_CODE_P (code))
7982 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7983
7984 return res;
7985 }
7986
7987 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7988 operands OP0 and OP1. LOC is the location of the resulting expression.
7989 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
7990 Return the folded expression if folding is successful. Otherwise,
7991 return NULL_TREE. */
7992 static tree
7993 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7994 tree arg0, tree arg1, tree op0, tree op1)
7995 {
7996 tree tem;
7997
7998 /* We only do these simplifications if we are optimizing. */
7999 if (!optimize)
8000 return NULL_TREE;
8001
8002 /* Check for things like (A || B) && (A || C). We can convert this
8003 to A || (B && C). Note that either operator can be any of the four
8004 truth and/or operations and the transformation will still be
8005 valid. Also note that we only care about order for the
8006 ANDIF and ORIF operators. If B contains side effects, this
8007 might change the truth-value of A. */
8008 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8009 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8010 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8011 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8012 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8013 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8014 {
8015 tree a00 = TREE_OPERAND (arg0, 0);
8016 tree a01 = TREE_OPERAND (arg0, 1);
8017 tree a10 = TREE_OPERAND (arg1, 0);
8018 tree a11 = TREE_OPERAND (arg1, 1);
8019 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8020 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8021 && (code == TRUTH_AND_EXPR
8022 || code == TRUTH_OR_EXPR));
8023
8024 if (operand_equal_p (a00, a10, 0))
8025 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8026 fold_build2_loc (loc, code, type, a01, a11));
8027 else if (commutative && operand_equal_p (a00, a11, 0))
8028 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8029 fold_build2_loc (loc, code, type, a01, a10));
8030 else if (commutative && operand_equal_p (a01, a10, 0))
8031 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8032 fold_build2_loc (loc, code, type, a00, a11));
8033
8034 /* This case is tricky because we must either have commutative
8035 operators or else A10 must not have side-effects. */
8036
8037 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8038 && operand_equal_p (a01, a11, 0))
8039 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8040 fold_build2_loc (loc, code, type, a00, a10),
8041 a01);
8042 }
8043
8044 /* See if we can build a range comparison. */
8045 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8046 return tem;
8047
8048 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8049 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8050 {
8051 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8052 if (tem)
8053 return fold_build2_loc (loc, code, type, tem, arg1);
8054 }
8055
8056 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8057 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8058 {
8059 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8060 if (tem)
8061 return fold_build2_loc (loc, code, type, arg0, tem);
8062 }
8063
8064 /* Check for the possibility of merging component references. If our
8065 lhs is another similar operation, try to merge its rhs with our
8066 rhs. Then try to merge our lhs and rhs. */
8067 if (TREE_CODE (arg0) == code
8068 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8069 TREE_OPERAND (arg0, 1), arg1)))
8070 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8071
8072 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8073 return tem;
8074
8075 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8076 && (code == TRUTH_AND_EXPR
8077 || code == TRUTH_ANDIF_EXPR
8078 || code == TRUTH_OR_EXPR
8079 || code == TRUTH_ORIF_EXPR))
8080 {
8081 enum tree_code ncode, icode;
8082
8083 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8084 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8085 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8086
8087 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8088 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8089 We don't want to pack more than two leaves into a non-IF AND/OR
8090 expression.
8091 If the tree code of the left-hand operand isn't an AND/OR-IF code
8092 and isn't equal to IF-CODE, then we don't want to add the right-hand
8093 operand. If the inner right-hand side of the left-hand operand
8094 has side-effects, or isn't simple, then we can't add to it, as
8095 otherwise we might destroy the if-sequence. */
8096 if (TREE_CODE (arg0) == icode
8097 && simple_operand_p_2 (arg1)
8098 /* Needed for sequence points to handle trappings, and
8099 side-effects. */
8100 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8101 {
8102 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8103 arg1);
8104 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8105 tem);
8106 }
8107 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8108 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8109 else if (TREE_CODE (arg1) == icode
8110 && simple_operand_p_2 (arg0)
8111 /* Needed for sequence points to handle trappings, and
8112 side-effects. */
8113 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8114 {
8115 tem = fold_build2_loc (loc, ncode, type,
8116 arg0, TREE_OPERAND (arg1, 0));
8117 return fold_build2_loc (loc, icode, type, tem,
8118 TREE_OPERAND (arg1, 1));
8119 }
8120 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8121 into (A OR B).
8122 For sequence point consistency, we need to check for trapping,
8123 and side-effects. */
8124 else if (code == icode && simple_operand_p_2 (arg0)
8125 && simple_operand_p_2 (arg1))
8126 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8127 }
8128
8129 return NULL_TREE;
8130 }
8131
8132 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8133 by changing CODE to reduce the magnitude of constants involved in
8134 ARG0 of the comparison.
8135 Returns a canonicalized comparison tree if a simplification was
8136 possible, otherwise returns NULL_TREE.
8137 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8138 valid if signed overflow is undefined. */
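/* Illustrative instance (valid only because signed overflow is
   undefined): for int x, the comparison x - 5 < y is canonicalized
   to x - 4 <= y, reducing the magnitude of the constant.  */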
8139
8140 static tree
8141 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8142 tree arg0, tree arg1,
8143 bool *strict_overflow_p)
8144 {
8145 enum tree_code code0 = TREE_CODE (arg0);
8146 tree t, cst0 = NULL_TREE;
8147 int sgn0;
8148
8149 /* Match A +- CST code arg1. We can change this only if overflow
8150 is undefined. */
8151 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8152 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8153 /* In principle pointers also have undefined overflow behavior,
8154 but that causes problems elsewhere. */
8155 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8156 && (code0 == MINUS_EXPR
8157 || code0 == PLUS_EXPR)
8158 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8159 return NULL_TREE;
8160
8161 /* Identify the constant in arg0 and its sign. */
8162 cst0 = TREE_OPERAND (arg0, 1);
8163 sgn0 = tree_int_cst_sgn (cst0);
8164
8165 /* Overflowed constants and zero will cause problems. */
8166 if (integer_zerop (cst0)
8167 || TREE_OVERFLOW (cst0))
8168 return NULL_TREE;
8169
8170 /* See if we can reduce the magnitude of the constant in
8171 arg0 by changing the comparison code. */
8172 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8173 if (code == LT_EXPR
8174 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8175 code = LE_EXPR;
8176 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8177 else if (code == GT_EXPR
8178 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8179 code = GE_EXPR;
8180 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8181 else if (code == LE_EXPR
8182 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8183 code = LT_EXPR;
8184 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8185 else if (code == GE_EXPR
8186 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8187 code = GT_EXPR;
8188 else
8189 return NULL_TREE;
8190 *strict_overflow_p = true;
8191
8192 /* Now build the constant reduced in magnitude. But not if that
8193 would produce one outside of its type's range. */
8194 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8195 && ((sgn0 == 1
8196 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8197 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8198 || (sgn0 == -1
8199 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8200 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8201 return NULL_TREE;
8202
8203 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8204 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8205 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8206 t = fold_convert (TREE_TYPE (arg1), t);
8207
8208 return fold_build2_loc (loc, code, type, t, arg1);
8209 }
8210
8211 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8212 overflow further. Try to decrease the magnitude of constants involved
8213 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8214 and put sole constants at the second argument position.
8215 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8216
8217 static tree
8218 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8219 tree arg0, tree arg1)
8220 {
8221 tree t;
8222 bool strict_overflow_p;
8223 const char * const warnmsg = G_("assuming signed overflow does not occur "
8224 "when reducing constant in comparison");
8225
8226 /* Try canonicalization by simplifying arg0. */
8227 strict_overflow_p = false;
8228 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8229 &strict_overflow_p);
8230 if (t)
8231 {
8232 if (strict_overflow_p)
8233 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8234 return t;
8235 }
8236
8237 /* Try canonicalization by simplifying arg1 using the swapped
8238 comparison. */
8239 code = swap_tree_comparison (code);
8240 strict_overflow_p = false;
8241 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8242 &strict_overflow_p);
8243 if (t && strict_overflow_p)
8244 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8245 return t;
8246 }
8247
8248 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8249 space. This is used to avoid issuing overflow warnings for
8250 expressions like &p->x which cannot wrap. */
8251
8252 static bool
8253 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8254 {
8255 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8256 return true;
8257
8258 if (bitpos < 0)
8259 return true;
8260
8261 wide_int wi_offset;
8262 int precision = TYPE_PRECISION (TREE_TYPE (base));
8263 if (offset == NULL_TREE)
8264 wi_offset = wi::zero (precision);
8265 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8266 return true;
8267 else
8268 wi_offset = offset;
8269
8270 bool overflow;
8271 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8272 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8273 if (overflow)
8274 return true;
8275
8276 if (!wi::fits_uhwi_p (total))
8277 return true;
8278
8279 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8280 if (size <= 0)
8281 return true;
8282
8283 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8284 array. */
8285 if (TREE_CODE (base) == ADDR_EXPR)
8286 {
8287 HOST_WIDE_INT base_size;
8288
8289 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8290 if (base_size > 0 && size < base_size)
8291 size = base_size;
8292 }
8293
8294 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8295 }
8296
8297 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8298 kind INTEGER_CST. This makes sure to properly sign-extend the
8299 constant. */
8300
8301 static HOST_WIDE_INT
8302 size_low_cst (const_tree t)
8303 {
8304 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8305 int prec = TYPE_PRECISION (TREE_TYPE (t));
8306 if (prec < HOST_BITS_PER_WIDE_INT)
8307 return sext_hwi (w, prec);
8308 return w;
8309 }
8310
8311 /* Subroutine of fold_binary. This routine performs all of the
8312 transformations that are common to the equality/inequality
8313 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8314 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8315 fold_binary should call fold_binary. Fold a comparison with
8316 tree code CODE and type TYPE with operands OP0 and OP1. Return
8317 the folded comparison or NULL_TREE. */
8318
8319 static tree
8320 fold_comparison (location_t loc, enum tree_code code, tree type,
8321 tree op0, tree op1)
8322 {
8323 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8324 tree arg0, arg1, tem;
8325
8326 arg0 = op0;
8327 arg1 = op1;
8328
8329 STRIP_SIGN_NOPS (arg0);
8330 STRIP_SIGN_NOPS (arg1);
8331
8332 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
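/* E.g. (illustrative): x + 3 < 10 becomes x < 7, and x - 3 == 10
   becomes x == 13; for the non-equality codes this relies on signed
   overflow being undefined.  */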
8333 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8334 && (equality_code
8335 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8336 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8337 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8338 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8339 && TREE_CODE (arg1) == INTEGER_CST
8340 && !TREE_OVERFLOW (arg1))
8341 {
8342 const enum tree_code
8343 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8344 tree const1 = TREE_OPERAND (arg0, 1);
8345 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8346 tree variable = TREE_OPERAND (arg0, 0);
8347 tree new_const = int_const_binop (reverse_op, const2, const1);
8348
8349 /* If the constant operation overflowed this can be
8350 simplified as a comparison against INT_MAX/INT_MIN. */
8351 if (TREE_OVERFLOW (new_const)
8352 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8353 {
8354 int const1_sgn = tree_int_cst_sgn (const1);
8355 enum tree_code code2 = code;
8356
8357 /* Get the sign of the constant on the lhs if the
8358 operation were VARIABLE + CONST1. */
8359 if (TREE_CODE (arg0) == MINUS_EXPR)
8360 const1_sgn = -const1_sgn;
8361
8362 /* The sign of the constant determines if we overflowed
8363 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8364 Canonicalize to the INT_MIN overflow by swapping the comparison
8365 if necessary. */
8366 if (const1_sgn == -1)
8367 code2 = swap_tree_comparison (code);
8368
8369 /* We now can look at the canonicalized case
8370 VARIABLE + 1 CODE2 INT_MIN
8371 and decide on the result. */
8372 switch (code2)
8373 {
8374 case EQ_EXPR:
8375 case LT_EXPR:
8376 case LE_EXPR:
8377 return
8378 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8379
8380 case NE_EXPR:
8381 case GE_EXPR:
8382 case GT_EXPR:
8383 return
8384 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8385
8386 default:
8387 gcc_unreachable ();
8388 }
8389 }
8390 else
8391 {
8392 if (!equality_code)
8393 fold_overflow_warning ("assuming signed overflow does not occur "
8394 "when changing X +- C1 cmp C2 to "
8395 "X cmp C2 -+ C1",
8396 WARN_STRICT_OVERFLOW_COMPARISON);
8397 return fold_build2_loc (loc, code, type, variable, new_const);
8398 }
8399 }
8400
8401 /* For comparisons of pointers we can decompose it to a compile time
8402 comparison of the base objects and the offsets into the object.
8403 This requires at least one operand being an ADDR_EXPR or a
8404 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8405 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8406 && (TREE_CODE (arg0) == ADDR_EXPR
8407 || TREE_CODE (arg1) == ADDR_EXPR
8408 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8409 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8410 {
8411 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8412 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8413 machine_mode mode;
8414 int volatilep, reversep, unsignedp;
8415 bool indirect_base0 = false, indirect_base1 = false;
8416
8417 /* Get base and offset for the access. Strip ADDR_EXPR for
8418 get_inner_reference, but put it back by stripping INDIRECT_REF
8419 off the base object if possible. indirect_baseN will be true
8420 if baseN is not an address but refers to the object itself. */
8421 base0 = arg0;
8422 if (TREE_CODE (arg0) == ADDR_EXPR)
8423 {
8424 base0
8425 = get_inner_reference (TREE_OPERAND (arg0, 0),
8426 &bitsize, &bitpos0, &offset0, &mode,
8427 &unsignedp, &reversep, &volatilep, false);
8428 if (TREE_CODE (base0) == INDIRECT_REF)
8429 base0 = TREE_OPERAND (base0, 0);
8430 else
8431 indirect_base0 = true;
8432 }
8433 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8434 {
8435 base0 = TREE_OPERAND (arg0, 0);
8436 STRIP_SIGN_NOPS (base0);
8437 if (TREE_CODE (base0) == ADDR_EXPR)
8438 {
8439 base0 = TREE_OPERAND (base0, 0);
8440 indirect_base0 = true;
8441 }
8442 offset0 = TREE_OPERAND (arg0, 1);
8443 if (tree_fits_shwi_p (offset0))
8444 {
8445 HOST_WIDE_INT off = size_low_cst (offset0);
8446 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8447 * BITS_PER_UNIT)
8448 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8449 {
8450 bitpos0 = off * BITS_PER_UNIT;
8451 offset0 = NULL_TREE;
8452 }
8453 }
8454 }
8455
8456 base1 = arg1;
8457 if (TREE_CODE (arg1) == ADDR_EXPR)
8458 {
8459 base1
8460 = get_inner_reference (TREE_OPERAND (arg1, 0),
8461 &bitsize, &bitpos1, &offset1, &mode,
8462 &unsignedp, &reversep, &volatilep, false);
8463 if (TREE_CODE (base1) == INDIRECT_REF)
8464 base1 = TREE_OPERAND (base1, 0);
8465 else
8466 indirect_base1 = true;
8467 }
8468 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8469 {
8470 base1 = TREE_OPERAND (arg1, 0);
8471 STRIP_SIGN_NOPS (base1);
8472 if (TREE_CODE (base1) == ADDR_EXPR)
8473 {
8474 base1 = TREE_OPERAND (base1, 0);
8475 indirect_base1 = true;
8476 }
8477 offset1 = TREE_OPERAND (arg1, 1);
8478 if (tree_fits_shwi_p (offset1))
8479 {
8480 HOST_WIDE_INT off = size_low_cst (offset1);
8481 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8482 * BITS_PER_UNIT)
8483 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8484 {
8485 bitpos1 = off * BITS_PER_UNIT;
8486 offset1 = NULL_TREE;
8487 }
8488 }
8489 }
8490
8491 /* If we have equivalent bases we might be able to simplify. */
8492 if (indirect_base0 == indirect_base1
8493 && operand_equal_p (base0, base1,
8494 indirect_base0 ? OEP_ADDRESS_OF : 0))
8495 {
8496 /* We can fold this expression to a constant if the non-constant
8497 offset parts are equal. */
8498 if ((offset0 == offset1
8499 || (offset0 && offset1
8500 && operand_equal_p (offset0, offset1, 0)))
8501 && (code == EQ_EXPR
8502 || code == NE_EXPR
8503 || (indirect_base0 && DECL_P (base0))
8504 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8505
8506 {
8507 if (!equality_code
8508 && bitpos0 != bitpos1
8509 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8510 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8511 fold_overflow_warning (("assuming pointer wraparound does not "
8512 "occur when comparing P +- C1 with "
8513 "P +- C2"),
8514 WARN_STRICT_OVERFLOW_CONDITIONAL);
8515
8516 switch (code)
8517 {
8518 case EQ_EXPR:
8519 return constant_boolean_node (bitpos0 == bitpos1, type);
8520 case NE_EXPR:
8521 return constant_boolean_node (bitpos0 != bitpos1, type);
8522 case LT_EXPR:
8523 return constant_boolean_node (bitpos0 < bitpos1, type);
8524 case LE_EXPR:
8525 return constant_boolean_node (bitpos0 <= bitpos1, type);
8526 case GE_EXPR:
8527 return constant_boolean_node (bitpos0 >= bitpos1, type);
8528 case GT_EXPR:
8529 return constant_boolean_node (bitpos0 > bitpos1, type);
8530 default:;
8531 }
8532 }
8533 /* We can simplify the comparison to a comparison of the variable
8534 offset parts if the constant offset parts are equal.
8535 Be careful to use signed sizetype here because otherwise we
8536 mess with array offsets in the wrong way. This is possible
8537 because pointer arithmetic is restricted to remain within an
8538 object and overflow on pointer differences is undefined as of
8539 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8540 else if (bitpos0 == bitpos1
8541 && (equality_code
8542 || (indirect_base0 && DECL_P (base0))
8543 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8544 {
8545 /* By converting to signed sizetype we cover middle-end pointer
8546 arithmetic which operates on unsigned pointer types of size
8547 type size and ARRAY_REF offsets which are properly sign or
8548 zero extended from their type in case it is narrower than
8549 sizetype. */
8550 if (offset0 == NULL_TREE)
8551 offset0 = build_int_cst (ssizetype, 0);
8552 else
8553 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8554 if (offset1 == NULL_TREE)
8555 offset1 = build_int_cst (ssizetype, 0);
8556 else
8557 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8558
8559 if (!equality_code
8560 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8561 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8562 fold_overflow_warning (("assuming pointer wraparound does not "
8563 "occur when comparing P +- C1 with "
8564 "P +- C2"),
8565 WARN_STRICT_OVERFLOW_COMPARISON);
8566
8567 return fold_build2_loc (loc, code, type, offset0, offset1);
8568 }
8569 }
8570 /* For equal offsets we can simplify to a comparison of the
8571 base addresses. */
8572 else if (bitpos0 == bitpos1
8573 && (indirect_base0
8574 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8575 && (indirect_base1
8576 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8577 && ((offset0 == offset1)
8578 || (offset0 && offset1
8579 && operand_equal_p (offset0, offset1, 0))))
8580 {
8581 if (indirect_base0)
8582 base0 = build_fold_addr_expr_loc (loc, base0);
8583 if (indirect_base1)
8584 base1 = build_fold_addr_expr_loc (loc, base1);
8585 return fold_build2_loc (loc, code, type, base0, base1);
8586 }
8587 }
8588
8589 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8590 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8591 the resulting offset is smaller in absolute value than the
8592 original one and has the same sign. */
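/* E.g. (illustrative): for signed x and y, x + 2 < y + 5 becomes
   x < y + 3, since the combined constant 3 is smaller in absolute
   value than 5 and has the same sign.  */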
8593 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8594 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8595 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8596 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8597 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8598 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8599 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8600 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8601 {
8602 tree const1 = TREE_OPERAND (arg0, 1);
8603 tree const2 = TREE_OPERAND (arg1, 1);
8604 tree variable1 = TREE_OPERAND (arg0, 0);
8605 tree variable2 = TREE_OPERAND (arg1, 0);
8606 tree cst;
8607 const char * const warnmsg = G_("assuming signed overflow does not "
8608 "occur when combining constants around "
8609 "a comparison");
8610
8611 /* Put the constant on the side where it doesn't overflow and is
8612 of lower absolute value and of the same sign as before. */
8613 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8614 ? MINUS_EXPR : PLUS_EXPR,
8615 const2, const1);
8616 if (!TREE_OVERFLOW (cst)
8617 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8618 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8619 {
8620 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8621 return fold_build2_loc (loc, code, type,
8622 variable1,
8623 fold_build2_loc (loc, TREE_CODE (arg1),
8624 TREE_TYPE (arg1),
8625 variable2, cst));
8626 }
8627
8628 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8629 ? MINUS_EXPR : PLUS_EXPR,
8630 const1, const2);
8631 if (!TREE_OVERFLOW (cst)
8632 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8633 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8634 {
8635 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8636 return fold_build2_loc (loc, code, type,
8637 fold_build2_loc (loc, TREE_CODE (arg0),
8638 TREE_TYPE (arg0),
8639 variable1, cst),
8640 variable2);
8641 }
8642 }
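	  /* Added illustrative note (not part of the original sources):
	     with signed ints x and y, the transform above rewrites

	       x + 10 < y + 12   into   x < y + 2

	     which is safe under undefined signed overflow because the new
	     constant 2 is smaller in absolute value than 12 and has the
	     same sign, so no new overflow can be introduced.  */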
8643
8644 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8645 if (tem)
8646 return tem;
8647
8648 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8649 constant, we can simplify it. */
8650 if (TREE_CODE (arg1) == INTEGER_CST
8651 && (TREE_CODE (arg0) == MIN_EXPR
8652 || TREE_CODE (arg0) == MAX_EXPR)
8653 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8654 {
8655 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8656 if (tem)
8657 return tem;
8658 }
8659
8660 /* If we are comparing an expression that just has comparisons
8661 of two integer values, arithmetic expressions of those comparisons,
8662 and constants, we can simplify it. There are only three cases
8663 to check: the two values can either be equal, the first can be
8664 greater, or the second can be greater. Fold the expression for
8665 those three values. Since each value must be 0 or 1, we have
8666 eight possibilities, each of which corresponds to the constant 0
8667 or 1 or one of the six possible comparisons.
8668
8669 This handles common cases like (a > b) == 0 but also handles
8670 expressions like ((x > y) - (y > x)) > 0, which supposedly
8671 occur in macroized code. */
8672
8673 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8674 {
8675 tree cval1 = 0, cval2 = 0;
8676 int save_p = 0;
8677
8678 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8679 /* Don't handle degenerate cases here; they should already
8680 have been handled anyway. */
8681 && cval1 != 0 && cval2 != 0
8682 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8683 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8684 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8685 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8686 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8687 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8688 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8689 {
8690 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8691 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8692
8693 /* We can't just pass T to eval_subst in case cval1 or cval2
8694 was the same as ARG1. */
8695
8696 tree high_result
8697 = fold_build2_loc (loc, code, type,
8698 eval_subst (loc, arg0, cval1, maxval,
8699 cval2, minval),
8700 arg1);
8701 tree equal_result
8702 = fold_build2_loc (loc, code, type,
8703 eval_subst (loc, arg0, cval1, maxval,
8704 cval2, maxval),
8705 arg1);
8706 tree low_result
8707 = fold_build2_loc (loc, code, type,
8708 eval_subst (loc, arg0, cval1, minval,
8709 cval2, maxval),
8710 arg1);
8711
8712 /* All three of these results should be 0 or 1. Confirm they are.
8713 Then use those values to select the proper code to use. */
8714
8715 if (TREE_CODE (high_result) == INTEGER_CST
8716 && TREE_CODE (equal_result) == INTEGER_CST
8717 && TREE_CODE (low_result) == INTEGER_CST)
8718 {
8719 /* Make a 3-bit mask with the high-order bit being the
8720		 value for `>', the next for `=', and the low for `<'.  */
8721 switch ((integer_onep (high_result) * 4)
8722 + (integer_onep (equal_result) * 2)
8723 + integer_onep (low_result))
8724 {
8725 case 0:
8726 /* Always false. */
8727 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8728 case 1:
8729 code = LT_EXPR;
8730 break;
8731 case 2:
8732 code = EQ_EXPR;
8733 break;
8734 case 3:
8735 code = LE_EXPR;
8736 break;
8737 case 4:
8738 code = GT_EXPR;
8739 break;
8740 case 5:
8741 code = NE_EXPR;
8742 break;
8743 case 6:
8744 code = GE_EXPR;
8745 break;
8746 case 7:
8747 /* Always true. */
8748 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8749 }
8750
8751 if (save_p)
8752 {
8753 tem = save_expr (build2 (code, type, cval1, cval2));
8754 SET_EXPR_LOCATION (tem, loc);
8755 return tem;
8756 }
8757 return fold_build2_loc (loc, code, type, cval1, cval2);
8758 }
8759 }
8760 }
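      /* Added illustrative note (not part of the original sources): for
	 "(a > b) == 0" the three substitutions give high_result = 0,
	 equal_result = 1 and low_result = 1, i.e. the 3-bit mask 3, which
	 selects LE_EXPR, so the whole expression folds to "a <= b".  */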
8761
8762 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8763 into a single range test. */
8764 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8765 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8766 && TREE_CODE (arg1) == INTEGER_CST
8767 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8768 && !integer_zerop (TREE_OPERAND (arg0, 1))
8769 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8770 && !TREE_OVERFLOW (arg1))
8771 {
8772 tem = fold_div_compare (loc, code, type, arg0, arg1);
8773 if (tem != NULL_TREE)
8774 return tem;
8775 }
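  /* Added illustrative note (not part of the original sources): for a
     truncating division, fold_div_compare can turn "x / 4 == 2" into the
     range test 8 <= x && x <= 11, conceptually a single unsigned
     comparison such as (unsigned) (x - 8) <= 3.  */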
8776
8777 return NULL_TREE;
8778 }
8779
8780
8781 /* Subroutine of fold_binary. Optimize complex multiplications of the
8782 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8783 argument EXPR represents the expression "z" of type TYPE. */
8784
8785 static tree
8786 fold_mult_zconjz (location_t loc, tree type, tree expr)
8787 {
8788 tree itype = TREE_TYPE (type);
8789 tree rpart, ipart, tem;
8790
8791 if (TREE_CODE (expr) == COMPLEX_EXPR)
8792 {
8793 rpart = TREE_OPERAND (expr, 0);
8794 ipart = TREE_OPERAND (expr, 1);
8795 }
8796 else if (TREE_CODE (expr) == COMPLEX_CST)
8797 {
8798 rpart = TREE_REALPART (expr);
8799 ipart = TREE_IMAGPART (expr);
8800 }
8801 else
8802 {
8803 expr = save_expr (expr);
8804 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8805 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8806 }
8807
8808 rpart = save_expr (rpart);
8809 ipart = save_expr (ipart);
8810 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8811 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8812 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8813 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8814 build_zero_cst (itype));
8815 }
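/* Added worked example (not part of the original sources): for
   z = a + b*i we have z * conj(z) = (a + b*i) * (a - b*i)
   = a*a + b*b + 0*i, which is exactly the COMPLEX_EXPR built above;
   despite the pow() wording in the function comment, plain
   multiplications are emitted.  */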
8816
8817
8818 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8819 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8820
8821 static bool
8822 vec_cst_ctor_to_array (tree arg, tree *elts)
8823 {
8824 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8825
8826 if (TREE_CODE (arg) == VECTOR_CST)
8827 {
8828 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8829 elts[i] = VECTOR_CST_ELT (arg, i);
8830 }
8831 else if (TREE_CODE (arg) == CONSTRUCTOR)
8832 {
8833 constructor_elt *elt;
8834
8835 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8836 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8837 return false;
8838 else
8839 elts[i] = elt->value;
8840 }
8841 else
8842 return false;
8843 for (; i < nelts; i++)
8844 elts[i]
8845 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8846 return true;
8847 }
8848
8849 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8850 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8851 NULL_TREE otherwise. */
8852
8853 static tree
8854 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8855 {
8856 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8857 tree *elts;
8858 bool need_ctor = false;
8859
8860 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8861 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8862 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8863 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8864 return NULL_TREE;
8865
8866 elts = XALLOCAVEC (tree, nelts * 3);
8867 if (!vec_cst_ctor_to_array (arg0, elts)
8868 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8869 return NULL_TREE;
8870
8871 for (i = 0; i < nelts; i++)
8872 {
8873 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8874 need_ctor = true;
8875 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8876 }
8877
8878 if (need_ctor)
8879 {
8880 vec<constructor_elt, va_gc> *v;
8881 vec_alloc (v, nelts);
8882 for (i = 0; i < nelts; i++)
8883 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8884 return build_constructor (type, v);
8885 }
8886 else
8887 return build_vector (type, &elts[2 * nelts]);
8888 }
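/* Added illustrative note (not part of the original sources): permuting
   the V4SI constants {1,2,3,4} and {5,6,7,8} with the selector
   {0,4,1,5} yields {1,5,2,6}; selector values 4..7 pick elements from
   the second input vector.  */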
8889
8890 /* Try to fold a pointer difference of type TYPE between two address
8891    expressions of array references AREF0 and AREF1 using location LOC.
8892    Return a simplified expression for the difference or NULL_TREE.  */
8893
8894 static tree
8895 fold_addr_of_array_ref_difference (location_t loc, tree type,
8896 tree aref0, tree aref1)
8897 {
8898 tree base0 = TREE_OPERAND (aref0, 0);
8899 tree base1 = TREE_OPERAND (aref1, 0);
8900 tree base_offset = build_int_cst (type, 0);
8901
8902 /* If the bases are array references as well, recurse. If the bases
8903 are pointer indirections compute the difference of the pointers.
8904 If the bases are equal, we are set. */
8905 if ((TREE_CODE (base0) == ARRAY_REF
8906 && TREE_CODE (base1) == ARRAY_REF
8907 && (base_offset
8908 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8909 || (INDIRECT_REF_P (base0)
8910 && INDIRECT_REF_P (base1)
8911 && (base_offset
8912 = fold_binary_loc (loc, MINUS_EXPR, type,
8913 fold_convert (type, TREE_OPERAND (base0, 0)),
8914 fold_convert (type,
8915 TREE_OPERAND (base1, 0)))))
8916 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8917 {
8918 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8919 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8920 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8921 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8922 return fold_build2_loc (loc, PLUS_EXPR, type,
8923 base_offset,
8924 fold_build2_loc (loc, MULT_EXPR, type,
8925 diff, esz));
8926 }
8927 return NULL_TREE;
8928 }
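/* Added illustrative note (not part of the original sources): for
   "int a[16];" the address difference "&a[i] - &a[j]" has equal bases,
   so the helper above folds it to (i - j) * 4, i.e. the index
   difference scaled by the element size in bytes.  */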
8929
8930 /* If the real or vector real constant CST of type TYPE has an exact
8931 inverse, return it, else return NULL. */
8932
8933 tree
8934 exact_inverse (tree type, tree cst)
8935 {
8936 REAL_VALUE_TYPE r;
8937 tree unit_type, *elts;
8938 machine_mode mode;
8939 unsigned vec_nelts, i;
8940
8941 switch (TREE_CODE (cst))
8942 {
8943 case REAL_CST:
8944 r = TREE_REAL_CST (cst);
8945
8946 if (exact_real_inverse (TYPE_MODE (type), &r))
8947 return build_real (type, r);
8948
8949 return NULL_TREE;
8950
8951 case VECTOR_CST:
8952 vec_nelts = VECTOR_CST_NELTS (cst);
8953 elts = XALLOCAVEC (tree, vec_nelts);
8954 unit_type = TREE_TYPE (type);
8955 mode = TYPE_MODE (unit_type);
8956
8957 for (i = 0; i < vec_nelts; i++)
8958 {
8959 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8960 if (!exact_real_inverse (mode, &r))
8961 return NULL_TREE;
8962 elts[i] = build_real (unit_type, r);
8963 }
8964
8965 return build_vector (type, elts);
8966
8967 default:
8968 return NULL_TREE;
8969 }
8970 }
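/* Added illustrative note (not part of the original sources):
   exact_inverse returns 0.5 for the constant 2.0 (and likewise per
   element for a VECTOR_CST), but NULL_TREE for 3.0, whose reciprocal
   has no exact binary floating-point representation.  */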
8971
8972 /* Mask out the tz least significant bits of X of type TYPE where
8973 tz is the number of trailing zeroes in Y. */
8974 static wide_int
8975 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8976 {
8977 int tz = wi::ctz (y);
8978 if (tz > 0)
8979 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8980 return x;
8981 }
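/* Added illustrative note (not part of the original sources): with
   y = 24 (binary ...11000, three trailing zeros), mask_with_tz clears
   the low three bits of x, so x = 0x2f yields 0x28.  */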
8982
8983 /* Return true when T is an address and is known to be nonzero.
8984 For floating point we further ensure that T is not denormal.
8985    Similar logic is present in nonzero_address_p in rtlanal.c.
8986
8987 If the return value is based on the assumption that signed overflow
8988 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8989 change *STRICT_OVERFLOW_P. */
8990
8991 static bool
8992 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8993 {
8994 tree type = TREE_TYPE (t);
8995 enum tree_code code;
8996
8997 /* Doing something useful for floating point would need more work. */
8998 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8999 return false;
9000
9001 code = TREE_CODE (t);
9002 switch (TREE_CODE_CLASS (code))
9003 {
9004 case tcc_unary:
9005 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9006 strict_overflow_p);
9007 case tcc_binary:
9008 case tcc_comparison:
9009 return tree_binary_nonzero_warnv_p (code, type,
9010 TREE_OPERAND (t, 0),
9011 TREE_OPERAND (t, 1),
9012 strict_overflow_p);
9013 case tcc_constant:
9014 case tcc_declaration:
9015 case tcc_reference:
9016 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9017
9018 default:
9019 break;
9020 }
9021
9022 switch (code)
9023 {
9024 case TRUTH_NOT_EXPR:
9025 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9026 strict_overflow_p);
9027
9028 case TRUTH_AND_EXPR:
9029 case TRUTH_OR_EXPR:
9030 case TRUTH_XOR_EXPR:
9031 return tree_binary_nonzero_warnv_p (code, type,
9032 TREE_OPERAND (t, 0),
9033 TREE_OPERAND (t, 1),
9034 strict_overflow_p);
9035
9036 case COND_EXPR:
9037 case CONSTRUCTOR:
9038 case OBJ_TYPE_REF:
9039 case ASSERT_EXPR:
9040 case ADDR_EXPR:
9041 case WITH_SIZE_EXPR:
9042 case SSA_NAME:
9043 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9044
9045 case COMPOUND_EXPR:
9046 case MODIFY_EXPR:
9047 case BIND_EXPR:
9048 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9049 strict_overflow_p);
9050
9051 case SAVE_EXPR:
9052 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9053 strict_overflow_p);
9054
9055 case CALL_EXPR:
9056 {
9057 tree fndecl = get_callee_fndecl (t);
9058 if (!fndecl) return false;
9059 if (flag_delete_null_pointer_checks && !flag_check_new
9060 && DECL_IS_OPERATOR_NEW (fndecl)
9061 && !TREE_NOTHROW (fndecl))
9062 return true;
9063 if (flag_delete_null_pointer_checks
9064 && lookup_attribute ("returns_nonnull",
9065 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9066 return true;
9067 return alloca_call_p (t);
9068 }
9069
9070 default:
9071 break;
9072 }
9073 return false;
9074 }
9075
9076 /* Return true when T is an address and is known to be nonzero.
9077 Handle warnings about undefined signed overflow. */
9078
9079 static bool
9080 tree_expr_nonzero_p (tree t)
9081 {
9082 bool ret, strict_overflow_p;
9083
9084 strict_overflow_p = false;
9085 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9086 if (strict_overflow_p)
9087 fold_overflow_warning (("assuming signed overflow does not occur when "
9088 "determining that expression is always "
9089 "non-zero"),
9090 WARN_STRICT_OVERFLOW_MISC);
9091 return ret;
9092 }
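/* Added illustrative note (not part of the original sources): taking
   the address of an ordinary (non-weak) object, a call to a throwing
   operator new under -fdelete-null-pointer-checks, or a call to a
   function marked returns_nonnull are typical cases for which the
   predicates above report a guaranteed nonzero value.  */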
9093
9094 /* Fold a binary expression of code CODE and type TYPE with operands
9095 OP0 and OP1. LOC is the location of the resulting expression.
9096 Return the folded expression if folding is successful. Otherwise,
9097 return NULL_TREE. */
9098
9099 tree
9100 fold_binary_loc (location_t loc,
9101 enum tree_code code, tree type, tree op0, tree op1)
9102 {
9103 enum tree_code_class kind = TREE_CODE_CLASS (code);
9104 tree arg0, arg1, tem;
9105 tree t1 = NULL_TREE;
9106 bool strict_overflow_p;
9107 unsigned int prec;
9108
9109 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9110 && TREE_CODE_LENGTH (code) == 2
9111 && op0 != NULL_TREE
9112 && op1 != NULL_TREE);
9113
9114 arg0 = op0;
9115 arg1 = op1;
9116
9117 /* Strip any conversions that don't change the mode. This is
9118 safe for every expression, except for a comparison expression
9119 because its signedness is derived from its operands. So, in
9120 the latter case, only strip conversions that don't change the
9121 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9122 preserved.
9123
9124 Note that this is done as an internal manipulation within the
9125 constant folder, in order to find the simplest representation
9126 of the arguments so that their form can be studied. In any
9127     case, the appropriate type conversions should be put back in
9128 the tree that will get out of the constant folder. */
9129
9130 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9131 {
9132 STRIP_SIGN_NOPS (arg0);
9133 STRIP_SIGN_NOPS (arg1);
9134 }
9135 else
9136 {
9137 STRIP_NOPS (arg0);
9138 STRIP_NOPS (arg1);
9139 }
9140
9141 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9142 constant but we can't do arithmetic on them. */
9143 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9144 {
9145 tem = const_binop (code, type, arg0, arg1);
9146 if (tem != NULL_TREE)
9147 {
9148 if (TREE_TYPE (tem) != type)
9149 tem = fold_convert_loc (loc, type, tem);
9150 return tem;
9151 }
9152 }
9153
9154 /* If this is a commutative operation, and ARG0 is a constant, move it
9155 to ARG1 to reduce the number of tests below. */
9156 if (commutative_tree_code (code)
9157 && tree_swap_operands_p (arg0, arg1, true))
9158 return fold_build2_loc (loc, code, type, op1, op0);
9159
9160 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9161 to ARG1 to reduce the number of tests below. */
9162 if (kind == tcc_comparison
9163 && tree_swap_operands_p (arg0, arg1, true))
9164 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9165
9166 tem = generic_simplify (loc, code, type, op0, op1);
9167 if (tem)
9168 return tem;
9169
9170 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9171
9172 First check for cases where an arithmetic operation is applied to a
9173 compound, conditional, or comparison operation. Push the arithmetic
9174 operation inside the compound or conditional to see if any folding
9175 can then be done. Convert comparison to conditional for this purpose.
9176     This also optimizes non-constant cases that used to be done in
9177 expand_expr.
9178
9179     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9180     one of the operands is a comparison and the other is a comparison, a
9181 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9182 code below would make the expression more complex. Change it to a
9183 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9184 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9185
9186 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9187 || code == EQ_EXPR || code == NE_EXPR)
9188 && TREE_CODE (type) != VECTOR_TYPE
9189 && ((truth_value_p (TREE_CODE (arg0))
9190 && (truth_value_p (TREE_CODE (arg1))
9191 || (TREE_CODE (arg1) == BIT_AND_EXPR
9192 && integer_onep (TREE_OPERAND (arg1, 1)))))
9193 || (truth_value_p (TREE_CODE (arg1))
9194 && (truth_value_p (TREE_CODE (arg0))
9195 || (TREE_CODE (arg0) == BIT_AND_EXPR
9196 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9197 {
9198 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9199 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9200 : TRUTH_XOR_EXPR,
9201 boolean_type_node,
9202 fold_convert_loc (loc, boolean_type_node, arg0),
9203 fold_convert_loc (loc, boolean_type_node, arg1));
9204
9205 if (code == EQ_EXPR)
9206 tem = invert_truthvalue_loc (loc, tem);
9207
9208 return fold_convert_loc (loc, type, tem);
9209 }
9210
9211 if (TREE_CODE_CLASS (code) == tcc_binary
9212 || TREE_CODE_CLASS (code) == tcc_comparison)
9213 {
9214 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9215 {
9216 tem = fold_build2_loc (loc, code, type,
9217 fold_convert_loc (loc, TREE_TYPE (op0),
9218 TREE_OPERAND (arg0, 1)), op1);
9219 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9220 tem);
9221 }
9222 if (TREE_CODE (arg1) == COMPOUND_EXPR
9223 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9224 {
9225 tem = fold_build2_loc (loc, code, type, op0,
9226 fold_convert_loc (loc, TREE_TYPE (op1),
9227 TREE_OPERAND (arg1, 1)));
9228 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9229 tem);
9230 }
9231
9232 if (TREE_CODE (arg0) == COND_EXPR
9233 || TREE_CODE (arg0) == VEC_COND_EXPR
9234 || COMPARISON_CLASS_P (arg0))
9235 {
9236 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9237 arg0, arg1,
9238 /*cond_first_p=*/1);
9239 if (tem != NULL_TREE)
9240 return tem;
9241 }
9242
9243 if (TREE_CODE (arg1) == COND_EXPR
9244 || TREE_CODE (arg1) == VEC_COND_EXPR
9245 || COMPARISON_CLASS_P (arg1))
9246 {
9247 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9248 arg1, arg0,
9249 /*cond_first_p=*/0);
9250 if (tem != NULL_TREE)
9251 return tem;
9252 }
9253 }
9254
9255 switch (code)
9256 {
9257 case MEM_REF:
9258 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9259 if (TREE_CODE (arg0) == ADDR_EXPR
9260 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9261 {
9262 tree iref = TREE_OPERAND (arg0, 0);
9263 return fold_build2 (MEM_REF, type,
9264 TREE_OPERAND (iref, 0),
9265 int_const_binop (PLUS_EXPR, arg1,
9266 TREE_OPERAND (iref, 1)));
9267 }
9268
9269 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9270 if (TREE_CODE (arg0) == ADDR_EXPR
9271 && handled_component_p (TREE_OPERAND (arg0, 0)))
9272 {
9273 tree base;
9274 HOST_WIDE_INT coffset;
9275 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9276 &coffset);
9277 if (!base)
9278 return NULL_TREE;
9279 return fold_build2 (MEM_REF, type,
9280 build_fold_addr_expr (base),
9281 int_const_binop (PLUS_EXPR, arg1,
9282 size_int (coffset)));
9283 }
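      /* Added illustrative note (not part of the original sources):
	 the two folds above turn MEM[&MEM[p, 4], 8] into MEM[p, 12] and
	 MEM[&s.f, 8] into MEM[&s, offsetof (s, f) + 8] whenever the
	 offset of the component is known at compile time.  */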
9284
9285 return NULL_TREE;
9286
9287 case POINTER_PLUS_EXPR:
9288 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9289 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9290 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9291 return fold_convert_loc (loc, type,
9292 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9293 fold_convert_loc (loc, sizetype,
9294 arg1),
9295 fold_convert_loc (loc, sizetype,
9296 arg0)));
9297
9298 return NULL_TREE;
9299
9300 case PLUS_EXPR:
9301 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9302 {
9303 /* X + (X / CST) * -CST is X % CST. */
9304 if (TREE_CODE (arg1) == MULT_EXPR
9305 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9306 && operand_equal_p (arg0,
9307 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9308 {
9309 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9310 tree cst1 = TREE_OPERAND (arg1, 1);
9311 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9312 cst1, cst0);
9313 if (sum && integer_zerop (sum))
9314 return fold_convert_loc (loc, type,
9315 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9316 TREE_TYPE (arg0), arg0,
9317 cst0));
9318 }
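	  /* Added illustrative note (not part of the original sources):
	     x + (x / 16) * -16 folds to x % 16 here, by the identity
	     x == (x / d) * d + x % d for truncating division.  */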
9319 }
9320
9321 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9322 one. Make sure the type is not saturating and has the signedness of
9323 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9324 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9325 if ((TREE_CODE (arg0) == MULT_EXPR
9326 || TREE_CODE (arg1) == MULT_EXPR)
9327 && !TYPE_SATURATING (type)
9328 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9329 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9330 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9331 {
9332 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9333 if (tem)
9334 return tem;
9335 }
9336
9337 if (! FLOAT_TYPE_P (type))
9338 {
9339 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9340 (plus (plus (mult) (mult)) (foo)) so that we can
9341 take advantage of the factoring cases below. */
9342 if (ANY_INTEGRAL_TYPE_P (type)
9343 && TYPE_OVERFLOW_WRAPS (type)
9344 && (((TREE_CODE (arg0) == PLUS_EXPR
9345 || TREE_CODE (arg0) == MINUS_EXPR)
9346 && TREE_CODE (arg1) == MULT_EXPR)
9347 || ((TREE_CODE (arg1) == PLUS_EXPR
9348 || TREE_CODE (arg1) == MINUS_EXPR)
9349 && TREE_CODE (arg0) == MULT_EXPR)))
9350 {
9351 tree parg0, parg1, parg, marg;
9352 enum tree_code pcode;
9353
9354 if (TREE_CODE (arg1) == MULT_EXPR)
9355 parg = arg0, marg = arg1;
9356 else
9357 parg = arg1, marg = arg0;
9358 pcode = TREE_CODE (parg);
9359 parg0 = TREE_OPERAND (parg, 0);
9360 parg1 = TREE_OPERAND (parg, 1);
9361 STRIP_NOPS (parg0);
9362 STRIP_NOPS (parg1);
9363
9364 if (TREE_CODE (parg0) == MULT_EXPR
9365 && TREE_CODE (parg1) != MULT_EXPR)
9366 return fold_build2_loc (loc, pcode, type,
9367 fold_build2_loc (loc, PLUS_EXPR, type,
9368 fold_convert_loc (loc, type,
9369 parg0),
9370 fold_convert_loc (loc, type,
9371 marg)),
9372 fold_convert_loc (loc, type, parg1));
9373 if (TREE_CODE (parg0) != MULT_EXPR
9374 && TREE_CODE (parg1) == MULT_EXPR)
9375 return
9376 fold_build2_loc (loc, PLUS_EXPR, type,
9377 fold_convert_loc (loc, type, parg0),
9378 fold_build2_loc (loc, pcode, type,
9379 fold_convert_loc (loc, type, marg),
9380 fold_convert_loc (loc, type,
9381 parg1)));
9382 }
9383 }
9384 else
9385 {
9386 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9387 to __complex__ ( x, y ). This is not the same for SNaNs or
9388 if signed zeros are involved. */
9389 if (!HONOR_SNANS (element_mode (arg0))
9390 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9391 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9392 {
9393 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9394 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9395 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9396 bool arg0rz = false, arg0iz = false;
9397 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9398 || (arg0i && (arg0iz = real_zerop (arg0i))))
9399 {
9400 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9401 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9402 if (arg0rz && arg1i && real_zerop (arg1i))
9403 {
9404 tree rp = arg1r ? arg1r
9405 : build1 (REALPART_EXPR, rtype, arg1);
9406 tree ip = arg0i ? arg0i
9407 : build1 (IMAGPART_EXPR, rtype, arg0);
9408 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9409 }
9410 else if (arg0iz && arg1r && real_zerop (arg1r))
9411 {
9412 tree rp = arg0r ? arg0r
9413 : build1 (REALPART_EXPR, rtype, arg0);
9414 tree ip = arg1i ? arg1i
9415 : build1 (IMAGPART_EXPR, rtype, arg1);
9416 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9417 }
9418 }
9419 }
9420
9421 if (flag_unsafe_math_optimizations
9422 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9423 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9424 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9425 return tem;
9426
9427 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9428 We associate floats only if the user has specified
9429 -fassociative-math. */
9430 if (flag_associative_math
9431 && TREE_CODE (arg1) == PLUS_EXPR
9432 && TREE_CODE (arg0) != MULT_EXPR)
9433 {
9434 tree tree10 = TREE_OPERAND (arg1, 0);
9435 tree tree11 = TREE_OPERAND (arg1, 1);
9436 if (TREE_CODE (tree11) == MULT_EXPR
9437 && TREE_CODE (tree10) == MULT_EXPR)
9438 {
9439 tree tree0;
9440 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9441 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9442 }
9443 }
9444 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9445 We associate floats only if the user has specified
9446 -fassociative-math. */
9447 if (flag_associative_math
9448 && TREE_CODE (arg0) == PLUS_EXPR
9449 && TREE_CODE (arg1) != MULT_EXPR)
9450 {
9451 tree tree00 = TREE_OPERAND (arg0, 0);
9452 tree tree01 = TREE_OPERAND (arg0, 1);
9453 if (TREE_CODE (tree01) == MULT_EXPR
9454 && TREE_CODE (tree00) == MULT_EXPR)
9455 {
9456 tree tree0;
9457 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9458 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9459 }
9460 }
9461 }
9462
9463 bit_rotate:
9464   /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
9465      is a rotate of A by C1 bits.  */
9466   /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
9467      is a rotate of A by B bits.  */
9468 {
9469 enum tree_code code0, code1;
9470 tree rtype;
9471 code0 = TREE_CODE (arg0);
9472 code1 = TREE_CODE (arg1);
9473 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9474 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9475 && operand_equal_p (TREE_OPERAND (arg0, 0),
9476 TREE_OPERAND (arg1, 0), 0)
9477 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9478 TYPE_UNSIGNED (rtype))
9479 /* Only create rotates in complete modes. Other cases are not
9480 expanded properly. */
9481 && (element_precision (rtype)
9482 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9483 {
9484 tree tree01, tree11;
9485 enum tree_code code01, code11;
9486
9487 tree01 = TREE_OPERAND (arg0, 1);
9488 tree11 = TREE_OPERAND (arg1, 1);
9489 STRIP_NOPS (tree01);
9490 STRIP_NOPS (tree11);
9491 code01 = TREE_CODE (tree01);
9492 code11 = TREE_CODE (tree11);
9493 if (code01 == INTEGER_CST
9494 && code11 == INTEGER_CST
9495 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9496 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9497 {
9498 tem = build2_loc (loc, LROTATE_EXPR,
9499 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9500 TREE_OPERAND (arg0, 0),
9501 code0 == LSHIFT_EXPR
9502 ? TREE_OPERAND (arg0, 1)
9503 : TREE_OPERAND (arg1, 1));
9504 return fold_convert_loc (loc, type, tem);
9505 }
9506 else if (code11 == MINUS_EXPR)
9507 {
9508 tree tree110, tree111;
9509 tree110 = TREE_OPERAND (tree11, 0);
9510 tree111 = TREE_OPERAND (tree11, 1);
9511 STRIP_NOPS (tree110);
9512 STRIP_NOPS (tree111);
9513 if (TREE_CODE (tree110) == INTEGER_CST
9514 && 0 == compare_tree_int (tree110,
9515 element_precision
9516 (TREE_TYPE (TREE_OPERAND
9517 (arg0, 0))))
9518 && operand_equal_p (tree01, tree111, 0))
9519 return
9520 fold_convert_loc (loc, type,
9521 build2 ((code0 == LSHIFT_EXPR
9522 ? LROTATE_EXPR
9523 : RROTATE_EXPR),
9524 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9525 TREE_OPERAND (arg0, 0),
9526 TREE_OPERAND (arg0, 1)));
9527 }
9528 else if (code01 == MINUS_EXPR)
9529 {
9530 tree tree010, tree011;
9531 tree010 = TREE_OPERAND (tree01, 0);
9532 tree011 = TREE_OPERAND (tree01, 1);
9533 STRIP_NOPS (tree010);
9534 STRIP_NOPS (tree011);
9535 if (TREE_CODE (tree010) == INTEGER_CST
9536 && 0 == compare_tree_int (tree010,
9537 element_precision
9538 (TREE_TYPE (TREE_OPERAND
9539 (arg0, 0))))
9540 && operand_equal_p (tree11, tree011, 0))
9541 return fold_convert_loc
9542 (loc, type,
9543 build2 ((code0 != LSHIFT_EXPR
9544 ? LROTATE_EXPR
9545 : RROTATE_EXPR),
9546 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9547 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9548 }
9549 }
9550 }
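    /* Added illustrative note (not part of the original sources): for a
       32-bit unsigned x, (x << 3) + (x >> 29) matches the constant case
       and becomes a left-rotate of x by 3, while the symbolic form
       (x << n) + (x >> (32 - n)) is caught by the MINUS_EXPR branches
       and becomes a rotate by n in the appropriate direction.  */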
9551
9552 associate:
9553   /* In most languages, we can't associate operations on floats through
9554 parentheses. Rather than remember where the parentheses were, we
9555 don't associate floats at all, unless the user has specified
9556 -fassociative-math.
9557 And, we need to make sure type is not saturating. */
9558
9559 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9560 && !TYPE_SATURATING (type))
9561 {
9562 tree var0, con0, lit0, minus_lit0;
9563 tree var1, con1, lit1, minus_lit1;
9564 tree atype = type;
9565 bool ok = true;
9566
9567 /* Split both trees into variables, constants, and literals. Then
9568 associate each group together, the constants with literals,
9569 then the result with variables. This increases the chances of
9570 literals being recombined later and of generating relocatable
9571 expressions for the sum of a constant and literal. */
9572 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9573 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9574 code == MINUS_EXPR);
9575
9576 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9577 if (code == MINUS_EXPR)
9578 code = PLUS_EXPR;
9579
9580 /* With undefined overflow prefer doing association in a type
9581 which wraps on overflow, if that is one of the operand types. */
9582 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9583 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9584 {
9585 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9586 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9587 atype = TREE_TYPE (arg0);
9588 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9589 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9590 atype = TREE_TYPE (arg1);
9591 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9592 }
9593
9594 /* With undefined overflow we can only associate constants with one
9595 variable, and constants whose association doesn't overflow. */
9596 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9597 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9598 {
9599 if (var0 && var1)
9600 {
9601 tree tmp0 = var0;
9602 tree tmp1 = var1;
9603 bool one_neg = false;
9604
9605 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9606 {
9607 tmp0 = TREE_OPERAND (tmp0, 0);
9608 one_neg = !one_neg;
9609 }
9610 if (CONVERT_EXPR_P (tmp0)
9611 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9612 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9613 <= TYPE_PRECISION (atype)))
9614 tmp0 = TREE_OPERAND (tmp0, 0);
9615 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9616 {
9617 tmp1 = TREE_OPERAND (tmp1, 0);
9618 one_neg = !one_neg;
9619 }
9620 if (CONVERT_EXPR_P (tmp1)
9621 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9622 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9623 <= TYPE_PRECISION (atype)))
9624 tmp1 = TREE_OPERAND (tmp1, 0);
9625 /* The only case we can still associate with two variables
9626 is if they cancel out. */
9627 if (!one_neg
9628 || !operand_equal_p (tmp0, tmp1, 0))
9629 ok = false;
9630 }
9631 }
9632
9633 /* Only do something if we found more than two objects. Otherwise,
9634 nothing has changed and we risk infinite recursion. */
9635 if (ok
9636 && (2 < ((var0 != 0) + (var1 != 0)
9637 + (con0 != 0) + (con1 != 0)
9638 + (lit0 != 0) + (lit1 != 0)
9639 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9640 {
9641 bool any_overflows = false;
9642 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9643 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9644 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9645 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9646 var0 = associate_trees (loc, var0, var1, code, atype);
9647 con0 = associate_trees (loc, con0, con1, code, atype);
9648 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9649 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9650 code, atype);
9651
9652 /* Preserve the MINUS_EXPR if the negative part of the literal is
9653 greater than the positive part. Otherwise, the multiplicative
9654	     folding code (i.e. extract_muldiv) may be fooled in case
9655	     unsigned constants are subtracted, as in the following
9656 example: ((X*2 + 4) - 8U)/2. */
9657 if (minus_lit0 && lit0)
9658 {
9659 if (TREE_CODE (lit0) == INTEGER_CST
9660 && TREE_CODE (minus_lit0) == INTEGER_CST
9661 && tree_int_cst_lt (lit0, minus_lit0))
9662 {
9663 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9664 MINUS_EXPR, atype);
9665 lit0 = 0;
9666 }
9667 else
9668 {
9669 lit0 = associate_trees (loc, lit0, minus_lit0,
9670 MINUS_EXPR, atype);
9671 minus_lit0 = 0;
9672 }
9673 }
9674
9675 /* Don't introduce overflows through reassociation. */
9676 if (!any_overflows
9677 && ((lit0 && TREE_OVERFLOW_P (lit0))
9678 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9679 return NULL_TREE;
9680
9681 if (minus_lit0)
9682 {
9683 if (con0 == 0)
9684 return
9685 fold_convert_loc (loc, type,
9686 associate_trees (loc, var0, minus_lit0,
9687 MINUS_EXPR, atype));
9688 else
9689 {
9690 con0 = associate_trees (loc, con0, minus_lit0,
9691 MINUS_EXPR, atype);
9692 return
9693 fold_convert_loc (loc, type,
9694 associate_trees (loc, var0, con0,
9695 PLUS_EXPR, atype));
9696 }
9697 }
9698
9699 con0 = associate_trees (loc, con0, lit0, code, atype);
9700 return
9701 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9702 code, atype));
9703 }
9704 }
9705
9706 return NULL_TREE;
9707
9708 case MINUS_EXPR:
9709 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9710 if (TREE_CODE (arg0) == NEGATE_EXPR
9711 && negate_expr_p (op1)
9712 && reorder_operands_p (arg0, arg1))
9713 return fold_build2_loc (loc, MINUS_EXPR, type,
9714 negate_expr (op1),
9715 fold_convert_loc (loc, type,
9716 TREE_OPERAND (arg0, 0)));
9717
9718 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9719 __complex__ ( x, -y ). This is not the same for SNaNs or if
9720 signed zeros are involved. */
9721 if (!HONOR_SNANS (element_mode (arg0))
9722 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9723 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9724 {
9725 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9726 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9727 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9728 bool arg0rz = false, arg0iz = false;
9729 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9730 || (arg0i && (arg0iz = real_zerop (arg0i))))
9731 {
9732 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9733 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9734 if (arg0rz && arg1i && real_zerop (arg1i))
9735 {
9736 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9737 arg1r ? arg1r
9738 : build1 (REALPART_EXPR, rtype, arg1));
9739 tree ip = arg0i ? arg0i
9740 : build1 (IMAGPART_EXPR, rtype, arg0);
9741 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9742 }
9743 else if (arg0iz && arg1r && real_zerop (arg1r))
9744 {
9745 tree rp = arg0r ? arg0r
9746 : build1 (REALPART_EXPR, rtype, arg0);
9747 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9748 arg1i ? arg1i
9749 : build1 (IMAGPART_EXPR, rtype, arg1));
9750 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9751 }
9752 }
9753 }
9754
9755 /* A - B -> A + (-B) if B is easily negatable. */
9756 if (negate_expr_p (op1)
9757 && ! TYPE_OVERFLOW_SANITIZED (type)
9758 && ((FLOAT_TYPE_P (type)
9759 /* Avoid this transformation if B is a positive REAL_CST. */
9760 && (TREE_CODE (op1) != REAL_CST
9761 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9762 || INTEGRAL_TYPE_P (type)))
9763 return fold_build2_loc (loc, PLUS_EXPR, type,
9764 fold_convert_loc (loc, type, arg0),
9765 negate_expr (op1));
9766
9767 /* Fold &a[i] - &a[j] to i-j. */
9768 if (TREE_CODE (arg0) == ADDR_EXPR
9769 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9770 && TREE_CODE (arg1) == ADDR_EXPR
9771 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9772 {
9773 tree tem = fold_addr_of_array_ref_difference (loc, type,
9774 TREE_OPERAND (arg0, 0),
9775 TREE_OPERAND (arg1, 0));
9776 if (tem)
9777 return tem;
9778 }
9779
9780 if (FLOAT_TYPE_P (type)
9781 && flag_unsafe_math_optimizations
9782 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9783 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9784 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9785 return tem;
9786
9787 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9788 one. Make sure the type is not saturating and has the signedness of
9789 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9790 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9791 if ((TREE_CODE (arg0) == MULT_EXPR
9792 || TREE_CODE (arg1) == MULT_EXPR)
9793 && !TYPE_SATURATING (type)
9794 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9795 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9796 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9797 {
9798 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9799 if (tem)
9800 return tem;
9801 }
9802
9803 goto associate;
9804
9805 case MULT_EXPR:
9806 if (! FLOAT_TYPE_P (type))
9807 {
9808 /* Transform x * -C into -x * C if x is easily negatable. */
9809 if (TREE_CODE (op1) == INTEGER_CST
9810 && tree_int_cst_sgn (op1) == -1
9811 && negate_expr_p (op0)
9812 && (tem = negate_expr (op1)) != op1
9813 && ! TREE_OVERFLOW (tem))
9814 return fold_build2_loc (loc, MULT_EXPR, type,
9815 fold_convert_loc (loc, type,
9816 negate_expr (op0)), tem);
9817
9818 /* (A + A) * C -> A * 2 * C */
9819 if (TREE_CODE (arg0) == PLUS_EXPR
9820 && TREE_CODE (arg1) == INTEGER_CST
9821 && operand_equal_p (TREE_OPERAND (arg0, 0),
9822 TREE_OPERAND (arg0, 1), 0))
9823 return fold_build2_loc (loc, MULT_EXPR, type,
9824 omit_one_operand_loc (loc, type,
9825 TREE_OPERAND (arg0, 0),
9826 TREE_OPERAND (arg0, 1)),
9827 fold_build2_loc (loc, MULT_EXPR, type,
9828 build_int_cst (type, 2) , arg1));
9829
9830 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9831 sign-changing only. */
9832 if (TREE_CODE (arg1) == INTEGER_CST
9833 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9834 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9835 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9836
9837 strict_overflow_p = false;
9838 if (TREE_CODE (arg1) == INTEGER_CST
9839 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9840 &strict_overflow_p)))
9841 {
9842 if (strict_overflow_p)
9843 fold_overflow_warning (("assuming signed overflow does not "
9844 "occur when simplifying "
9845 "multiplication"),
9846 WARN_STRICT_OVERFLOW_MISC);
9847 return fold_convert_loc (loc, type, tem);
9848 }
9849
9850 /* Optimize z * conj(z) for integer complex numbers. */
9851 if (TREE_CODE (arg0) == CONJ_EXPR
9852 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9853 return fold_mult_zconjz (loc, type, arg1);
9854 if (TREE_CODE (arg1) == CONJ_EXPR
9855 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9856 return fold_mult_zconjz (loc, type, arg0);
9857 }
9858 else
9859 {
9860 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9861 This is not the same for NaNs or if signed zeros are
9862 involved. */
9863 if (!HONOR_NANS (arg0)
9864 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9865 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9866 && TREE_CODE (arg1) == COMPLEX_CST
9867 && real_zerop (TREE_REALPART (arg1)))
9868 {
9869 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9870 if (real_onep (TREE_IMAGPART (arg1)))
9871 return
9872 fold_build2_loc (loc, COMPLEX_EXPR, type,
9873 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9874 rtype, arg0)),
9875 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9876 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9877 return
9878 fold_build2_loc (loc, COMPLEX_EXPR, type,
9879 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9880 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9881 rtype, arg0)));
9882 }
9883
9884 /* Optimize z * conj(z) for floating point complex numbers.
9885 Guarded by flag_unsafe_math_optimizations as non-finite
9886 imaginary components don't produce scalar results. */
9887 if (flag_unsafe_math_optimizations
9888 && TREE_CODE (arg0) == CONJ_EXPR
9889 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9890 return fold_mult_zconjz (loc, type, arg1);
9891 if (flag_unsafe_math_optimizations
9892 && TREE_CODE (arg1) == CONJ_EXPR
9893 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9894 return fold_mult_zconjz (loc, type, arg0);
9895
9896 if (flag_unsafe_math_optimizations)
9897 {
9898
9899 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9900 if (!in_gimple_form
9901 && optimize
9902 && operand_equal_p (arg0, arg1, 0))
9903 {
9904 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9905
9906 if (powfn)
9907 {
9908 tree arg = build_real (type, dconst2);
9909 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9910 }
9911 }
9912 }
9913 }
9914 goto associate;
9915
9916 case BIT_IOR_EXPR:
9917 /* Canonicalize (X & C1) | C2. */
9918 if (TREE_CODE (arg0) == BIT_AND_EXPR
9919 && TREE_CODE (arg1) == INTEGER_CST
9920 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9921 {
9922 int width = TYPE_PRECISION (type), w;
9923 wide_int c1 = TREE_OPERAND (arg0, 1);
9924 wide_int c2 = arg1;
9925
9926 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9927 if ((c1 & c2) == c1)
9928 return omit_one_operand_loc (loc, type, arg1,
9929 TREE_OPERAND (arg0, 0));
9930
9931 wide_int msk = wi::mask (width, false,
9932 TYPE_PRECISION (TREE_TYPE (arg1)));
9933
9934 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9935 if (msk.and_not (c1 | c2) == 0)
9936 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9937 TREE_OPERAND (arg0, 0), arg1);
9938
9939 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9940 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9941 mode which allows further optimizations. */
9942 c1 &= msk;
9943 c2 &= msk;
9944 wide_int c3 = c1.and_not (c2);
9945 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9946 {
9947 wide_int mask = wi::mask (w, false,
9948 TYPE_PRECISION (type));
9949 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9950 {
9951 c3 = mask;
9952 break;
9953 }
9954 }
9955
9956 if (c3 != c1)
9957 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9958 fold_build2_loc (loc, BIT_AND_EXPR, type,
9959 TREE_OPERAND (arg0, 0),
9960 wide_int_to_tree (type,
9961 c3)),
9962 arg1);
9963 }
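      /* Added illustrative note (not part of the original sources):
	 (x & 0x30) | 0xf0 folds to 0xf0 since C1 & C2 == C1; for an
	 8-bit type, (x & 0xf3) | 0x0c folds to x | 0x0c since C1 | C2
	 covers all bits; and (x & 0x17) | 0x04 narrows C1 to give
	 (x & 0x13) | 0x04.  */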
9964
9965 /* See if this can be simplified into a rotate first. If that
9966	 is unsuccessful, continue in the association code.  */
9967 goto bit_rotate;
9968
9969 case BIT_XOR_EXPR:
9970 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9971 if (TREE_CODE (arg0) == BIT_AND_EXPR
9972 && INTEGRAL_TYPE_P (type)
9973 && integer_onep (TREE_OPERAND (arg0, 1))
9974 && integer_onep (arg1))
9975 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9976 build_zero_cst (TREE_TYPE (arg0)));
9977
9978 /* See if this can be simplified into a rotate first. If that
9979	 is unsuccessful, continue in the association code.  */
9980 goto bit_rotate;
9981
9982 case BIT_AND_EXPR:
9983 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9984 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9985 && INTEGRAL_TYPE_P (type)
9986 && integer_onep (TREE_OPERAND (arg0, 1))
9987 && integer_onep (arg1))
9988 {
9989 tree tem2;
9990 tem = TREE_OPERAND (arg0, 0);
9991 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9992 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9993 tem, tem2);
9994 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9995 build_zero_cst (TREE_TYPE (tem)));
9996 }
9997 /* Fold ~X & 1 as (X & 1) == 0. */
9998 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9999 && INTEGRAL_TYPE_P (type)
10000 && integer_onep (arg1))
10001 {
10002 tree tem2;
10003 tem = TREE_OPERAND (arg0, 0);
10004 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10005 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10006 tem, tem2);
10007 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10008 build_zero_cst (TREE_TYPE (tem)));
10009 }
10010 /* Fold !X & 1 as X == 0. */
10011 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10012 && integer_onep (arg1))
10013 {
10014 tem = TREE_OPERAND (arg0, 0);
10015 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10016 build_zero_cst (TREE_TYPE (tem)));
10017 }
10018
10019 /* Fold (X ^ Y) & Y as ~X & Y. */
10020 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10021 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10022 {
10023 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10024 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10025 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10026 fold_convert_loc (loc, type, arg1));
10027 }
10028 /* Fold (X ^ Y) & X as ~Y & X. */
10029 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10030 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10031 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10032 {
10033 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10034 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10035 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10036 fold_convert_loc (loc, type, arg1));
10037 }
10038 /* Fold X & (X ^ Y) as X & ~Y. */
10039 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10040 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10041 {
10042 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10043 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10044 fold_convert_loc (loc, type, arg0),
10045 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10046 }
10047 /* Fold X & (Y ^ X) as ~Y & X. */
10048 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10049 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10050 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10051 {
10052 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10053 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10054 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10055 fold_convert_loc (loc, type, arg0));
10056 }
10057
10058 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10059 multiple of 1 << CST. */
10060 if (TREE_CODE (arg1) == INTEGER_CST)
10061 {
10062 wide_int cst1 = arg1;
10063 wide_int ncst1 = -cst1;
10064 if ((cst1 & ncst1) == ncst1
10065 && multiple_of_p (type, arg0,
10066 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10067 return fold_convert_loc (loc, type, arg0);
10068 }
10069
10070 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10071 bits from CST2. */
10072 if (TREE_CODE (arg1) == INTEGER_CST
10073 && TREE_CODE (arg0) == MULT_EXPR
10074 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10075 {
10076 wide_int warg1 = arg1;
10077 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10078
10079 if (masked == 0)
10080 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10081 arg0, arg1);
10082 else if (masked != warg1)
10083 {
10084 /* Avoid the transform if arg1 is a mask of some
10085 mode which allows further optimizations. */
10086 int pop = wi::popcount (warg1);
10087 if (!(pop >= BITS_PER_UNIT
10088 && exact_log2 (pop) != -1
10089 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10090 return fold_build2_loc (loc, code, type, op0,
10091 wide_int_to_tree (type, masked));
10092 }
10093 }
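      /* Added illustrative note (not part of the original sources):
	 (x * 8) & 5 folds to 0 because the three trailing zeros of 8
	 already clear every bit 5 can test, while (x * 8) & 13 drops the
	 known-zero low bits of 13 and becomes (x * 8) & 8.  */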
10094
10095 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10096 ((A & N) + B) & M -> (A + B) & M
10097 Similarly if (N & M) == 0,
10098 ((A | N) + B) & M -> (A + B) & M
10099 and for - instead of + (or unary - instead of +)
10100 and/or ^ instead of |.
10101 If B is constant and (B & M) == 0, fold into A & M. */
10102 if (TREE_CODE (arg1) == INTEGER_CST)
10103 {
10104 wide_int cst1 = arg1;
10105 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10106 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10107 && (TREE_CODE (arg0) == PLUS_EXPR
10108 || TREE_CODE (arg0) == MINUS_EXPR
10109 || TREE_CODE (arg0) == NEGATE_EXPR)
10110 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10111 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10112 {
10113 tree pmop[2];
10114 int which = 0;
10115 wide_int cst0;
10116
10117 /* Now we know that arg0 is (C + D) or (C - D) or
10118 -C and arg1 (M) is == (1LL << cst) - 1.
10119 Store C into PMOP[0] and D into PMOP[1]. */
10120 pmop[0] = TREE_OPERAND (arg0, 0);
10121 pmop[1] = NULL;
10122 if (TREE_CODE (arg0) != NEGATE_EXPR)
10123 {
10124 pmop[1] = TREE_OPERAND (arg0, 1);
10125 which = 1;
10126 }
10127
10128 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10129 which = -1;
10130
10131 for (; which >= 0; which--)
10132 switch (TREE_CODE (pmop[which]))
10133 {
10134 case BIT_AND_EXPR:
10135 case BIT_IOR_EXPR:
10136 case BIT_XOR_EXPR:
10137 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10138 != INTEGER_CST)
10139 break;
10140 cst0 = TREE_OPERAND (pmop[which], 1);
10141 cst0 &= cst1;
10142 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10143 {
10144 if (cst0 != cst1)
10145 break;
10146 }
10147 else if (cst0 != 0)
10148 break;
10149 /* If C or D is of the form (A & N) where
10150 (N & M) == M, or of the form (A | N) or
10151 (A ^ N) where (N & M) == 0, replace it with A. */
10152 pmop[which] = TREE_OPERAND (pmop[which], 0);
10153 break;
10154 case INTEGER_CST:
10155		    /* If C or D is an N where (N & M) == 0, it can be
10156 omitted (assumed 0). */
10157 if ((TREE_CODE (arg0) == PLUS_EXPR
10158 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10159 && (cst1 & pmop[which]) == 0)
10160 pmop[which] = NULL;
10161 break;
10162 default:
10163 break;
10164 }
10165
10166 /* Only build anything new if we optimized one or both arguments
10167 above. */
10168 if (pmop[0] != TREE_OPERAND (arg0, 0)
10169 || (TREE_CODE (arg0) != NEGATE_EXPR
10170 && pmop[1] != TREE_OPERAND (arg0, 1)))
10171 {
10172 tree utype = TREE_TYPE (arg0);
10173 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10174 {
10175 /* Perform the operations in a type that has defined
10176 overflow behavior. */
10177 utype = unsigned_type_for (TREE_TYPE (arg0));
10178 if (pmop[0] != NULL)
10179 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10180 if (pmop[1] != NULL)
10181 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10182 }
10183
10184 if (TREE_CODE (arg0) == NEGATE_EXPR)
10185 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10186 else if (TREE_CODE (arg0) == PLUS_EXPR)
10187 {
10188 if (pmop[0] != NULL && pmop[1] != NULL)
10189 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10190 pmop[0], pmop[1]);
10191 else if (pmop[0] != NULL)
10192 tem = pmop[0];
10193 else if (pmop[1] != NULL)
10194 tem = pmop[1];
10195 else
10196 return build_int_cst (type, 0);
10197 }
10198 else if (pmop[0] == NULL)
10199 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10200 else
10201 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10202 pmop[0], pmop[1]);
10203 /* TEM is now the new binary +, - or unary - replacement. */
10204 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10205 fold_convert_loc (loc, utype, arg1));
10206 return fold_convert_loc (loc, type, tem);
10207 }
10208 }
10209 }
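      /* Added illustrative note (not part of the original sources): with
	 M == 7 == (1 << 3) - 1, ((a & 7) + b) & 7 simplifies to
	 (a + b) & 7 and ((a | 8) - b) & 7 to (a - b) & 7, since bits of
	 an operand outside M cannot affect the low three bits of the
	 sum or difference.  */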
10210
10211 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10212 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10213 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10214 {
10215 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10216
10217 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10218 if (mask == -1)
10219 return
10220 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10221 }
10222
10223 goto associate;
10224
10225 case RDIV_EXPR:
10226 /* Don't touch a floating-point divide by zero unless the mode
10227 of the constant can represent infinity. */
10228 if (TREE_CODE (arg1) == REAL_CST
10229 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10230 && real_zerop (arg1))
10231 return NULL_TREE;
10232
10233 /* (-A) / (-B) -> A / B */
10234 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10235 return fold_build2_loc (loc, RDIV_EXPR, type,
10236 TREE_OPERAND (arg0, 0),
10237 negate_expr (arg1));
10238 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10239 return fold_build2_loc (loc, RDIV_EXPR, type,
10240 negate_expr (arg0),
10241 TREE_OPERAND (arg1, 0));
10242 return NULL_TREE;
10243
10244 case TRUNC_DIV_EXPR:
10245 /* Fall through */
10246
10247 case FLOOR_DIV_EXPR:
10248 /* Simplify A / (B << N) where A and B are positive and B is
10249 a power of 2, to A >> (N + log2(B)). */
10250 strict_overflow_p = false;
10251 if (TREE_CODE (arg1) == LSHIFT_EXPR
10252 && (TYPE_UNSIGNED (type)
10253 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10254 {
10255 tree sval = TREE_OPERAND (arg1, 0);
10256 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10257 {
10258 tree sh_cnt = TREE_OPERAND (arg1, 1);
10259 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10260 wi::exact_log2 (sval));
10261
10262 if (strict_overflow_p)
10263 fold_overflow_warning (("assuming signed overflow does not "
10264 "occur when simplifying A / (B << N)"),
10265 WARN_STRICT_OVERFLOW_MISC);
10266
10267 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10268 sh_cnt, pow2);
10269 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10270 fold_convert_loc (loc, type, arg0), sh_cnt);
10271 }
10272 }
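      /* Added illustrative note (not part of the original sources): for
	 unsigned a, a / (4 << n) becomes a >> (n + 2); for signed a the
	 transform only fires when a is known non-negative, and warns if
	 that knowledge relies on signed overflow being undefined.  */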
10273
10274 /* Fall through */
10275
10276 case ROUND_DIV_EXPR:
10277 case CEIL_DIV_EXPR:
10278 case EXACT_DIV_EXPR:
10279 if (integer_zerop (arg1))
10280 return NULL_TREE;
10281
10282 /* Convert -A / -B to A / B when the type is signed and overflow is
10283 undefined. */
10284 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10285 && TREE_CODE (arg0) == NEGATE_EXPR
10286 && negate_expr_p (op1))
10287 {
10288 if (INTEGRAL_TYPE_P (type))
10289 fold_overflow_warning (("assuming signed overflow does not occur "
10290 "when distributing negation across "
10291 "division"),
10292 WARN_STRICT_OVERFLOW_MISC);
10293 return fold_build2_loc (loc, code, type,
10294 fold_convert_loc (loc, type,
10295 TREE_OPERAND (arg0, 0)),
10296 negate_expr (op1));
10297 }
10298 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10299 && TREE_CODE (arg1) == NEGATE_EXPR
10300 && negate_expr_p (op0))
10301 {
10302 if (INTEGRAL_TYPE_P (type))
10303 fold_overflow_warning (("assuming signed overflow does not occur "
10304 "when distributing negation across "
10305 "division"),
10306 WARN_STRICT_OVERFLOW_MISC);
10307 return fold_build2_loc (loc, code, type,
10308 negate_expr (op0),
10309 fold_convert_loc (loc, type,
10310 TREE_OPERAND (arg1, 0)));
10311 }
10312
10313 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10314 operation, EXACT_DIV_EXPR.
10315
10316 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10317	 At one time others generated faster code, but it's not clear if they do
10318	 after the last round of changes to the DIV code in expmed.c.  */
10319 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10320 && multiple_of_p (type, arg0, arg1))
10321 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10322 fold_convert (type, arg0),
10323 fold_convert (type, arg1));
10324
10325 strict_overflow_p = false;
10326 if (TREE_CODE (arg1) == INTEGER_CST
10327 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10328 &strict_overflow_p)))
10329 {
10330 if (strict_overflow_p)
10331 fold_overflow_warning (("assuming signed overflow does not occur "
10332 "when simplifying division"),
10333 WARN_STRICT_OVERFLOW_MISC);
10334 return fold_convert_loc (loc, type, tem);
10335 }
10336
10337 return NULL_TREE;
10338
10339 case CEIL_MOD_EXPR:
10340 case FLOOR_MOD_EXPR:
10341 case ROUND_MOD_EXPR:
10342 case TRUNC_MOD_EXPR:
10343 strict_overflow_p = false;
10344 if (TREE_CODE (arg1) == INTEGER_CST
10345 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10346 &strict_overflow_p)))
10347 {
10348 if (strict_overflow_p)
10349 fold_overflow_warning (("assuming signed overflow does not occur "
10350 "when simplifying modulus"),
10351 WARN_STRICT_OVERFLOW_MISC);
10352 return fold_convert_loc (loc, type, tem);
10353 }
10354
10355 return NULL_TREE;
10356
10357 case LROTATE_EXPR:
10358 case RROTATE_EXPR:
10359 case RSHIFT_EXPR:
10360 case LSHIFT_EXPR:
10361 /* Since a negative shift count is not well-defined,
10362 don't try to compute it in the compiler. */
10363 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10364 return NULL_TREE;
10365
10366 prec = element_precision (type);
10367
10368 /* If we have a rotate of a bit operation with the rotate count and
10369 the second operand of the bit operation both constant,
10370 permute the two operations. */
10371 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10372 && (TREE_CODE (arg0) == BIT_AND_EXPR
10373 || TREE_CODE (arg0) == BIT_IOR_EXPR
10374 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10375 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10376 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10377 fold_build2_loc (loc, code, type,
10378 TREE_OPERAND (arg0, 0), arg1),
10379 fold_build2_loc (loc, code, type,
10380 TREE_OPERAND (arg0, 1), arg1));
10381
10382 /* Two consecutive rotates adding up to some integer
10383 multiple of the precision of the type can be ignored. */
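/* Illustration (assuming a 32-bit type): rotating right by 13 and
   then by 19 moves every bit by 13 + 19 == 32 positions, a whole
   multiple of the precision, so the operand is returned unchanged.  */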
10384 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10385 && TREE_CODE (arg0) == RROTATE_EXPR
10386 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10387 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10388 prec) == 0)
10389 return TREE_OPERAND (arg0, 0);
10390
10391 return NULL_TREE;
10392
10393 case MIN_EXPR:
10394 case MAX_EXPR:
10395 goto associate;
10396
10397 case TRUTH_ANDIF_EXPR:
10398 /* Note that the operands of this must be ints
10399 and their values must be 0 or 1.
10400 ("true" is a fixed value perhaps depending on the language.) */
10401 /* If first arg is constant zero, return it. */
10402 if (integer_zerop (arg0))
10403 return fold_convert_loc (loc, type, arg0);
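/* Fall through.  */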
10404 case TRUTH_AND_EXPR:
10405 /* If either arg is constant true, drop it. */
10406 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10407 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10408 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10409 /* Preserve sequence points. */
10410 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10411 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10412 /* If second arg is constant zero, result is zero, but first arg
10413 must be evaluated. */
10414 if (integer_zerop (arg1))
10415 return omit_one_operand_loc (loc, type, arg1, arg0);
10416 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10417 case will be handled here. */
10418 if (integer_zerop (arg0))
10419 return omit_one_operand_loc (loc, type, arg0, arg1);
10420
10421 /* !X && X is always false. */
10422 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10423 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10424 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10425 /* X && !X is always false. */
10426 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10427 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10428 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10429
10430 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10431 means A >= Y && A != MAX, but in this case we know that
10432 A < X <= MAX. */
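/* Sketch of the reasoning: A + 1 > Y fails to imply A >= Y only when
   A + 1 wraps around, i.e. when A == MAX; the other conjunct A < X
   already rules that out because A < X <= MAX.  */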
10433
10434 if (!TREE_SIDE_EFFECTS (arg0)
10435 && !TREE_SIDE_EFFECTS (arg1))
10436 {
10437 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10438 if (tem && !operand_equal_p (tem, arg0, 0))
10439 return fold_build2_loc (loc, code, type, tem, arg1);
10440
10441 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10442 if (tem && !operand_equal_p (tem, arg1, 0))
10443 return fold_build2_loc (loc, code, type, arg0, tem);
10444 }
10445
10446 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10447 != NULL_TREE)
10448 return tem;
10449
10450 return NULL_TREE;
10451
10452 case TRUTH_ORIF_EXPR:
10453 /* Note that the operands of this must be ints
10454 and their values must be 0 or 1.
10455 ("true" is a fixed value perhaps depending on the language.) */
10456 /* If first arg is constant true, return it. */
10457 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10458 return fold_convert_loc (loc, type, arg0);
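/* Fall through.  */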
10459 case TRUTH_OR_EXPR:
10460 /* If either arg is constant zero, drop it. */
10461 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10462 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10463 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10464 /* Preserve sequence points. */
10465 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10466 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10467 /* If second arg is constant true, result is true, but we must
10468 evaluate first arg. */
10469 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10470 return omit_one_operand_loc (loc, type, arg1, arg0);
10471 /* Likewise for first arg, but note this only occurs here for
10472 TRUTH_OR_EXPR. */
10473 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10474 return omit_one_operand_loc (loc, type, arg0, arg1);
10475
10476 /* !X || X is always true. */
10477 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10478 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10479 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10480 /* X || !X is always true. */
10481 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10482 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10483 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10484
10485 /* (X && !Y) || (!X && Y) is X ^ Y */
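/* Illustration: the disjunction is true exactly when X and Y take
   different truth values -- (1,0) or (0,1) -- which is precisely
   X ^ Y on truth values.  */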
10486 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10487 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10488 {
10489 tree a0, a1, l0, l1, n0, n1;
10490
10491 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10492 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10493
10494 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10495 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10496
10497 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10498 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10499
10500 if ((operand_equal_p (n0, a0, 0)
10501 && operand_equal_p (n1, a1, 0))
10502 || (operand_equal_p (n0, a1, 0)
10503 && operand_equal_p (n1, a0, 0)))
10504 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10505 }
10506
10507 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10508 != NULL_TREE)
10509 return tem;
10510
10511 return NULL_TREE;
10512
10513 case TRUTH_XOR_EXPR:
10514 /* If the second arg is constant zero, drop it. */
10515 if (integer_zerop (arg1))
10516 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10517 /* If the second arg is constant true, this is a logical inversion. */
10518 if (integer_onep (arg1))
10519 {
10520 tem = invert_truthvalue_loc (loc, arg0);
10521 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10522 }
10523 /* Identical arguments cancel to zero. */
10524 if (operand_equal_p (arg0, arg1, 0))
10525 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10526
10527 /* !X ^ X is always true. */
10528 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10529 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10530 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10531
10532 /* X ^ !X is always true. */
10533 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10534 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10535 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10536
10537 return NULL_TREE;
10538
10539 case EQ_EXPR:
10540 case NE_EXPR:
10541 STRIP_NOPS (arg0);
10542 STRIP_NOPS (arg1);
10543
10544 tem = fold_comparison (loc, code, type, op0, op1);
10545 if (tem != NULL_TREE)
10546 return tem;
10547
10548 /* bool_var != 1 becomes !bool_var. */
10549 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10550 && code == NE_EXPR)
10551 return fold_convert_loc (loc, type,
10552 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10553 TREE_TYPE (arg0), arg0));
10554
10555 /* bool_var == 0 becomes !bool_var. */
10556 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10557 && code == EQ_EXPR)
10558 return fold_convert_loc (loc, type,
10559 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10560 TREE_TYPE (arg0), arg0));
10561
10562 /* !exp != 0 becomes !exp */
10563 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10564 && code == NE_EXPR)
10565 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10566
10567 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
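/* Illustrative example: X + Y == X can only hold when Y == 0, so the
   comparison folds to Y == 0 while X is still evaluated for its side
   effects via omit_two_operands below.  */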
10568 if ((TREE_CODE (arg0) == PLUS_EXPR
10569 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10570 || TREE_CODE (arg0) == MINUS_EXPR)
10571 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10572 0)),
10573 arg1, 0)
10574 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10575 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10576 {
10577 tree val = TREE_OPERAND (arg0, 1);
10578 return omit_two_operands_loc (loc, type,
10579 fold_build2_loc (loc, code, type,
10580 val,
10581 build_int_cst (TREE_TYPE (val),
10582 0)),
10583 TREE_OPERAND (arg0, 0), arg1);
10584 }
10585
10586 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
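/* Sketch of the reasoning: C - X == X would need 2 * X == C, but
   2 * X is always even in modular arithmetic, so an odd C makes the
   equality impossible; EQ folds to false and NE to true.  */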
10587 if (TREE_CODE (arg0) == MINUS_EXPR
10588 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10589 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10590 1)),
10591 arg1, 0)
10592 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10593 {
10594 return omit_two_operands_loc (loc, type,
10595 code == NE_EXPR
10596 ? boolean_true_node : boolean_false_node,
10597 TREE_OPERAND (arg0, 1), arg1);
10598 }
10599
10600 /* If this is an EQ or NE comparison with zero and ARG0 is
10601 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10602 two operations, but the latter can be done in one less insn
10603 on machines that have only two-operand insns or on which a
10604 constant cannot be the first operand. */
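/* Illustrative example: ((1 << n) & flags) != 0 tests bit n of
   flags; rewritten as ((flags >> n) & 1) != 0 the constant 1 is the
   second operand, a form more targets can encode directly.  */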
10605 if (TREE_CODE (arg0) == BIT_AND_EXPR
10606 && integer_zerop (arg1))
10607 {
10608 tree arg00 = TREE_OPERAND (arg0, 0);
10609 tree arg01 = TREE_OPERAND (arg0, 1);
10610 if (TREE_CODE (arg00) == LSHIFT_EXPR
10611 && integer_onep (TREE_OPERAND (arg00, 0)))
10612 {
10613 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10614 arg01, TREE_OPERAND (arg00, 1));
10615 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10616 build_int_cst (TREE_TYPE (arg0), 1));
10617 return fold_build2_loc (loc, code, type,
10618 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10619 arg1);
10620 }
10621 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10622 && integer_onep (TREE_OPERAND (arg01, 0)))
10623 {
10624 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10625 arg00, TREE_OPERAND (arg01, 1));
10626 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10627 build_int_cst (TREE_TYPE (arg0), 1));
10628 return fold_build2_loc (loc, code, type,
10629 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10630 arg1);
10631 }
10632 }
10633
10634 /* If this is an NE or EQ comparison of zero against the result of a
10635 signed MOD operation whose second operand is a power of 2, make
10636 the MOD operation unsigned since it is simpler and equivalent. */
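/* Illustrative example: for signed x, x % 4 == 0 holds exactly when
   (unsigned) x % 4 == 0; a nonzero remainder can differ in sign
   between the two forms, but whether it is zero cannot.  */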
10637 if (integer_zerop (arg1)
10638 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10639 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10640 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10641 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10642 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10643 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10644 {
10645 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10646 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10647 fold_convert_loc (loc, newtype,
10648 TREE_OPERAND (arg0, 0)),
10649 fold_convert_loc (loc, newtype,
10650 TREE_OPERAND (arg0, 1)));
10651
10652 return fold_build2_loc (loc, code, type, newmod,
10653 fold_convert_loc (loc, newtype, arg1));
10654 }
10655
10656 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10657 C1 is a valid shift constant, and C2 is a power of two, i.e.
10658 a single bit. */
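/* Illustrative example (hypothetical constants): with C1 == 3 and
   C2 == 4, ((X >> 3) & 4) != 0 tests bit 5 of X, so as long as
   log2 (4) + 3 is below the precision it becomes (X & 32) != 0.  */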
10659 if (TREE_CODE (arg0) == BIT_AND_EXPR
10660 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10661 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10662 == INTEGER_CST
10663 && integer_pow2p (TREE_OPERAND (arg0, 1))
10664 && integer_zerop (arg1))
10665 {
10666 tree itype = TREE_TYPE (arg0);
10667 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10668 prec = TYPE_PRECISION (itype);
10669
10670 /* Check for a valid shift count. */
10671 if (wi::ltu_p (arg001, prec))
10672 {
10673 tree arg01 = TREE_OPERAND (arg0, 1);
10674 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10675 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10676 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10677 can be rewritten as (X & (C2 << C1)) != 0. */
10678 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10679 {
10680 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10681 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10682 return fold_build2_loc (loc, code, type, tem,
10683 fold_convert_loc (loc, itype, arg1));
10684 }
10685 /* Otherwise, for signed (arithmetic) shifts,
10686 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10687 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10688 else if (!TYPE_UNSIGNED (itype))
10689 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10690 arg000, build_int_cst (itype, 0));
10691 /* Otherwise, for unsigned (logical) shifts,
10692 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10693 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10694 else
10695 return omit_one_operand_loc (loc, type,
10696 code == EQ_EXPR ? integer_one_node
10697 : integer_zero_node,
10698 arg000);
10699 }
10700 }
10701
10702 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10703 Similarly for NE_EXPR. */
10704 if (TREE_CODE (arg0) == BIT_AND_EXPR
10705 && TREE_CODE (arg1) == INTEGER_CST
10706 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10707 {
10708 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10709 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10710 TREE_OPERAND (arg0, 1));
10711 tree dandnotc
10712 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10713 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10714 notc);
10715 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10716 if (integer_nonzerop (dandnotc))
10717 return omit_one_operand_loc (loc, type, rslt, arg0);
10718 }
10719
10720 /* If this is a comparison of a field, we may be able to simplify it. */
10721 if ((TREE_CODE (arg0) == COMPONENT_REF
10722 || TREE_CODE (arg0) == BIT_FIELD_REF)
10723 /* Handle the constant case even without -O
10724 to make sure the warnings are given. */
10725 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10726 {
10727 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10728 if (t1)
10729 return t1;
10730 }
10731
10732 /* Optimize comparisons of strlen vs zero to a compare of the
10733 first character of the string vs zero. To wit,
10734 strlen(ptr) == 0 => *ptr == 0
10735 strlen(ptr) != 0 => *ptr != 0
10736 Other cases should reduce to one of these two (or a constant)
10737 due to the return value of strlen being unsigned. */
10738 if (TREE_CODE (arg0) == CALL_EXPR
10739 && integer_zerop (arg1))
10740 {
10741 tree fndecl = get_callee_fndecl (arg0);
10742
10743 if (fndecl
10744 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10745 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10746 && call_expr_nargs (arg0) == 1
10747 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10748 {
10749 tree iref = build_fold_indirect_ref_loc (loc,
10750 CALL_EXPR_ARG (arg0, 0));
10751 return fold_build2_loc (loc, code, type, iref,
10752 build_int_cst (TREE_TYPE (iref), 0));
10753 }
10754 }
10755
10756 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10757 of X. Similarly fold (X >> C) == 0 into X >= 0. */
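/* Illustration (assuming 32-bit signed X): X >> 31 replicates the
   sign bit into every position, so (X >> 31) != 0 holds exactly
   when X < 0.  */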
10758 if (TREE_CODE (arg0) == RSHIFT_EXPR
10759 && integer_zerop (arg1)
10760 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10761 {
10762 tree arg00 = TREE_OPERAND (arg0, 0);
10763 tree arg01 = TREE_OPERAND (arg0, 1);
10764 tree itype = TREE_TYPE (arg00);
10765 if (wi::eq_p (arg01, element_precision (itype) - 1))
10766 {
10767 if (TYPE_UNSIGNED (itype))
10768 {
10769 itype = signed_type_for (itype);
10770 arg00 = fold_convert_loc (loc, itype, arg00);
10771 }
10772 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10773 type, arg00, build_zero_cst (itype));
10774 }
10775 }
10776
10777 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10778 (X & C) == 0 when C is a single bit. */
10779 if (TREE_CODE (arg0) == BIT_AND_EXPR
10780 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10781 && integer_zerop (arg1)
10782 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10783 {
10784 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10785 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10786 TREE_OPERAND (arg0, 1));
10787 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10788 type, tem,
10789 fold_convert_loc (loc, TREE_TYPE (arg0),
10790 arg1));
10791 }
10792
10793 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10794 constant C is a power of two, i.e. a single bit. */
10795 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10796 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10797 && integer_zerop (arg1)
10798 && integer_pow2p (TREE_OPERAND (arg0, 1))
10799 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10800 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10801 {
10802 tree arg00 = TREE_OPERAND (arg0, 0);
10803 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10804 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10805 }
10806
10807 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10808 when C is a power of two, i.e. a single bit. */
10809 if (TREE_CODE (arg0) == BIT_AND_EXPR
10810 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10811 && integer_zerop (arg1)
10812 && integer_pow2p (TREE_OPERAND (arg0, 1))
10813 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10814 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10815 {
10816 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10817 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10818 arg000, TREE_OPERAND (arg0, 1));
10819 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10820 tem, build_int_cst (TREE_TYPE (tem), 0));
10821 }
10822
10823 if (integer_zerop (arg1)
10824 && tree_expr_nonzero_p (arg0))
10825 {
10826 tree res = constant_boolean_node (code == NE_EXPR, type);
10827 return omit_one_operand_loc (loc, type, res, arg0);
10828 }
10829
10830 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
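/* Illustrative example: (X & 0xF0) == (Y & 0xF0) holds exactly when
   X and Y agree on the bits selected by 0xF0, i.e. when
   ((X ^ Y) & 0xF0) == 0.  */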
10831 if (TREE_CODE (arg0) == BIT_AND_EXPR
10832 && TREE_CODE (arg1) == BIT_AND_EXPR)
10833 {
10834 tree arg00 = TREE_OPERAND (arg0, 0);
10835 tree arg01 = TREE_OPERAND (arg0, 1);
10836 tree arg10 = TREE_OPERAND (arg1, 0);
10837 tree arg11 = TREE_OPERAND (arg1, 1);
10838 tree itype = TREE_TYPE (arg0);
10839
10840 if (operand_equal_p (arg01, arg11, 0))
10841 return fold_build2_loc (loc, code, type,
10842 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10843 fold_build2_loc (loc,
10844 BIT_XOR_EXPR, itype,
10845 arg00, arg10),
10846 arg01),
10847 build_zero_cst (itype));
10848
10849 if (operand_equal_p (arg01, arg10, 0))
10850 return fold_build2_loc (loc, code, type,
10851 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10852 fold_build2_loc (loc,
10853 BIT_XOR_EXPR, itype,
10854 arg00, arg11),
10855 arg01),
10856 build_zero_cst (itype));
10857
10858 if (operand_equal_p (arg00, arg11, 0))
10859 return fold_build2_loc (loc, code, type,
10860 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10861 fold_build2_loc (loc,
10862 BIT_XOR_EXPR, itype,
10863 arg01, arg10),
10864 arg00),
10865 build_zero_cst (itype));
10866
10867 if (operand_equal_p (arg00, arg10, 0))
10868 return fold_build2_loc (loc, code, type,
10869 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10870 fold_build2_loc (loc,
10871 BIT_XOR_EXPR, itype,
10872 arg01, arg11),
10873 arg00),
10874 build_zero_cst (itype));
10875 }
10876
10877 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10878 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10879 {
10880 tree arg00 = TREE_OPERAND (arg0, 0);
10881 tree arg01 = TREE_OPERAND (arg0, 1);
10882 tree arg10 = TREE_OPERAND (arg1, 0);
10883 tree arg11 = TREE_OPERAND (arg1, 1);
10884 tree itype = TREE_TYPE (arg0);
10885
10886 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10887 operand_equal_p guarantees no side-effects so we don't need
10888 to use omit_one_operand on Z. */
10889 if (operand_equal_p (arg01, arg11, 0))
10890 return fold_build2_loc (loc, code, type, arg00,
10891 fold_convert_loc (loc, TREE_TYPE (arg00),
10892 arg10));
10893 if (operand_equal_p (arg01, arg10, 0))
10894 return fold_build2_loc (loc, code, type, arg00,
10895 fold_convert_loc (loc, TREE_TYPE (arg00),
10896 arg11));
10897 if (operand_equal_p (arg00, arg11, 0))
10898 return fold_build2_loc (loc, code, type, arg01,
10899 fold_convert_loc (loc, TREE_TYPE (arg01),
10900 arg10));
10901 if (operand_equal_p (arg00, arg10, 0))
10902 return fold_build2_loc (loc, code, type, arg01,
10903 fold_convert_loc (loc, TREE_TYPE (arg01),
10904 arg11));
10905
10906 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
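/* Illustrative example: (x ^ 5) == (y ^ 3) can be rewritten by
   xoring both sides with 3, giving (x ^ (5 ^ 3)) == y, i.e.
   (x ^ 6) == y.  */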
10907 if (TREE_CODE (arg01) == INTEGER_CST
10908 && TREE_CODE (arg11) == INTEGER_CST)
10909 {
10910 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10911 fold_convert_loc (loc, itype, arg11));
10912 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10913 return fold_build2_loc (loc, code, type, tem,
10914 fold_convert_loc (loc, itype, arg10));
10915 }
10916 }
10917
10918 /* Attempt to simplify equality/inequality comparisons of complex
10919 values. Only lower the comparison if the result is known or
10920 can be simplified to a single scalar comparison. */
10921 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10922 || TREE_CODE (arg0) == COMPLEX_CST)
10923 && (TREE_CODE (arg1) == COMPLEX_EXPR
10924 || TREE_CODE (arg1) == COMPLEX_CST))
10925 {
10926 tree real0, imag0, real1, imag1;
10927 tree rcond, icond;
10928
10929 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10930 {
10931 real0 = TREE_OPERAND (arg0, 0);
10932 imag0 = TREE_OPERAND (arg0, 1);
10933 }
10934 else
10935 {
10936 real0 = TREE_REALPART (arg0);
10937 imag0 = TREE_IMAGPART (arg0);
10938 }
10939
10940 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10941 {
10942 real1 = TREE_OPERAND (arg1, 0);
10943 imag1 = TREE_OPERAND (arg1, 1);
10944 }
10945 else
10946 {
10947 real1 = TREE_REALPART (arg1);
10948 imag1 = TREE_IMAGPART (arg1);
10949 }
10950
10951 rcond = fold_binary_loc (loc, code, type, real0, real1);
10952 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10953 {
10954 if (integer_zerop (rcond))
10955 {
10956 if (code == EQ_EXPR)
10957 return omit_two_operands_loc (loc, type, boolean_false_node,
10958 imag0, imag1);
10959 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10960 }
10961 else
10962 {
10963 if (code == NE_EXPR)
10964 return omit_two_operands_loc (loc, type, boolean_true_node,
10965 imag0, imag1);
10966 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10967 }
10968 }
10969
10970 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10971 if (icond && TREE_CODE (icond) == INTEGER_CST)
10972 {
10973 if (integer_zerop (icond))
10974 {
10975 if (code == EQ_EXPR)
10976 return omit_two_operands_loc (loc, type, boolean_false_node,
10977 real0, real1);
10978 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10979 }
10980 else
10981 {
10982 if (code == NE_EXPR)
10983 return omit_two_operands_loc (loc, type, boolean_true_node,
10984 real0, real1);
10985 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10986 }
10987 }
10988 }
10989
10990 return NULL_TREE;
10991
10992 case LT_EXPR:
10993 case GT_EXPR:
10994 case LE_EXPR:
10995 case GE_EXPR:
10996 tem = fold_comparison (loc, code, type, op0, op1);
10997 if (tem != NULL_TREE)
10998 return tem;
10999
11000 /* Transform comparisons of the form X +- C CMP X. */
11001 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11002 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11003 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11004 && !HONOR_SNANS (arg0))
11005 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11006 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11007 {
11008 tree arg01 = TREE_OPERAND (arg0, 1);
11009 enum tree_code code0 = TREE_CODE (arg0);
11010 int is_positive;
11011
11012 if (TREE_CODE (arg01) == REAL_CST)
11013 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11014 else
11015 is_positive = tree_int_cst_sgn (arg01);
11016
11017 /* (X - c) > X becomes false. */
11018 if (code == GT_EXPR
11019 && ((code0 == MINUS_EXPR && is_positive >= 0)
11020 || (code0 == PLUS_EXPR && is_positive <= 0)))
11021 {
11022 if (TREE_CODE (arg01) == INTEGER_CST
11023 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11024 fold_overflow_warning (("assuming signed overflow does not "
11025 "occur when assuming that (X - c) > X "
11026 "is always false"),
11027 WARN_STRICT_OVERFLOW_ALL);
11028 return constant_boolean_node (0, type);
11029 }
11030
11031 /* Likewise (X + c) < X becomes false. */
11032 if (code == LT_EXPR
11033 && ((code0 == PLUS_EXPR && is_positive >= 0)
11034 || (code0 == MINUS_EXPR && is_positive <= 0)))
11035 {
11036 if (TREE_CODE (arg01) == INTEGER_CST
11037 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11038 fold_overflow_warning (("assuming signed overflow does not "
11039 "occur when assuming that "
11040 "(X + c) < X is always false"),
11041 WARN_STRICT_OVERFLOW_ALL);
11042 return constant_boolean_node (0, type);
11043 }
11044
11045 /* Convert (X - c) <= X to true. */
11046 if (!HONOR_NANS (arg1)
11047 && code == LE_EXPR
11048 && ((code0 == MINUS_EXPR && is_positive >= 0)
11049 || (code0 == PLUS_EXPR && is_positive <= 0)))
11050 {
11051 if (TREE_CODE (arg01) == INTEGER_CST
11052 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11053 fold_overflow_warning (("assuming signed overflow does not "
11054 "occur when assuming that "
11055 "(X - c) <= X is always true"),
11056 WARN_STRICT_OVERFLOW_ALL);
11057 return constant_boolean_node (1, type);
11058 }
11059
11060 /* Convert (X + c) >= X to true. */
11061 if (!HONOR_NANS (arg1)
11062 && code == GE_EXPR
11063 && ((code0 == PLUS_EXPR && is_positive >= 0)
11064 || (code0 == MINUS_EXPR && is_positive <= 0)))
11065 {
11066 if (TREE_CODE (arg01) == INTEGER_CST
11067 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11068 fold_overflow_warning (("assuming signed overflow does not "
11069 "occur when assuming that "
11070 "(X + c) >= X is always true"),
11071 WARN_STRICT_OVERFLOW_ALL);
11072 return constant_boolean_node (1, type);
11073 }
11074
11075 if (TREE_CODE (arg01) == INTEGER_CST)
11076 {
11077 /* Convert X + c > X and X - c < X to true for integers. */
11078 if (code == GT_EXPR
11079 && ((code0 == PLUS_EXPR && is_positive > 0)
11080 || (code0 == MINUS_EXPR && is_positive < 0)))
11081 {
11082 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11083 fold_overflow_warning (("assuming signed overflow does "
11084 "not occur when assuming that "
11085 "(X + c) > X is always true"),
11086 WARN_STRICT_OVERFLOW_ALL);
11087 return constant_boolean_node (1, type);
11088 }
11089
11090 if (code == LT_EXPR
11091 && ((code0 == MINUS_EXPR && is_positive > 0)
11092 || (code0 == PLUS_EXPR && is_positive < 0)))
11093 {
11094 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11095 fold_overflow_warning (("assuming signed overflow does "
11096 "not occur when assuming that "
11097 "(X - c) < X is always true"),
11098 WARN_STRICT_OVERFLOW_ALL);
11099 return constant_boolean_node (1, type);
11100 }
11101
11102 /* Convert X + c <= X and X - c >= X to false for integers. */
11103 if (code == LE_EXPR
11104 && ((code0 == PLUS_EXPR && is_positive > 0)
11105 || (code0 == MINUS_EXPR && is_positive < 0)))
11106 {
11107 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11108 fold_overflow_warning (("assuming signed overflow does "
11109 "not occur when assuming that "
11110 "(X + c) <= X is always false"),
11111 WARN_STRICT_OVERFLOW_ALL);
11112 return constant_boolean_node (0, type);
11113 }
11114
11115 if (code == GE_EXPR
11116 && ((code0 == MINUS_EXPR && is_positive > 0)
11117 || (code0 == PLUS_EXPR && is_positive < 0)))
11118 {
11119 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11120 fold_overflow_warning (("assuming signed overflow does "
11121 "not occur when assuming that "
11122 "(X - c) >= X is always false"),
11123 WARN_STRICT_OVERFLOW_ALL);
11124 return constant_boolean_node (0, type);
11125 }
11126 }
11127 }
11128
11129 /* If we are comparing an ABS_EXPR with a constant, we can
11130 convert all the cases into explicit comparisons, but they may
11131 well not be faster than doing the ABS and one comparison.
11132 But ABS (X) <= C is a range comparison, which becomes a subtraction
11133 and a comparison, and is probably faster. */
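/* Illustrative example: abs (x) <= 5 becomes x >= -5 && x <= 5, the
   TRUTH_ANDIF_EXPR of two plain comparisons built below.  */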
11134 if (code == LE_EXPR
11135 && TREE_CODE (arg1) == INTEGER_CST
11136 && TREE_CODE (arg0) == ABS_EXPR
11137 && ! TREE_SIDE_EFFECTS (arg0)
11138 && (0 != (tem = negate_expr (arg1)))
11139 && TREE_CODE (tem) == INTEGER_CST
11140 && !TREE_OVERFLOW (tem))
11141 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11142 build2 (GE_EXPR, type,
11143 TREE_OPERAND (arg0, 0), tem),
11144 build2 (LE_EXPR, type,
11145 TREE_OPERAND (arg0, 0), arg1));
11146
11147 /* Convert ABS_EXPR<x> >= 0 to true. */
11148 strict_overflow_p = false;
11149 if (code == GE_EXPR
11150 && (integer_zerop (arg1)
11151 || (! HONOR_NANS (arg0)
11152 && real_zerop (arg1)))
11153 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11154 {
11155 if (strict_overflow_p)
11156 fold_overflow_warning (("assuming signed overflow does not occur "
11157 "when simplifying comparison of "
11158 "absolute value and zero"),
11159 WARN_STRICT_OVERFLOW_CONDITIONAL);
11160 return omit_one_operand_loc (loc, type,
11161 constant_boolean_node (true, type),
11162 arg0);
11163 }
11164
11165 /* Convert ABS_EXPR<x> < 0 to false. */
11166 strict_overflow_p = false;
11167 if (code == LT_EXPR
11168 && (integer_zerop (arg1) || real_zerop (arg1))
11169 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11170 {
11171 if (strict_overflow_p)
11172 fold_overflow_warning (("assuming signed overflow does not occur "
11173 "when simplifying comparison of "
11174 "absolute value and zero"),
11175 WARN_STRICT_OVERFLOW_CONDITIONAL);
11176 return omit_one_operand_loc (loc, type,
11177 constant_boolean_node (false, type),
11178 arg0);
11179 }
11180
11181 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11182 and similarly for >= into !=. */
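/* Illustration: for unsigned X and Y below the precision,
   X < (1 << Y) says no bit at position Y or above is set in X,
   which is exactly X >> Y == 0.  */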
11183 if ((code == LT_EXPR || code == GE_EXPR)
11184 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11185 && TREE_CODE (arg1) == LSHIFT_EXPR
11186 && integer_onep (TREE_OPERAND (arg1, 0)))
11187 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11188 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11189 TREE_OPERAND (arg1, 1)),
11190 build_zero_cst (TREE_TYPE (arg0)));
11191
11192 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11193 otherwise Y might be >= # of bits in X's type and thus e.g.
11194 (unsigned char) (1 << Y) for Y == 15 might be 0.
11195 If the cast is widening, then 1 << Y should have unsigned type,
11196 otherwise if Y is the number of bits in the signed shift type minus 1,
11197 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y ==
11198 31 might be 0xffffffff80000000. */
11199 if ((code == LT_EXPR || code == GE_EXPR)
11200 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11201 && CONVERT_EXPR_P (arg1)
11202 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11203 && (element_precision (TREE_TYPE (arg1))
11204 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11205 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11206 || (element_precision (TREE_TYPE (arg1))
11207 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11208 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11209 {
11210 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11211 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11212 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11213 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11214 build_zero_cst (TREE_TYPE (arg0)));
11215 }
11216
11217 return NULL_TREE;
11218
11219 case UNORDERED_EXPR:
11220 case ORDERED_EXPR:
11221 case UNLT_EXPR:
11222 case UNLE_EXPR:
11223 case UNGT_EXPR:
11224 case UNGE_EXPR:
11225 case UNEQ_EXPR:
11226 case LTGT_EXPR:
11227 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11228 {
11229 tree targ0 = strip_float_extensions (arg0);
11230 tree targ1 = strip_float_extensions (arg1);
11231 tree newtype = TREE_TYPE (targ0);
11232
11233 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11234 newtype = TREE_TYPE (targ1);
11235
11236 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11237 return fold_build2_loc (loc, code, type,
11238 fold_convert_loc (loc, newtype, targ0),
11239 fold_convert_loc (loc, newtype, targ1));
11240 }
11241
11242 return NULL_TREE;
11243
11244 case COMPOUND_EXPR:
11245 /* When pedantic, a compound expression can be neither an lvalue
11246 nor an integer constant expression. */
11247 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11248 return NULL_TREE;
11249 /* Don't let (0, 0) be a null pointer constant. */
11250 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11251 : fold_convert_loc (loc, type, arg1);
11252 return pedantic_non_lvalue_loc (loc, tem);
11253
11254 case ASSERT_EXPR:
11255 /* An ASSERT_EXPR should never be passed to fold_binary. */
11256 gcc_unreachable ();
11257
11258 default:
11259 return NULL_TREE;
11260 } /* switch (code) */
11261 }
11262
11263 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11264 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11265 of GOTO_EXPR. */
11266
11267 static tree
11268 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11269 {
11270 switch (TREE_CODE (*tp))
11271 {
11272 case LABEL_EXPR:
11273 return *tp;
11274
11275 case GOTO_EXPR:
11276 *walk_subtrees = 0;
11277
11278 /* ... fall through ... */
11279
11280 default:
11281 return NULL_TREE;
11282 }
11283 }
11284
11285 /* Return whether the sub-tree ST contains a label which is accessible from
11286 outside the sub-tree. */
11287
11288 static bool
11289 contains_label_p (tree st)
11290 {
11291 return
11292 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11293 }
11294
11295 /* Fold a ternary expression of code CODE and type TYPE with operands
11296 OP0, OP1, and OP2. Return the folded expression if folding is
11297 successful. Otherwise, return NULL_TREE. */
11298
11299 tree
11300 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11301 tree op0, tree op1, tree op2)
11302 {
11303 tree tem;
11304 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11305 enum tree_code_class kind = TREE_CODE_CLASS (code);
11306
11307 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11308 && TREE_CODE_LENGTH (code) == 3);
11309
11310 /* If this is a commutative operation, and OP0 is a constant, move it
11311 to OP1 to reduce the number of tests below. */
11312 if (commutative_ternary_tree_code (code)
11313 && tree_swap_operands_p (op0, op1, true))
11314 return fold_build3_loc (loc, code, type, op1, op0, op2);
11315
11316 tem = generic_simplify (loc, code, type, op0, op1, op2);
11317 if (tem)
11318 return tem;
11319
11320 /* Strip any conversions that don't change the mode. This is safe
11321 for every expression, except for a comparison expression because
11322 its signedness is derived from its operands. So, in the latter
11323 case, only strip conversions that don't change the signedness.
11324
11325 Note that this is done as an internal manipulation within the
11326 constant folder, in order to find the simplest representation of
11327 the arguments so that their form can be studied. In any cases,
11328 the appropriate type conversions should be put back in the tree
11329 that will get out of the constant folder. */
11330 if (op0)
11331 {
11332 arg0 = op0;
11333 STRIP_NOPS (arg0);
11334 }
11335
11336 if (op1)
11337 {
11338 arg1 = op1;
11339 STRIP_NOPS (arg1);
11340 }
11341
11342 if (op2)
11343 {
11344 arg2 = op2;
11345 STRIP_NOPS (arg2);
11346 }
11347
11348 switch (code)
11349 {
11350 case COMPONENT_REF:
11351 if (TREE_CODE (arg0) == CONSTRUCTOR
11352 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11353 {
11354 unsigned HOST_WIDE_INT idx;
11355 tree field, value;
11356 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11357 if (field == arg1)
11358 return value;
11359 }
11360 return NULL_TREE;
11361
11362 case COND_EXPR:
11363 case VEC_COND_EXPR:
11364 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11365 so all simple results must be passed through pedantic_non_lvalue. */
11366 if (TREE_CODE (arg0) == INTEGER_CST)
11367 {
11368 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11369 tem = integer_zerop (arg0) ? op2 : op1;
11370 /* Only optimize constant conditions when the selected branch
11371 has the same type as the COND_EXPR. This avoids optimizing
11372 away "c ? x : throw", where the throw has a void type.
11373 Avoid throwing away that operand which contains label. */
11374 if ((!TREE_SIDE_EFFECTS (unused_op)
11375 || !contains_label_p (unused_op))
11376 && (! VOID_TYPE_P (TREE_TYPE (tem))
11377 || VOID_TYPE_P (type)))
11378 return pedantic_non_lvalue_loc (loc, tem);
11379 return NULL_TREE;
11380 }
11381 else if (TREE_CODE (arg0) == VECTOR_CST)
11382 {
11383 if ((TREE_CODE (arg1) == VECTOR_CST
11384 || TREE_CODE (arg1) == CONSTRUCTOR)
11385 && (TREE_CODE (arg2) == VECTOR_CST
11386 || TREE_CODE (arg2) == CONSTRUCTOR))
11387 {
11388 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11389 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11390 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11391 for (i = 0; i < nelts; i++)
11392 {
11393 tree val = VECTOR_CST_ELT (arg0, i);
11394 if (integer_all_onesp (val))
11395 sel[i] = i;
11396 else if (integer_zerop (val))
11397 sel[i] = nelts + i;
11398 else /* Currently unreachable. */
11399 return NULL_TREE;
11400 }
11401 tree t = fold_vec_perm (type, arg1, arg2, sel);
11402 if (t != NULL_TREE)
11403 return t;
11404 }
11405 }
11406
11407 /* If we have A op B ? A : C, we may be able to convert this to a
11408 simpler expression, depending on the operation and the values
11409 of B and C. Signed zeros prevent all of these transformations,
11410 for reasons given above each one.
11411
11412 Also try swapping the arguments and inverting the conditional. */
11413 if (COMPARISON_CLASS_P (arg0)
11414 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11415 arg1, TREE_OPERAND (arg0, 1))
11416 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11417 {
11418 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11419 if (tem)
11420 return tem;
11421 }
11422
11423 if (COMPARISON_CLASS_P (arg0)
11424 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11425 op2,
11426 TREE_OPERAND (arg0, 1))
11427 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11428 {
11429 location_t loc0 = expr_location_or (arg0, loc);
11430 tem = fold_invert_truthvalue (loc0, arg0);
11431 if (tem && COMPARISON_CLASS_P (tem))
11432 {
11433 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11434 if (tem)
11435 return tem;
11436 }
11437 }
11438
11439 /* If the second operand is simpler than the third, swap them
11440 since that produces better jump optimization results. */
11441 if (truth_value_p (TREE_CODE (arg0))
11442 && tree_swap_operands_p (op1, op2, false))
11443 {
11444 location_t loc0 = expr_location_or (arg0, loc);
11445 /* See if this can be inverted. If it can't, possibly because
11446 it was a floating-point inequality comparison, don't do
11447 anything. */
11448 tem = fold_invert_truthvalue (loc0, arg0);
11449 if (tem)
11450 return fold_build3_loc (loc, code, type, tem, op2, op1);
11451 }
11452
11453 /* Convert A ? 1 : 0 to simply A. */
11454 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11455 : (integer_onep (op1)
11456 && !VECTOR_TYPE_P (type)))
11457 && integer_zerop (op2)
11458 /* If we try to convert OP0 to our type, the
11459 call to fold will try to move the conversion inside
11460 a COND, which will recurse. In that case, the COND_EXPR
11461 is probably the best choice, so leave it alone. */
11462 && type == TREE_TYPE (arg0))
11463 return pedantic_non_lvalue_loc (loc, arg0);
11464
11465 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11466 over COND_EXPR in cases such as floating point comparisons. */
11467 if (integer_zerop (op1)
11468 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11469 : (integer_onep (op2)
11470 && !VECTOR_TYPE_P (type)))
11471 && truth_value_p (TREE_CODE (arg0)))
11472 return pedantic_non_lvalue_loc (loc,
11473 fold_convert_loc (loc, type,
11474 invert_truthvalue_loc (loc,
11475 arg0)));
11476
11477 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11478 if (TREE_CODE (arg0) == LT_EXPR
11479 && integer_zerop (TREE_OPERAND (arg0, 1))
11480 && integer_zerop (op2)
11481 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11482 {
11483 /* sign_bit_p looks through both zero and sign extensions,
11484 but for this optimization only sign extensions are
11485 usable. */
11486 tree tem2 = TREE_OPERAND (arg0, 0);
11487 while (tem != tem2)
11488 {
11489 if (TREE_CODE (tem2) != NOP_EXPR
11490 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11491 {
11492 tem = NULL_TREE;
11493 break;
11494 }
11495 tem2 = TREE_OPERAND (tem2, 0);
11496 }
11497 /* sign_bit_p only checks ARG1 bits within A's precision.
11498 If <sign bit of A> has wider type than A, bits outside
11499 of A's precision in <sign bit of A> need to be checked.
11500 If they are all 0, this optimization needs to be done
11501 in unsigned A's type, if they are all 1 in signed A's type,
11502 otherwise this can't be done. */
11503 if (tem
11504 && TYPE_PRECISION (TREE_TYPE (tem))
11505 < TYPE_PRECISION (TREE_TYPE (arg1))
11506 && TYPE_PRECISION (TREE_TYPE (tem))
11507 < TYPE_PRECISION (type))
11508 {
11509 int inner_width, outer_width;
11510 tree tem_type;
11511
11512 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11513 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11514 if (outer_width > TYPE_PRECISION (type))
11515 outer_width = TYPE_PRECISION (type);
11516
11517 wide_int mask = wi::shifted_mask
11518 (inner_width, outer_width - inner_width, false,
11519 TYPE_PRECISION (TREE_TYPE (arg1)));
11520
11521 wide_int common = mask & arg1;
11522 if (common == mask)
11523 {
11524 tem_type = signed_type_for (TREE_TYPE (tem));
11525 tem = fold_convert_loc (loc, tem_type, tem);
11526 }
11527 else if (common == 0)
11528 {
11529 tem_type = unsigned_type_for (TREE_TYPE (tem));
11530 tem = fold_convert_loc (loc, tem_type, tem);
11531 }
11532 else
11533 tem = NULL;
11534 }
11535
11536 if (tem)
11537 return
11538 fold_convert_loc (loc, type,
11539 fold_build2_loc (loc, BIT_AND_EXPR,
11540 TREE_TYPE (tem), tem,
11541 fold_convert_loc (loc,
11542 TREE_TYPE (tem),
11543 arg1)));
11544 }
11545
11546 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11547 already handled above. */
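/* Illustrative example (hypothetical N): with N == 3,
   ((A >> 3) & 1) ? 8 : 0 yields 8 exactly when bit 3 of A is set,
   which is just A & 8.  */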
11548 if (TREE_CODE (arg0) == BIT_AND_EXPR
11549 && integer_onep (TREE_OPERAND (arg0, 1))
11550 && integer_zerop (op2)
11551 && integer_pow2p (arg1))
11552 {
11553 tree tem = TREE_OPERAND (arg0, 0);
11554 STRIP_NOPS (tem);
11555 if (TREE_CODE (tem) == RSHIFT_EXPR
11556 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11557 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11558 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11559 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11560 TREE_OPERAND (tem, 0), arg1);
11561 }
11562
11563 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11564 is probably obsolete because the first operand should be a
11565 truth value (that's why we have the two cases above), but let's
11566 leave it in until we can confirm this for all front-ends. */
11567 if (integer_zerop (op2)
11568 && TREE_CODE (arg0) == NE_EXPR
11569 && integer_zerop (TREE_OPERAND (arg0, 1))
11570 && integer_pow2p (arg1)
11571 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11572 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11573 arg1, OEP_ONLY_CONST))
11574 return pedantic_non_lvalue_loc (loc,
11575 fold_convert_loc (loc, type,
11576 TREE_OPERAND (arg0, 0)));
11577
11578 /* Disable the transformations below for vectors, since
11579 fold_binary_op_with_conditional_arg may undo them immediately,
11580 yielding an infinite loop. */
11581 if (code == VEC_COND_EXPR)
11582 return NULL_TREE;
11583
11584 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11585 if (integer_zerop (op2)
11586 && truth_value_p (TREE_CODE (arg0))
11587 && truth_value_p (TREE_CODE (arg1))
11588 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11589 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11590 : TRUTH_ANDIF_EXPR,
11591 type, fold_convert_loc (loc, type, arg0), arg1);
11592
11593 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11594 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11595 && truth_value_p (TREE_CODE (arg0))
11596 && truth_value_p (TREE_CODE (arg1))
11597 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11598 {
11599 location_t loc0 = expr_location_or (arg0, loc);
11600 /* Only perform transformation if ARG0 is easily inverted. */
11601 tem = fold_invert_truthvalue (loc0, arg0);
11602 if (tem)
11603 return fold_build2_loc (loc, code == VEC_COND_EXPR
11604 ? BIT_IOR_EXPR
11605 : TRUTH_ORIF_EXPR,
11606 type, fold_convert_loc (loc, type, tem),
11607 arg1);
11608 }
11609
11610 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11611 if (integer_zerop (arg1)
11612 && truth_value_p (TREE_CODE (arg0))
11613 && truth_value_p (TREE_CODE (op2))
11614 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11615 {
11616 location_t loc0 = expr_location_or (arg0, loc);
11617 /* Only perform transformation if ARG0 is easily inverted. */
11618 tem = fold_invert_truthvalue (loc0, arg0);
11619 if (tem)
11620 return fold_build2_loc (loc, code == VEC_COND_EXPR
11621 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11622 type, fold_convert_loc (loc, type, tem),
11623 op2);
11624 }
11625
11626 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11627 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11628 && truth_value_p (TREE_CODE (arg0))
11629 && truth_value_p (TREE_CODE (op2))
11630 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11631 return fold_build2_loc (loc, code == VEC_COND_EXPR
11632 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11633 type, fold_convert_loc (loc, type, arg0), op2);
11634
11635 return NULL_TREE;
11636
11637 case CALL_EXPR:
11638 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11639 of fold_ternary on them. */
11640 gcc_unreachable ();
11641
11642 case BIT_FIELD_REF:
11643 if ((TREE_CODE (arg0) == VECTOR_CST
11644 || (TREE_CODE (arg0) == CONSTRUCTOR
11645 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11646 && (type == TREE_TYPE (TREE_TYPE (arg0))
11647 || (TREE_CODE (type) == VECTOR_TYPE
11648 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11649 {
11650 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11651 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11652 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11653 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11654
11655 if (n != 0
11656 && (idx % width) == 0
11657 && (n % width) == 0
11658 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11659 {
11660 idx = idx / width;
11661 n = n / width;
11662
11663 if (TREE_CODE (arg0) == VECTOR_CST)
11664 {
11665 if (n == 1)
11666 return VECTOR_CST_ELT (arg0, idx);
11667
11668 tree *vals = XALLOCAVEC (tree, n);
11669 for (unsigned i = 0; i < n; ++i)
11670 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11671 return build_vector (type, vals);
11672 }
11673
11674 /* Constructor elements can be subvectors. */
11675 unsigned HOST_WIDE_INT k = 1;
11676 if (CONSTRUCTOR_NELTS (arg0) != 0)
11677 {
11678 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11679 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11680 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11681 }
11682
11683 /* We keep an exact subset of the constructor elements. */
11684 if ((idx % k) == 0 && (n % k) == 0)
11685 {
11686 if (CONSTRUCTOR_NELTS (arg0) == 0)
11687 return build_constructor (type, NULL);
11688 idx /= k;
11689 n /= k;
11690 if (n == 1)
11691 {
11692 if (idx < CONSTRUCTOR_NELTS (arg0))
11693 return CONSTRUCTOR_ELT (arg0, idx)->value;
11694 return build_zero_cst (type);
11695 }
11696
11697 vec<constructor_elt, va_gc> *vals;
11698 vec_alloc (vals, n);
11699 for (unsigned i = 0;
11700 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11701 ++i)
11702 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11703 CONSTRUCTOR_ELT
11704 (arg0, idx + i)->value);
11705 return build_constructor (type, vals);
11706 }
11707 /* The bitfield references a single constructor element. */
11708 else if (idx + n <= (idx / k + 1) * k)
11709 {
11710 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11711 return build_zero_cst (type);
11712 else if (n == k)
11713 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11714 else
11715 return fold_build3_loc (loc, code, type,
11716 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11717 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11718 }
11719 }
11720 }
11721
11722 /* A bit-field-ref that referenced the full argument can be stripped. */
11723 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11724 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11725 && integer_zerop (op2))
11726 return fold_convert_loc (loc, type, arg0);
11727
11728 /* On constants we can use native encode/interpret to constant
11729 fold (nearly) all BIT_FIELD_REFs. */
11730 if (CONSTANT_CLASS_P (arg0)
11731 && can_native_interpret_type_p (type)
11732 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11733 /* This limitation should not be necessary, we just need to
11734 round this up to mode size. */
11735 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11736 /* Need bit-shifting of the buffer to relax the following. */
11737 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11738 {
11739 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11740 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11741 unsigned HOST_WIDE_INT clen;
11742 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11743 /* ??? We cannot tell native_encode_expr to start at
11744 some random byte only. So limit us to a reasonable amount
11745 of work. */
11746 if (clen <= 4096)
11747 {
11748 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11749 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11750 if (len > 0
11751 && len * BITS_PER_UNIT >= bitpos + bitsize)
11752 {
11753 tree v = native_interpret_expr (type,
11754 b + bitpos / BITS_PER_UNIT,
11755 bitsize / BITS_PER_UNIT);
11756 if (v)
11757 return v;
11758 }
11759 }
11760 }
11761
11762 return NULL_TREE;
11763
11764 case FMA_EXPR:
11765 /* For integers we can decompose the FMA if possible. */
11766 if (TREE_CODE (arg0) == INTEGER_CST
11767 && TREE_CODE (arg1) == INTEGER_CST)
11768 return fold_build2_loc (loc, PLUS_EXPR, type,
11769 const_binop (MULT_EXPR, arg0, arg1), arg2);
11770 if (integer_zerop (arg2))
11771 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11772
11773 return fold_fma (loc, type, arg0, arg1, arg2);
11774
11775 case VEC_PERM_EXPR:
11776 if (TREE_CODE (arg2) == VECTOR_CST)
11777 {
11778 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11779 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11780 unsigned char *sel2 = sel + nelts;
11781 bool need_mask_canon = false;
11782 bool need_mask_canon2 = false;
11783 bool all_in_vec0 = true;
11784 bool all_in_vec1 = true;
11785 bool maybe_identity = true;
11786 bool single_arg = (op0 == op1);
11787 bool changed = false;
11788
11789 mask2 = 2 * nelts - 1;
11790 mask = single_arg ? (nelts - 1) : mask2;
11791 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11792 for (i = 0; i < nelts; i++)
11793 {
11794 tree val = VECTOR_CST_ELT (arg2, i);
11795 if (TREE_CODE (val) != INTEGER_CST)
11796 return NULL_TREE;
11797
11798 /* Make sure that the perm value is in an acceptable
11799 range. */
11800 wide_int t = val;
11801 need_mask_canon |= wi::gtu_p (t, mask);
11802 need_mask_canon2 |= wi::gtu_p (t, mask2);
11803 sel[i] = t.to_uhwi () & mask;
11804 sel2[i] = t.to_uhwi () & mask2;
11805
11806 if (sel[i] < nelts)
11807 all_in_vec1 = false;
11808 else
11809 all_in_vec0 = false;
11810
11811 if ((sel[i] & (nelts-1)) != i)
11812 maybe_identity = false;
11813 }
11814
11815 if (maybe_identity)
11816 {
11817 if (all_in_vec0)
11818 return op0;
11819 if (all_in_vec1)
11820 return op1;
11821 }
11822
11823 if (all_in_vec0)
11824 op1 = op0;
11825 else if (all_in_vec1)
11826 {
11827 op0 = op1;
11828 for (i = 0; i < nelts; i++)
11829 sel[i] -= nelts;
11830 need_mask_canon = true;
11831 }
11832
11833 if ((TREE_CODE (op0) == VECTOR_CST
11834 || TREE_CODE (op0) == CONSTRUCTOR)
11835 && (TREE_CODE (op1) == VECTOR_CST
11836 || TREE_CODE (op1) == CONSTRUCTOR))
11837 {
11838 tree t = fold_vec_perm (type, op0, op1, sel);
11839 if (t != NULL_TREE)
11840 return t;
11841 }
11842
11843 if (op0 == op1 && !single_arg)
11844 changed = true;
11845
11846 /* Some targets are deficient and fail to expand a single
11847 argument permutation while still allowing an equivalent
11848 2-argument version. */
11849 if (need_mask_canon && arg2 == op2
11850 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11851 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11852 {
11853 need_mask_canon = need_mask_canon2;
11854 sel = sel2;
11855 }
11856
11857 if (need_mask_canon && arg2 == op2)
11858 {
11859 tree *tsel = XALLOCAVEC (tree, nelts);
11860 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11861 for (i = 0; i < nelts; i++)
11862 tsel[i] = build_int_cst (eltype, sel[i]);
11863 op2 = build_vector (TREE_TYPE (arg2), tsel);
11864 changed = true;
11865 }
11866
11867 if (changed)
11868 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11869 }
11870 return NULL_TREE;
11871
11872 default:
11873 return NULL_TREE;
11874 } /* switch (code) */
11875 }
11876
11877 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11878 of an array (or vector). */
11879
11880 tree
11881 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11882 {
11883 tree index_type = NULL_TREE;
11884 offset_int low_bound = 0;
11885
11886 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11887 {
11888 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11889 if (domain_type && TYPE_MIN_VALUE (domain_type))
11890 {
11891 /* Static constructors for variably sized objects make no sense. */
11892 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11893 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11894 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11895 }
11896 }
11897
11898 if (index_type)
11899 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11900 TYPE_SIGN (index_type));
11901
11902 offset_int index = low_bound - 1;
11903 if (index_type)
11904 index = wi::ext (index, TYPE_PRECISION (index_type),
11905 TYPE_SIGN (index_type));
11906
11907 offset_int max_index;
11908 unsigned HOST_WIDE_INT cnt;
11909 tree cfield, cval;
11910
11911 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11912 {
11913 /* An array constructor might explicitly set the index, or specify a
11914 range, or leave the index NULL, meaning that it is the next index
11915 after the previous one. */
11916 if (cfield)
11917 {
11918 if (TREE_CODE (cfield) == INTEGER_CST)
11919 max_index = index = wi::to_offset (cfield);
11920 else
11921 {
11922 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11923 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11924 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11925 }
11926 }
11927 else
11928 {
11929 index += 1;
11930 if (index_type)
11931 index = wi::ext (index, TYPE_PRECISION (index_type),
11932 TYPE_SIGN (index_type));
11933 max_index = index;
11934 }
11935
11936 /* Do we have a match? */
11937 if (wi::cmpu (access_index, index) >= 0
11938 && wi::cmpu (access_index, max_index) <= 0)
11939 return cval;
11940 }
11941 return NULL_TREE;
11942 }
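/* Illustrative example (editor's sketch, not part of GCC): for the
   CONSTRUCTOR of

     static const int a[4] = { [1] = 7, [3] = 9 };

   the elements are (index 1, value 7) and (index 3, value 9), so
   get_array_ctor_element_at_index (ctor, 3) returns the INTEGER_CST 9,
   while get_array_ctor_element_at_index (ctor, 2) returns NULL_TREE
   because index 2 has no explicit element in the constructor.  */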
11943
11944 /* Perform constant folding and related simplification of EXPR.
11945 The related simplifications include x*1 => x, x*0 => 0, etc.,
11946 and application of the associative law.
11947 NOP_EXPR conversions may be removed freely (as long as we
11948 are careful not to change the type of the overall expression).
11949 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11950 but we can constant-fold them if they have constant operands. */
11951
11952 #ifdef ENABLE_FOLD_CHECKING
11953 # define fold(x) fold_1 (x)
11954 static tree fold_1 (tree);
11955 static
11956 #endif
11957 tree
11958 fold (tree expr)
11959 {
11960 const tree t = expr;
11961 enum tree_code code = TREE_CODE (t);
11962 enum tree_code_class kind = TREE_CODE_CLASS (code);
11963 tree tem;
11964 location_t loc = EXPR_LOCATION (expr);
11965
11966 /* Return right away if a constant. */
11967 if (kind == tcc_constant)
11968 return t;
11969
11970 /* CALL_EXPR-like objects with variable numbers of operands are
11971 treated specially. */
11972 if (kind == tcc_vl_exp)
11973 {
11974 if (code == CALL_EXPR)
11975 {
11976 tem = fold_call_expr (loc, expr, false);
11977 return tem ? tem : expr;
11978 }
11979 return expr;
11980 }
11981
11982 if (IS_EXPR_CODE_CLASS (kind))
11983 {
11984 tree type = TREE_TYPE (t);
11985 tree op0, op1, op2;
11986
11987 switch (TREE_CODE_LENGTH (code))
11988 {
11989 case 1:
11990 op0 = TREE_OPERAND (t, 0);
11991 tem = fold_unary_loc (loc, code, type, op0);
11992 return tem ? tem : expr;
11993 case 2:
11994 op0 = TREE_OPERAND (t, 0);
11995 op1 = TREE_OPERAND (t, 1);
11996 tem = fold_binary_loc (loc, code, type, op0, op1);
11997 return tem ? tem : expr;
11998 case 3:
11999 op0 = TREE_OPERAND (t, 0);
12000 op1 = TREE_OPERAND (t, 1);
12001 op2 = TREE_OPERAND (t, 2);
12002 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12003 return tem ? tem : expr;
12004 default:
12005 break;
12006 }
12007 }
12008
12009 switch (code)
12010 {
12011 case ARRAY_REF:
12012 {
12013 tree op0 = TREE_OPERAND (t, 0);
12014 tree op1 = TREE_OPERAND (t, 1);
12015
12016 if (TREE_CODE (op1) == INTEGER_CST
12017 && TREE_CODE (op0) == CONSTRUCTOR
12018 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12019 {
12020 tree val = get_array_ctor_element_at_index (op0,
12021 wi::to_offset (op1));
12022 if (val)
12023 return val;
12024 }
12025
12026 return t;
12027 }
12028
12029 /* Return a VECTOR_CST if possible. */
12030 case CONSTRUCTOR:
12031 {
12032 tree type = TREE_TYPE (t);
12033 if (TREE_CODE (type) != VECTOR_TYPE)
12034 return t;
12035
12036 unsigned i;
12037 tree val;
12038 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12039 if (! CONSTANT_CLASS_P (val))
12040 return t;
12041
12042 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12043 }
12044
12045 case CONST_DECL:
12046 return fold (DECL_INITIAL (t));
12047
12048 default:
12049 return t;
12050 } /* switch (code) */
12051 }
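/* Usage sketch (editor's example): given INTEGER_CST trees ONE and TWO
   of type integer_type_node,

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node, one, two));

   yields the INTEGER_CST 3 via fold_binary_loc; when no simplification
   applies, fold hands back the original expression unchanged.  */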
12052
12053 #ifdef ENABLE_FOLD_CHECKING
12054 #undef fold
12055
12056 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12057 hash_table<nofree_ptr_hash<const tree_node> > *);
12058 static void fold_check_failed (const_tree, const_tree);
12059 void print_fold_checksum (const_tree);
12060
12061 /* When configured with --enable-checking=fold, compute a digest of EXPR
12062 before and after the actual fold call, to verify that fold did not
12063 accidentally change the original expr. */
12064
12065 tree
12066 fold (tree expr)
12067 {
12068 tree ret;
12069 struct md5_ctx ctx;
12070 unsigned char checksum_before[16], checksum_after[16];
12071 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12072
12073 md5_init_ctx (&ctx);
12074 fold_checksum_tree (expr, &ctx, &ht);
12075 md5_finish_ctx (&ctx, checksum_before);
12076 ht.empty ();
12077
12078 ret = fold_1 (expr);
12079
12080 md5_init_ctx (&ctx);
12081 fold_checksum_tree (expr, &ctx, &ht);
12082 md5_finish_ctx (&ctx, checksum_after);
12083
12084 if (memcmp (checksum_before, checksum_after, 16))
12085 fold_check_failed (expr, ret);
12086
12087 return ret;
12088 }
12089
12090 void
12091 print_fold_checksum (const_tree expr)
12092 {
12093 struct md5_ctx ctx;
12094 unsigned char checksum[16], cnt;
12095 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12096
12097 md5_init_ctx (&ctx);
12098 fold_checksum_tree (expr, &ctx, &ht);
12099 md5_finish_ctx (&ctx, checksum);
12100 for (cnt = 0; cnt < 16; ++cnt)
12101 fprintf (stderr, "%02x", checksum[cnt]);
12102 putc ('\n', stderr);
12103 }
12104
12105 static void
12106 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12107 {
12108 internal_error ("fold check: original tree changed by fold");
12109 }
12110
12111 static void
12112 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12113 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12114 {
12115 const tree_node **slot;
12116 enum tree_code code;
12117 union tree_node buf;
12118 int i, len;
12119
12120 recursive_label:
12121 if (expr == NULL)
12122 return;
12123 slot = ht->find_slot (expr, INSERT);
12124 if (*slot != NULL)
12125 return;
12126 *slot = expr;
12127 code = TREE_CODE (expr);
12128 if (TREE_CODE_CLASS (code) == tcc_declaration
12129 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12130 {
12131 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12132 memcpy ((char *) &buf, expr, tree_size (expr));
12133 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12134 buf.decl_with_vis.symtab_node = NULL;
12135 expr = (tree) &buf;
12136 }
12137 else if (TREE_CODE_CLASS (code) == tcc_type
12138 && (TYPE_POINTER_TO (expr)
12139 || TYPE_REFERENCE_TO (expr)
12140 || TYPE_CACHED_VALUES_P (expr)
12141 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12142 || TYPE_NEXT_VARIANT (expr)))
12143 {
12144 /* Allow these fields to be modified. */
12145 tree tmp;
12146 memcpy ((char *) &buf, expr, tree_size (expr));
12147 expr = tmp = (tree) &buf;
12148 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12149 TYPE_POINTER_TO (tmp) = NULL;
12150 TYPE_REFERENCE_TO (tmp) = NULL;
12151 TYPE_NEXT_VARIANT (tmp) = NULL;
12152 if (TYPE_CACHED_VALUES_P (tmp))
12153 {
12154 TYPE_CACHED_VALUES_P (tmp) = 0;
12155 TYPE_CACHED_VALUES (tmp) = NULL;
12156 }
12157 }
12158 md5_process_bytes (expr, tree_size (expr), ctx);
12159 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12160 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12161 if (TREE_CODE_CLASS (code) != tcc_type
12162 && TREE_CODE_CLASS (code) != tcc_declaration
12163 && code != TREE_LIST
12164 && code != SSA_NAME
12165 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12166 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12167 switch (TREE_CODE_CLASS (code))
12168 {
12169 case tcc_constant:
12170 switch (code)
12171 {
12172 case STRING_CST:
12173 md5_process_bytes (TREE_STRING_POINTER (expr),
12174 TREE_STRING_LENGTH (expr), ctx);
12175 break;
12176 case COMPLEX_CST:
12177 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12178 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12179 break;
12180 case VECTOR_CST:
12181 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12182 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12183 break;
12184 default:
12185 break;
12186 }
12187 break;
12188 case tcc_exceptional:
12189 switch (code)
12190 {
12191 case TREE_LIST:
12192 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12193 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12194 expr = TREE_CHAIN (expr);
12195 goto recursive_label;
12196 break;
12197 case TREE_VEC:
12198 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12199 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12200 break;
12201 default:
12202 break;
12203 }
12204 break;
12205 case tcc_expression:
12206 case tcc_reference:
12207 case tcc_comparison:
12208 case tcc_unary:
12209 case tcc_binary:
12210 case tcc_statement:
12211 case tcc_vl_exp:
12212 len = TREE_OPERAND_LENGTH (expr);
12213 for (i = 0; i < len; ++i)
12214 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12215 break;
12216 case tcc_declaration:
12217 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12218 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12219 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12220 {
12221 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12222 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12223 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12224 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12225 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12226 }
12227
12228 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12229 {
12230 if (TREE_CODE (expr) == FUNCTION_DECL)
12231 {
12232 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12233 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12234 }
12235 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12236 }
12237 break;
12238 case tcc_type:
12239 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12240 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12241 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12242 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12243 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12244 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12245 if (INTEGRAL_TYPE_P (expr)
12246 || SCALAR_FLOAT_TYPE_P (expr))
12247 {
12248 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12249 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12250 }
12251 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12252 if (TREE_CODE (expr) == RECORD_TYPE
12253 || TREE_CODE (expr) == UNION_TYPE
12254 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12255 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12256 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12257 break;
12258 default:
12259 break;
12260 }
12261 }
12262
12263 /* Helper function for outputting the checksum of a tree T. When
12264 debugging with gdb, you can "define mynext" to be "next" followed
12265 by "call debug_fold_checksum (op0)", then just trace down till the
12266 outputs differ. */
12267
12268 DEBUG_FUNCTION void
12269 debug_fold_checksum (const_tree t)
12270 {
12271 int i;
12272 unsigned char checksum[16];
12273 struct md5_ctx ctx;
12274 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12275
12276 md5_init_ctx (&ctx);
12277 fold_checksum_tree (t, &ctx, &ht);
12278 md5_finish_ctx (&ctx, checksum);
12279 ht.empty ();
12280
12281 for (i = 0; i < 16; i++)
12282 fprintf (stderr, "%d ", checksum[i]);
12283
12284 fprintf (stderr, "\n");
12285 }
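/* Example gdb session for the workflow described above
   (editor's illustration):

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end
     (gdb) mynext

   Stepping with "mynext" prints the checksum after every statement, so
   the first step whose output differs is the one that modified OP0.  */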
12286
12287 #endif
12288
12289 /* Fold a unary tree expression with code CODE of type TYPE with an
12290 operand OP0. LOC is the location of the resulting expression.
12291 Return a folded expression if successful. Otherwise, return a tree
12292 expression with code CODE of type TYPE with an operand OP0. */
12293
12294 tree
12295 fold_build1_stat_loc (location_t loc,
12296 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12297 {
12298 tree tem;
12299 #ifdef ENABLE_FOLD_CHECKING
12300 unsigned char checksum_before[16], checksum_after[16];
12301 struct md5_ctx ctx;
12302 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12303
12304 md5_init_ctx (&ctx);
12305 fold_checksum_tree (op0, &ctx, &ht);
12306 md5_finish_ctx (&ctx, checksum_before);
12307 ht.empty ();
12308 #endif
12309
12310 tem = fold_unary_loc (loc, code, type, op0);
12311 if (!tem)
12312 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12313
12314 #ifdef ENABLE_FOLD_CHECKING
12315 md5_init_ctx (&ctx);
12316 fold_checksum_tree (op0, &ctx, &ht);
12317 md5_finish_ctx (&ctx, checksum_after);
12318
12319 if (memcmp (checksum_before, checksum_after, 16))
12320 fold_check_failed (op0, tem);
12321 #endif
12322 return tem;
12323 }
12324
12325 /* Fold a binary tree expression with code CODE of type TYPE with
12326 operands OP0 and OP1. LOC is the location of the resulting
12327 expression. Return a folded expression if successful. Otherwise,
12328 return a tree expression with code CODE of type TYPE with operands
12329 OP0 and OP1. */
12330
12331 tree
12332 fold_build2_stat_loc (location_t loc,
12333 enum tree_code code, tree type, tree op0, tree op1
12334 MEM_STAT_DECL)
12335 {
12336 tree tem;
12337 #ifdef ENABLE_FOLD_CHECKING
12338 unsigned char checksum_before_op0[16],
12339 checksum_before_op1[16],
12340 checksum_after_op0[16],
12341 checksum_after_op1[16];
12342 struct md5_ctx ctx;
12343 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12344
12345 md5_init_ctx (&ctx);
12346 fold_checksum_tree (op0, &ctx, &ht);
12347 md5_finish_ctx (&ctx, checksum_before_op0);
12348 ht.empty ();
12349
12350 md5_init_ctx (&ctx);
12351 fold_checksum_tree (op1, &ctx, &ht);
12352 md5_finish_ctx (&ctx, checksum_before_op1);
12353 ht.empty ();
12354 #endif
12355
12356 tem = fold_binary_loc (loc, code, type, op0, op1);
12357 if (!tem)
12358 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12359
12360 #ifdef ENABLE_FOLD_CHECKING
12361 md5_init_ctx (&ctx);
12362 fold_checksum_tree (op0, &ctx, &ht);
12363 md5_finish_ctx (&ctx, checksum_after_op0);
12364 ht.empty ();
12365
12366 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12367 fold_check_failed (op0, tem);
12368
12369 md5_init_ctx (&ctx);
12370 fold_checksum_tree (op1, &ctx, &ht);
12371 md5_finish_ctx (&ctx, checksum_after_op1);
12372
12373 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12374 fold_check_failed (op1, tem);
12375 #endif
12376 return tem;
12377 }
12378
12379 /* Fold a ternary tree expression with code CODE of type TYPE with
12380 operands OP0, OP1, and OP2. Return a folded expression if
12381 successful. Otherwise, return a tree expression with code CODE of
12382 type TYPE with operands OP0, OP1, and OP2. */
12383
12384 tree
12385 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12386 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12387 {
12388 tree tem;
12389 #ifdef ENABLE_FOLD_CHECKING
12390 unsigned char checksum_before_op0[16],
12391 checksum_before_op1[16],
12392 checksum_before_op2[16],
12393 checksum_after_op0[16],
12394 checksum_after_op1[16],
12395 checksum_after_op2[16];
12396 struct md5_ctx ctx;
12397 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12398
12399 md5_init_ctx (&ctx);
12400 fold_checksum_tree (op0, &ctx, &ht);
12401 md5_finish_ctx (&ctx, checksum_before_op0);
12402 ht.empty ();
12403
12404 md5_init_ctx (&ctx);
12405 fold_checksum_tree (op1, &ctx, &ht);
12406 md5_finish_ctx (&ctx, checksum_before_op1);
12407 ht.empty ();
12408
12409 md5_init_ctx (&ctx);
12410 fold_checksum_tree (op2, &ctx, &ht);
12411 md5_finish_ctx (&ctx, checksum_before_op2);
12412 ht.empty ();
12413 #endif
12414
12415 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12416 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12417 if (!tem)
12418 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12419
12420 #ifdef ENABLE_FOLD_CHECKING
12421 md5_init_ctx (&ctx);
12422 fold_checksum_tree (op0, &ctx, &ht);
12423 md5_finish_ctx (&ctx, checksum_after_op0);
12424 ht.empty ();
12425
12426 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12427 fold_check_failed (op0, tem);
12428
12429 md5_init_ctx (&ctx);
12430 fold_checksum_tree (op1, &ctx, &ht);
12431 md5_finish_ctx (&ctx, checksum_after_op1);
12432 ht.empty ();
12433
12434 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12435 fold_check_failed (op1, tem);
12436
12437 md5_init_ctx (&ctx);
12438 fold_checksum_tree (op2, &ctx, &ht);
12439 md5_finish_ctx (&ctx, checksum_after_op2);
12440
12441 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12442 fold_check_failed (op2, tem);
12443 #endif
12444 return tem;
12445 }
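/* Usage sketch for the fold_buildN entry points (editor's example):

     tree t = fold_build2_loc (loc, MULT_EXPR, type, x,
			       build_one_cst (type));

   simplifies X * 1 to X without first allocating a MULT_EXPR node,
   and under ENABLE_FOLD_CHECKING also verifies that the operands were
   not modified in the process.  */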
12446
12447 /* Fold a CALL_EXPR of type TYPE, calling FN with the NARGS arguments
12448 in ARGARRAY and a null static chain.
12449 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12450 of type TYPE from the given operands as constructed by build_call_array. */
12451
12452 tree
12453 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12454 int nargs, tree *argarray)
12455 {
12456 tree tem;
12457 #ifdef ENABLE_FOLD_CHECKING
12458 unsigned char checksum_before_fn[16],
12459 checksum_before_arglist[16],
12460 checksum_after_fn[16],
12461 checksum_after_arglist[16];
12462 struct md5_ctx ctx;
12463 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12464 int i;
12465
12466 md5_init_ctx (&ctx);
12467 fold_checksum_tree (fn, &ctx, &ht);
12468 md5_finish_ctx (&ctx, checksum_before_fn);
12469 ht.empty ();
12470
12471 md5_init_ctx (&ctx);
12472 for (i = 0; i < nargs; i++)
12473 fold_checksum_tree (argarray[i], &ctx, &ht);
12474 md5_finish_ctx (&ctx, checksum_before_arglist);
12475 ht.empty ();
12476 #endif
12477
12478 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12479 if (!tem)
12480 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12481
12482 #ifdef ENABLE_FOLD_CHECKING
12483 md5_init_ctx (&ctx);
12484 fold_checksum_tree (fn, &ctx, &ht);
12485 md5_finish_ctx (&ctx, checksum_after_fn);
12486 ht.empty ();
12487
12488 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12489 fold_check_failed (fn, tem);
12490
12491 md5_init_ctx (&ctx);
12492 for (i = 0; i < nargs; i++)
12493 fold_checksum_tree (argarray[i], &ctx, &ht);
12494 md5_finish_ctx (&ctx, checksum_after_arglist);
12495
12496 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12497 fold_check_failed (NULL_TREE, tem);
12498 #endif
12499 return tem;
12500 }
12501
12502 /* Perform constant folding and related simplification of initializer
12503 expressions. These functions behave identically to "fold_buildN" but
12504 ignore potential run-time traps and exceptions that fold must preserve. */
12505
12506 #define START_FOLD_INIT \
12507 int saved_signaling_nans = flag_signaling_nans;\
12508 int saved_trapping_math = flag_trapping_math;\
12509 int saved_rounding_math = flag_rounding_math;\
12510 int saved_trapv = flag_trapv;\
12511 int saved_folding_initializer = folding_initializer;\
12512 flag_signaling_nans = 0;\
12513 flag_trapping_math = 0;\
12514 flag_rounding_math = 0;\
12515 flag_trapv = 0;\
12516 folding_initializer = 1;
12517
12518 #define END_FOLD_INIT \
12519 flag_signaling_nans = saved_signaling_nans;\
12520 flag_trapping_math = saved_trapping_math;\
12521 flag_rounding_math = saved_rounding_math;\
12522 flag_trapv = saved_trapv;\
12523 folding_initializer = saved_folding_initializer;
12524
12525 tree
12526 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12527 tree type, tree op)
12528 {
12529 tree result;
12530 START_FOLD_INIT;
12531
12532 result = fold_build1_loc (loc, code, type, op);
12533
12534 END_FOLD_INIT;
12535 return result;
12536 }
12537
12538 tree
12539 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12540 tree type, tree op0, tree op1)
12541 {
12542 tree result;
12543 START_FOLD_INIT;
12544
12545 result = fold_build2_loc (loc, code, type, op0, op1);
12546
12547 END_FOLD_INIT;
12548 return result;
12549 }
12550
12551 tree
12552 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12553 int nargs, tree *argarray)
12554 {
12555 tree result;
12556 START_FOLD_INIT;
12557
12558 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12559
12560 END_FOLD_INIT;
12561 return result;
12562 }
12563
12564 #undef START_FOLD_INIT
12565 #undef END_FOLD_INIT
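/* Illustration (editor's sketch): with -ftrapping-math in effect, fold
   must leave 1.0 / 0.0 alone because the division may trap at run time,
   but a static initializer cannot trap, so

     fold_build2_initializer_loc (loc, RDIV_EXPR, double_type_node,
				  build_real (double_type_node, dconst1),
				  build_real (double_type_node, dconst0))

   may fold to +Inf where plain fold_build2_loc would have to preserve
   the division.  */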
12566
12567 /* Determine if the first argument is a multiple of the second argument.
12568 Return 0 if it is not, or if we cannot easily determine it to be.
12569
12570 An example of the sort of thing we care about (at this point; this routine
12571 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12572 fold cases do now) is discovering that
12573
12574 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12575
12576 is a multiple of
12577
12578 SAVE_EXPR (J * 8)
12579
12580 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12581
12582 This code also handles discovering that
12583
12584 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12585
12586 is a multiple of 8 so we don't have to worry about dealing with a
12587 possible remainder.
12588
12589 Note that we *look* inside a SAVE_EXPR only to determine how it was
12590 calculated; it is not safe for fold to do much of anything else with the
12591 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12592 at run time. For example, the latter example above *cannot* be implemented
12593 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12594 evaluation time of the original SAVE_EXPR is not necessarily the same at
12595 the time the new expression is evaluated. The only optimization of this
12596 sort that would be valid is changing
12597
12598 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12599
12600 divided by 8 to
12601
12602 SAVE_EXPR (I) * SAVE_EXPR (J)
12603
12604 (where the same SAVE_EXPR (J) is used in the original and the
12605 transformed version). */
12606
12607 int
12608 multiple_of_p (tree type, const_tree top, const_tree bottom)
12609 {
12610 if (operand_equal_p (top, bottom, 0))
12611 return 1;
12612
12613 if (TREE_CODE (type) != INTEGER_TYPE)
12614 return 0;
12615
12616 switch (TREE_CODE (top))
12617 {
12618 case BIT_AND_EXPR:
12619 /* Bitwise AND: if either operand is a multiple of the power-of-two
12620 BOTTOM, its low bits are zero, so TOP is a multiple of BOTTOM. */
12621 if (!integer_pow2p (bottom))
12622 return 0;
12623 /* FALLTHRU */
12624
12625 case MULT_EXPR:
12626 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12627 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12628
12629 case PLUS_EXPR:
12630 case MINUS_EXPR:
12631 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12632 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12633
12634 case LSHIFT_EXPR:
12635 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12636 {
12637 tree op1, t1;
12638
12639 op1 = TREE_OPERAND (top, 1);
12640 /* const_binop may not detect overflow correctly,
12641 so check for it explicitly here. */
12642 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12643 && 0 != (t1 = fold_convert (type,
12644 const_binop (LSHIFT_EXPR,
12645 size_one_node,
12646 op1)))
12647 && !TREE_OVERFLOW (t1))
12648 return multiple_of_p (type, t1, bottom);
12649 }
12650 return 0;
12651
12652 case NOP_EXPR:
12653 /* Can't handle conversions from a non-integral or wider integral type. */
12654 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12655 || (TYPE_PRECISION (type)
12656 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12657 return 0;
12658
12659 /* .. fall through ... */
12660
12661 case SAVE_EXPR:
12662 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12663
12664 case COND_EXPR:
12665 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12666 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12667
12668 case INTEGER_CST:
12669 if (TREE_CODE (bottom) != INTEGER_CST
12670 || integer_zerop (bottom)
12671 || (TYPE_UNSIGNED (type)
12672 && (tree_int_cst_sgn (top) < 0
12673 || tree_int_cst_sgn (bottom) < 0)))
12674 return 0;
12675 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12676 SIGNED);
12677
12678 default:
12679 return 0;
12680 }
12681 }
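/* Examples (editor's illustration), with I of signed integer TYPE:

     multiple_of_p (type, i * 8, 8)   returns 1 (MULT_EXPR case);
     multiple_of_p (type, i + 4, 8)   returns 0, since I need not be
				      a multiple of 8.

   The expressions stand for the corresponding trees; a zero result
   only means the property could not easily be determined.  */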
12682
12683 #define tree_expr_nonnegative_warnv_p(X, Y) \
12684 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12685
12686 #define RECURSE(X) \
12687 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12688
12689 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
12690
12691 static bool
12692 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12693 {
12694 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12695 && truth_value_p (code))
12696 /* Truth values evaluate to 0 or 1, which are nonnegative, unless we
12697 have a signed:1 type (where the values are -1 and 0). */
12698 return true;
12699 return false;
12700 }
12701
12702 /* Return true if (CODE OP0) is known to be non-negative. If the return
12703 value is based on the assumption that signed overflow is undefined,
12704 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12705 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12706
12707 bool
12708 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12709 bool *strict_overflow_p, int depth)
12710 {
12711 if (TYPE_UNSIGNED (type))
12712 return true;
12713
12714 switch (code)
12715 {
12716 case ABS_EXPR:
12717 /* We can't return 1 if flag_wrapv is set because
12718 ABS_EXPR<INT_MIN> = INT_MIN. */
12719 if (!ANY_INTEGRAL_TYPE_P (type))
12720 return true;
12721 if (TYPE_OVERFLOW_UNDEFINED (type))
12722 {
12723 *strict_overflow_p = true;
12724 return true;
12725 }
12726 break;
12727
12728 case NON_LVALUE_EXPR:
12729 case FLOAT_EXPR:
12730 case FIX_TRUNC_EXPR:
12731 return RECURSE (op0);
12732
12733 CASE_CONVERT:
12734 {
12735 tree inner_type = TREE_TYPE (op0);
12736 tree outer_type = type;
12737
12738 if (TREE_CODE (outer_type) == REAL_TYPE)
12739 {
12740 if (TREE_CODE (inner_type) == REAL_TYPE)
12741 return RECURSE (op0);
12742 if (INTEGRAL_TYPE_P (inner_type))
12743 {
12744 if (TYPE_UNSIGNED (inner_type))
12745 return true;
12746 return RECURSE (op0);
12747 }
12748 }
12749 else if (INTEGRAL_TYPE_P (outer_type))
12750 {
12751 if (TREE_CODE (inner_type) == REAL_TYPE)
12752 return RECURSE (op0);
12753 if (INTEGRAL_TYPE_P (inner_type))
12754 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12755 && TYPE_UNSIGNED (inner_type);
12756 }
12757 }
12758 break;
12759
12760 default:
12761 return tree_simple_nonnegative_warnv_p (code, type);
12762 }
12763
12764 /* We don't know the sign of `t', so be conservative and return false. */
12765 return false;
12766 }
12767
12768 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12769 value is based on the assumption that signed overflow is undefined,
12770 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12771 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12772
12773 bool
12774 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12775 tree op1, bool *strict_overflow_p,
12776 int depth)
12777 {
12778 if (TYPE_UNSIGNED (type))
12779 return true;
12780
12781 switch (code)
12782 {
12783 case POINTER_PLUS_EXPR:
12784 case PLUS_EXPR:
12785 if (FLOAT_TYPE_P (type))
12786 return RECURSE (op0) && RECURSE (op1);
12787
12788 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12789 both unsigned and at least 2 bits shorter than the result. */
12790 if (TREE_CODE (type) == INTEGER_TYPE
12791 && TREE_CODE (op0) == NOP_EXPR
12792 && TREE_CODE (op1) == NOP_EXPR)
12793 {
12794 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12795 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12796 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12797 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12798 {
12799 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12800 TYPE_PRECISION (inner2)) + 1;
12801 return prec < TYPE_PRECISION (type);
12802 }
12803 }
12804 break;
12805
12806 case MULT_EXPR:
12807 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12808 {
12809 /* x * x is always non-negative for floating-point x,
12810 or when signed overflow is undefined. */
12811 if (operand_equal_p (op0, op1, 0)
12812 || (RECURSE (op0) && RECURSE (op1)))
12813 {
12814 if (ANY_INTEGRAL_TYPE_P (type)
12815 && TYPE_OVERFLOW_UNDEFINED (type))
12816 *strict_overflow_p = true;
12817 return true;
12818 }
12819 }
12820
12821 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12822 both unsigned and their combined width is less than that of the result. */
12823 if (TREE_CODE (type) == INTEGER_TYPE
12824 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12825 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12826 {
12827 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12828 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12829 : TREE_TYPE (op0);
12830 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12831 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12832 : TREE_TYPE (op1);
12833
12834 bool unsigned0 = TYPE_UNSIGNED (inner0);
12835 bool unsigned1 = TYPE_UNSIGNED (inner1);
12836
12837 if (TREE_CODE (op0) == INTEGER_CST)
12838 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12839
12840 if (TREE_CODE (op1) == INTEGER_CST)
12841 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12842
12843 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12844 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12845 {
12846 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12847 ? tree_int_cst_min_precision (op0, UNSIGNED)
12848 : TYPE_PRECISION (inner0);
12849
12850 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12851 ? tree_int_cst_min_precision (op1, UNSIGNED)
12852 : TYPE_PRECISION (inner1);
12853
12854 return precision0 + precision1 < TYPE_PRECISION (type);
12855 }
12856 }
12857 return false;
12858
12859 case BIT_AND_EXPR:
12860 case MAX_EXPR:
12861 return RECURSE (op0) || RECURSE (op1);
12862
12863 case BIT_IOR_EXPR:
12864 case BIT_XOR_EXPR:
12865 case MIN_EXPR:
12866 case RDIV_EXPR:
12867 case TRUNC_DIV_EXPR:
12868 case CEIL_DIV_EXPR:
12869 case FLOOR_DIV_EXPR:
12870 case ROUND_DIV_EXPR:
12871 return RECURSE (op0) && RECURSE (op1);
12872
12873 case TRUNC_MOD_EXPR:
12874 return RECURSE (op0);
12875
12876 case FLOOR_MOD_EXPR:
12877 return RECURSE (op1);
12878
12879 case CEIL_MOD_EXPR:
12880 case ROUND_MOD_EXPR:
12881 default:
12882 return tree_simple_nonnegative_warnv_p (code, type);
12883 }
12884
12885 /* We don't know the sign of `t', so be conservative and return false. */
12886 return false;
12887 }
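/* Worked example for the zero-extension cases above (editor's sketch):
   with A and B of type unsigned char (8 bits) widened to 32-bit int,

     (int) a + (int) b   needs at most MAX (8, 8) + 1 == 9 bits  < 32;
     (int) a * (int) b   needs at most 8 + 8 == 16 bits	 < 32;

   so both results are known non-negative with no overflow assumption.  */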
12888
12889 /* Return true if T is known to be non-negative. If the return
12890 value is based on the assumption that signed overflow is undefined,
12891 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12892 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12893
12894 bool
12895 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12896 {
12897 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12898 return true;
12899
12900 switch (TREE_CODE (t))
12901 {
12902 case INTEGER_CST:
12903 return tree_int_cst_sgn (t) >= 0;
12904
12905 case REAL_CST:
12906 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12907
12908 case FIXED_CST:
12909 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12910
12911 case COND_EXPR:
12912 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12913
12914 case SSA_NAME:
12915 /* Limit the depth of recursion to avoid quadratic behavior.
12916 This is expected to catch almost all occurrences in practice.
12917 If this code misses important cases that unbounded recursion
12918 would not, passes that need this information could be revised
12919 to provide it through dataflow propagation. */
12920 return (!name_registered_for_update_p (t)
12921 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12922 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12923 strict_overflow_p, depth));
12924
12925 default:
12926 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12927 }
12928 }
12929
12930 /* Return true if a call to FN with arguments ARG0 and ARG1, of type
12931 TYPE, is known to be non-negative. If the return value is based on
12932 the assumption that signed overflow is undefined, set *STRICT_OVERFLOW_P
12933 to true; otherwise, don't change it. DEPTH is the current nesting depth of the query. */
12934
12935 bool
12936 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12937 bool *strict_overflow_p, int depth)
12938 {
12939 switch (fn)
12940 {
12941 CASE_CFN_ACOS:
12942 CASE_CFN_ACOSH:
12943 CASE_CFN_CABS:
12944 CASE_CFN_COSH:
12945 CASE_CFN_ERFC:
12946 CASE_CFN_EXP:
12947 CASE_CFN_EXP10:
12948 CASE_CFN_EXP2:
12949 CASE_CFN_FABS:
12950 CASE_CFN_FDIM:
12951 CASE_CFN_HYPOT:
12952 CASE_CFN_POW10:
12953 CASE_CFN_FFS:
12954 CASE_CFN_PARITY:
12955 CASE_CFN_POPCOUNT:
12956 CASE_CFN_CLZ:
12957 CASE_CFN_CLRSB:
12958 case CFN_BUILT_IN_BSWAP32:
12959 case CFN_BUILT_IN_BSWAP64:
12960 /* Always true. */
12961 return true;
12962
12963 CASE_CFN_SQRT:
12964 /* sqrt(-0.0) is -0.0. */
12965 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12966 return true;
12967 return RECURSE (arg0);
12968
12969 CASE_CFN_ASINH:
12970 CASE_CFN_ATAN:
12971 CASE_CFN_ATANH:
12972 CASE_CFN_CBRT:
12973 CASE_CFN_CEIL:
12974 CASE_CFN_ERF:
12975 CASE_CFN_EXPM1:
12976 CASE_CFN_FLOOR:
12977 CASE_CFN_FMOD:
12978 CASE_CFN_FREXP:
12979 CASE_CFN_ICEIL:
12980 CASE_CFN_IFLOOR:
12981 CASE_CFN_IRINT:
12982 CASE_CFN_IROUND:
12983 CASE_CFN_LCEIL:
12984 CASE_CFN_LDEXP:
12985 CASE_CFN_LFLOOR:
12986 CASE_CFN_LLCEIL:
12987 CASE_CFN_LLFLOOR:
12988 CASE_CFN_LLRINT:
12989 CASE_CFN_LLROUND:
12990 CASE_CFN_LRINT:
12991 CASE_CFN_LROUND:
12992 CASE_CFN_MODF:
12993 CASE_CFN_NEARBYINT:
12994 CASE_CFN_RINT:
12995 CASE_CFN_ROUND:
12996 CASE_CFN_SCALB:
12997 CASE_CFN_SCALBLN:
12998 CASE_CFN_SCALBN:
12999 CASE_CFN_SIGNBIT:
13000 CASE_CFN_SIGNIFICAND:
13001 CASE_CFN_SINH:
13002 CASE_CFN_TANH:
13003 CASE_CFN_TRUNC:
13004 /* True if the 1st argument is nonnegative. */
13005 return RECURSE (arg0);
13006
13007 CASE_CFN_FMAX:
13008 /* True if the 1st OR 2nd arguments are nonnegative. */
13009 return RECURSE (arg0) || RECURSE (arg1);
13010
13011 CASE_CFN_FMIN:
13012 /* True if the 1st AND 2nd arguments are nonnegative. */
13013 return RECURSE (arg0) && RECURSE (arg1);
13014
13015 CASE_CFN_COPYSIGN:
13016 /* True if the 2nd argument is nonnegative. */
13017 return RECURSE (arg1);
13018
13019 CASE_CFN_POWI:
13020 /* True if the 1st argument is nonnegative or the second
13021 argument is an even integer. */
13022 if (TREE_CODE (arg1) == INTEGER_CST
13023 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13024 return true;
13025 return RECURSE (arg0);
13026
13027 CASE_CFN_POW:
13028 /* True if the 1st argument is nonnegative or the second
13029 argument is an even integer-valued real. */
13030 if (TREE_CODE (arg1) == REAL_CST)
13031 {
13032 REAL_VALUE_TYPE c;
13033 HOST_WIDE_INT n;
13034
13035 c = TREE_REAL_CST (arg1);
13036 n = real_to_integer (&c);
13037 if ((n & 1) == 0)
13038 {
13039 REAL_VALUE_TYPE cint;
13040 real_from_integer (&cint, VOIDmode, n, SIGNED);
13041 if (real_identical (&c, &cint))
13042 return true;
13043 }
13044 }
13045 return RECURSE (arg0);
13046
13047 default:
13048 break;
13049 }
13050 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13051 }
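/* Example for the POW case above (editor's illustration): pow (x, 2.0)
   is treated as non-negative for any X because the exponent is an even
   integer-valued real, whereas pow (x, 3.0) is non-negative only when
   X itself can be shown non-negative.  */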
13052
13053 /* Return true if T is known to be non-negative. If the return
13054 value is based on the assumption that signed overflow is undefined,
13055 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13056 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13057
13058 static bool
13059 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13060 {
13061 enum tree_code code = TREE_CODE (t);
13062 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13063 return true;
13064
13065 switch (code)
13066 {
13067 case TARGET_EXPR:
13068 {
13069 tree temp = TARGET_EXPR_SLOT (t);
13070 t = TARGET_EXPR_INITIAL (t);
13071
13072 /* If the initializer is non-void, then it's a normal expression
13073 that will be assigned to the slot. */
13074 if (!VOID_TYPE_P (t))
13075 return RECURSE (t);
13076
13077 /* Otherwise, the initializer sets the slot in some way. One common
13078 way is an assignment statement at the end of the initializer. */
13079 while (1)
13080 {
13081 if (TREE_CODE (t) == BIND_EXPR)
13082 t = expr_last (BIND_EXPR_BODY (t));
13083 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13084 || TREE_CODE (t) == TRY_CATCH_EXPR)
13085 t = expr_last (TREE_OPERAND (t, 0));
13086 else if (TREE_CODE (t) == STATEMENT_LIST)
13087 t = expr_last (t);
13088 else
13089 break;
13090 }
13091 if (TREE_CODE (t) == MODIFY_EXPR
13092 && TREE_OPERAND (t, 0) == temp)
13093 return RECURSE (TREE_OPERAND (t, 1));
13094
13095 return false;
13096 }
13097
13098 case CALL_EXPR:
13099 {
13100 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13101 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13102
13103 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13104 get_call_combined_fn (t),
13105 arg0,
13106 arg1,
13107 strict_overflow_p, depth);
13108 }
13109 case COMPOUND_EXPR:
13110 case MODIFY_EXPR:
13111 return RECURSE (TREE_OPERAND (t, 1));
13112
13113 case BIND_EXPR:
13114 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13115
13116 case SAVE_EXPR:
13117 return RECURSE (TREE_OPERAND (t, 0));
13118
13119 default:
13120 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13121 }
13122 }
13123
13124 #undef RECURSE
13125 #undef tree_expr_nonnegative_warnv_p
13126
13127 /* Return true if T is known to be non-negative. If the return
13128 value is based on the assumption that signed overflow is undefined,
13129 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13130 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13131
13132 bool
13133 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13134 {
13135 enum tree_code code;
13136 if (t == error_mark_node)
13137 return false;
13138
13139 code = TREE_CODE (t);
13140 switch (TREE_CODE_CLASS (code))
13141 {
13142 case tcc_binary:
13143 case tcc_comparison:
13144 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13145 TREE_TYPE (t),
13146 TREE_OPERAND (t, 0),
13147 TREE_OPERAND (t, 1),
13148 strict_overflow_p, depth);
13149
13150 case tcc_unary:
13151 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13152 TREE_TYPE (t),
13153 TREE_OPERAND (t, 0),
13154 strict_overflow_p, depth);
13155
13156 case tcc_constant:
13157 case tcc_declaration:
13158 case tcc_reference:
13159 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13160
13161 default:
13162 break;
13163 }
13164
13165 switch (code)
13166 {
13167 case TRUTH_AND_EXPR:
13168 case TRUTH_OR_EXPR:
13169 case TRUTH_XOR_EXPR:
13170 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13171 TREE_TYPE (t),
13172 TREE_OPERAND (t, 0),
13173 TREE_OPERAND (t, 1),
13174 strict_overflow_p, depth);
13175 case TRUTH_NOT_EXPR:
13176 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13177 TREE_TYPE (t),
13178 TREE_OPERAND (t, 0),
13179 strict_overflow_p, depth);
13180
13181 case COND_EXPR:
13182 case CONSTRUCTOR:
13183 case OBJ_TYPE_REF:
13184 case ASSERT_EXPR:
13185 case ADDR_EXPR:
13186 case WITH_SIZE_EXPR:
13187 case SSA_NAME:
13188 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13189
13190 default:
13191 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13192 }
13193 }
13194
13195 /* Return true if `t' is known to be non-negative. Handle warnings
13196 about undefined signed overflow. */
13197
13198 bool
13199 tree_expr_nonnegative_p (tree t)
13200 {
13201 bool ret, strict_overflow_p;
13202
13203 strict_overflow_p = false;
13204 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13205 if (strict_overflow_p)
13206 fold_overflow_warning (("assuming signed overflow does not occur when "
13207 "determining that expression is always "
13208 "non-negative"),
13209 WARN_STRICT_OVERFLOW_MISC);
13210 return ret;
13211 }
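/* Usage sketch (editor's example): for a signed int expression X,

     tree_expr_nonnegative_p (fold_build2 (MULT_EXPR, integer_type_node,
					   x, x))

   may return true on the assumption that signed overflow is undefined,
   in which case the warning above can fire under -Wstrict-overflow.  */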
13212
13213
13214 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13215 For floating point we further ensure that T is not denormal.
13216 Similar logic is present in nonzero_address in rtlanal.h.
13217
13218 If the return value is based on the assumption that signed overflow
13219 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13220 change *STRICT_OVERFLOW_P. */
13221
13222 bool
13223 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13224 bool *strict_overflow_p)
13225 {
13226 switch (code)
13227 {
13228 case ABS_EXPR:
13229 return tree_expr_nonzero_warnv_p (op0,
13230 strict_overflow_p);
13231
13232 case NOP_EXPR:
13233 {
13234 tree inner_type = TREE_TYPE (op0);
13235 tree outer_type = type;
13236
13237 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13238 && tree_expr_nonzero_warnv_p (op0,
13239 strict_overflow_p));
13240 }
13241 break;
13242
13243 case NON_LVALUE_EXPR:
13244 return tree_expr_nonzero_warnv_p (op0,
13245 strict_overflow_p);
13246
13247 default:
13248 break;
13249 }
13250
13251 return false;
13252 }
13253
13254 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13255 For floating point we further ensure that T is not denormal.
13256 Similar logic is present in nonzero_address in rtlanal.h.
13257
13258 If the return value is based on the assumption that signed overflow
13259 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13260 change *STRICT_OVERFLOW_P. */
13261
13262 bool
13263 tree_binary_nonzero_warnv_p (enum tree_code code,
13264 tree type,
13265 tree op0,
13266 tree op1, bool *strict_overflow_p)
13267 {
13268 bool sub_strict_overflow_p;
13269 switch (code)
13270 {
13271 case POINTER_PLUS_EXPR:
13272 case PLUS_EXPR:
13273 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13274 {
13275 /* In the presence of negative values it is hard
13276 to say anything definite. */
13277 sub_strict_overflow_p = false;
13278 if (!tree_expr_nonnegative_warnv_p (op0,
13279 &sub_strict_overflow_p)
13280 || !tree_expr_nonnegative_warnv_p (op1,
13281 &sub_strict_overflow_p))
13282 return false;
13283 /* One of the operands must be positive and the other non-negative. */
13284 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13285 overflows, on a two's-complement machine the sum of two
13286 nonnegative numbers can never be zero. */
13287 return (tree_expr_nonzero_warnv_p (op0,
13288 strict_overflow_p)
13289 || tree_expr_nonzero_warnv_p (op1,
13290 strict_overflow_p));
13291 }
13292 break;
13293
13294 case MULT_EXPR:
13295 if (TYPE_OVERFLOW_UNDEFINED (type))
13296 {
13297 if (tree_expr_nonzero_warnv_p (op0,
13298 strict_overflow_p)
13299 && tree_expr_nonzero_warnv_p (op1,
13300 strict_overflow_p))
13301 {
13302 *strict_overflow_p = true;
13303 return true;
13304 }
13305 }
13306 break;
13307
13308 case MIN_EXPR:
13309 sub_strict_overflow_p = false;
13310 if (tree_expr_nonzero_warnv_p (op0,
13311 &sub_strict_overflow_p)
13312 && tree_expr_nonzero_warnv_p (op1,
13313 &sub_strict_overflow_p))
13314 {
13315 if (sub_strict_overflow_p)
13316 *strict_overflow_p = true;
13317 }
13318 break;
13319
13320 case MAX_EXPR:
13321 sub_strict_overflow_p = false;
13322 if (tree_expr_nonzero_warnv_p (op0,
13323 &sub_strict_overflow_p))
13324 {
13325 if (sub_strict_overflow_p)
13326 *strict_overflow_p = true;
13327
13328 /* When both operands are nonzero, MAX must be too. */
13329 if (tree_expr_nonzero_warnv_p (op1,
13330 strict_overflow_p))
13331 return true;
13332
13333 /* MAX where operand 0 is positive is positive. */
13334 return tree_expr_nonnegative_warnv_p (op0,
13335 strict_overflow_p);
13336 }
13337 /* MAX where operand 1 is positive is positive. */
13338 else if (tree_expr_nonzero_warnv_p (op1,
13339 &sub_strict_overflow_p)
13340 && tree_expr_nonnegative_warnv_p (op1,
13341 &sub_strict_overflow_p))
13342 {
13343 if (sub_strict_overflow_p)
13344 *strict_overflow_p = true;
13345 return true;
13346 }
13347 break;
13348
13349 case BIT_IOR_EXPR:
13350 return (tree_expr_nonzero_warnv_p (op1,
13351 strict_overflow_p)
13352 || tree_expr_nonzero_warnv_p (op0,
13353 strict_overflow_p));
13354
13355 default:
13356 break;
13357 }
13358
13359 return false;
13360 }
13361
13362 /* Return true when T is an address and is known to be nonzero.
13363 For floating point we further ensure that T is not denormal.
13364 Similar logic is present in nonzero_address in rtlanal.h.
13365
13366 If the return value is based on the assumption that signed overflow
13367 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13368 change *STRICT_OVERFLOW_P. */
13369
13370 bool
13371 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13372 {
13373 bool sub_strict_overflow_p;
13374 switch (TREE_CODE (t))
13375 {
13376 case INTEGER_CST:
13377 return !integer_zerop (t);
13378
13379 case ADDR_EXPR:
13380 {
13381 tree base = TREE_OPERAND (t, 0);
13382
13383 if (!DECL_P (base))
13384 base = get_base_address (base);
13385
13386 if (!base)
13387 return false;
13388
13389 /* For objects in the symbol table, check whether we know they are non-zero.
13390 Don't do anything for variables and functions before the symtab is built;
13391 it is quite possible that they will be declared weak later. */
13392 if (DECL_P (base) && decl_in_symtab_p (base))
13393 {
13394 struct symtab_node *symbol;
13395
13396 symbol = symtab_node::get_create (base);
13397 if (symbol)
13398 return symbol->nonzero_address ();
13399 else
13400 return false;
13401 }
13402
13403 /* Function-local objects are never NULL. */
13404 if (DECL_P (base)
13405 && (DECL_CONTEXT (base)
13406 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13407 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13408 return true;
13409
13410 /* Constants are never weak. */
13411 if (CONSTANT_CLASS_P (base))
13412 return true;
13413
13414 return false;
13415 }
13416
13417 case COND_EXPR:
13418 sub_strict_overflow_p = false;
13419 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13420 &sub_strict_overflow_p)
13421 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13422 &sub_strict_overflow_p))
13423 {
13424 if (sub_strict_overflow_p)
13425 *strict_overflow_p = true;
13426 return true;
13427 }
13428 break;
13429
13430 default:
13431 break;
13432 }
13433 return false;
13434 }
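/* Examples for the ADDR_EXPR case above (editor's illustration): the
   address of a function-local variable or of a STRING_CST is always
   nonzero, and so is the address of a global once the symbol table can
   prove it; the address of a weak symbol, by contrast, may legitimately
   be null, so false is returned for it.  */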
13435
13436 #define integer_valued_real_p(X) \
13437 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13438
13439 #define RECURSE(X) \
13440 ((integer_valued_real_p) (X, depth + 1))
13441
13442 /* Return true if the floating point result of (CODE OP0) has an
13443 integer value. We also allow +Inf, -Inf and NaN to be considered
13444 integer values. Return false for signaling NaN.
13445
13446 DEPTH is the current nesting depth of the query. */
13447
13448 bool
13449 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13450 {
13451 switch (code)
13452 {
13453 case FLOAT_EXPR:
13454 return true;
13455
13456 case ABS_EXPR:
13457 return RECURSE (op0);
13458
13459 CASE_CONVERT:
13460 {
13461 tree type = TREE_TYPE (op0);
13462 if (TREE_CODE (type) == INTEGER_TYPE)
13463 return true;
13464 if (TREE_CODE (type) == REAL_TYPE)
13465 return RECURSE (op0);
13466 break;
13467 }
13468
13469 default:
13470 break;
13471 }
13472 return false;
13473 }
13474
13475 /* Return true if the floating point result of (CODE OP0 OP1) has an
13476 integer value. We also allow +Inf, -Inf and NaN to be considered
13477 integer values. Return false for signaling NaN.
13478
13479 DEPTH is the current nesting depth of the query. */
13480
13481 bool
13482 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13483 {
13484 switch (code)
13485 {
13486 case PLUS_EXPR:
13487 case MINUS_EXPR:
13488 case MULT_EXPR:
13489 case MIN_EXPR:
13490 case MAX_EXPR:
13491 return RECURSE (op0) && RECURSE (op1);
13492
13493 default:
13494 break;
13495 }
13496 return false;
13497 }
13498
13499 /* Return true if the floating point result of calling FN with arguments
13500 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13501 considered integer values. Return false for signaling NaN. If FN
13502 takes fewer than 2 arguments, the remaining ARGn are null.
13503
13504 DEPTH is the current nesting depth of the query. */
13505
13506 bool
13507 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13508 {
13509 switch (fn)
13510 {
13511 CASE_CFN_CEIL:
13512 CASE_CFN_FLOOR:
13513 CASE_CFN_NEARBYINT:
13514 CASE_CFN_RINT:
13515 CASE_CFN_ROUND:
13516 CASE_CFN_TRUNC:
13517 return true;
13518
13519 CASE_CFN_FMIN:
13520 CASE_CFN_FMAX:
13521 return RECURSE (arg0) && RECURSE (arg1);
13522
13523 default:
13524 break;
13525 }
13526 return false;
13527 }
13528
13529 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13530 has an integer value. We also allow +Inf, -Inf and NaN to be
13531 considered integer values. Return false for signaling NaN.
13532
13533 DEPTH is the current nesting depth of the query. */
13534
13535 bool
13536 integer_valued_real_single_p (tree t, int depth)
13537 {
13538 switch (TREE_CODE (t))
13539 {
13540 case REAL_CST:
13541 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13542
13543 case COND_EXPR:
13544 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13545
13546 case SSA_NAME:
13547 /* Limit the depth of recursion to avoid quadratic behavior.
13548 This is expected to catch almost all occurrences in practice.
13549 If this code misses important cases that unbounded recursion
13550 would not, passes that need this information could be revised
13551 to provide it through dataflow propagation. */
13552 return (!name_registered_for_update_p (t)
13553 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13554 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13555 depth));
13556
13557 default:
13558 break;
13559 }
13560 return false;
13561 }
13562
13563 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13564 has an integer value. We also allow +Inf, -Inf and NaN to be
13565 considered integer values. Return false for signaling NaN.
13566
13567 DEPTH is the current nesting depth of the query. */
13568
13569 static bool
13570 integer_valued_real_invalid_p (tree t, int depth)
13571 {
13572 switch (TREE_CODE (t))
13573 {
13574 case COMPOUND_EXPR:
13575 case MODIFY_EXPR:
13576 case BIND_EXPR:
13577 return RECURSE (TREE_OPERAND (t, 1));
13578
13579 case SAVE_EXPR:
13580 return RECURSE (TREE_OPERAND (t, 0));
13581
13582 default:
13583 break;
13584 }
13585 return false;
13586 }
13587
13588 #undef RECURSE
13589 #undef integer_valued_real_p
13590
13591 /* Return true if the floating point expression T has an integer value.
13592 We also allow +Inf, -Inf and NaN to be considered integer values.
13593 Return false for signaling NaN.
13594
13595 DEPTH is the current nesting depth of the query. */
13596
13597 bool
13598 integer_valued_real_p (tree t, int depth)
13599 {
13600 if (t == error_mark_node)
13601 return false;
13602
13603 tree_code code = TREE_CODE (t);
13604 switch (TREE_CODE_CLASS (code))
13605 {
13606 case tcc_binary:
13607 case tcc_comparison:
13608 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13609 TREE_OPERAND (t, 1), depth);
13610
13611 case tcc_unary:
13612 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13613
13614 case tcc_constant:
13615 case tcc_declaration:
13616 case tcc_reference:
13617 return integer_valued_real_single_p (t, depth);
13618
13619 default:
13620 break;
13621 }
13622
13623 switch (code)
13624 {
13625 case COND_EXPR:
13626 case SSA_NAME:
13627 return integer_valued_real_single_p (t, depth);
13628
13629 case CALL_EXPR:
13630 {
13631 tree arg0 = (call_expr_nargs (t) > 0
13632 ? CALL_EXPR_ARG (t, 0)
13633 : NULL_TREE);
13634 tree arg1 = (call_expr_nargs (t) > 1
13635 ? CALL_EXPR_ARG (t, 1)
13636 : NULL_TREE);
13637 return integer_valued_real_call_p (get_call_combined_fn (t),
13638 arg0, arg1, depth);
13639 }
13640
13641 default:
13642 return integer_valued_real_invalid_p (t, depth);
13643 }
13644 }
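/* Examples (editor's sketch): (double) i for integral I is integer
   valued via the FLOAT_EXPR case, floor (x) via CASE_CFN_FLOOR, and
   x + 0.5 is not provably integer valued, so integer_valued_real_p
   returns false for it.  */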
13645
13646 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13647 attempt to fold the expression to a constant without modifying TYPE,
13648 OP0 or OP1.
13649
13650 If the expression can be simplified to a constant, return the
13651 constant; otherwise return NULL_TREE. */
13653
13654 tree
13655 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13656 {
13657 tree tem = fold_binary (code, type, op0, op1);
13658 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13659 }
13660
13661 /* Given the components of a unary expression CODE, TYPE and OP0,
13662 attempt to fold the expression to a constant without modifying
13663 TYPE or OP0.
13664
13665 If the expression can be simplified to a constant, return the
13666 constant; otherwise return NULL_TREE. */
13668
13669 tree
13670 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13671 {
13672 tree tem = fold_unary (code, type, op0);
13673 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13674 }
13675
13676 /* If EXP represents referencing an element in a constant string
13677 (either via pointer arithmetic or array indexing), return the
13678 tree representing the value accessed, otherwise return NULL. */
13679
13680 tree
13681 fold_read_from_constant_string (tree exp)
13682 {
13683 if ((TREE_CODE (exp) == INDIRECT_REF
13684 || TREE_CODE (exp) == ARRAY_REF)
13685 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13686 {
13687 tree exp1 = TREE_OPERAND (exp, 0);
13688 tree index;
13689 tree string;
13690 location_t loc = EXPR_LOCATION (exp);
13691
13692 if (TREE_CODE (exp) == INDIRECT_REF)
13693 string = string_constant (exp1, &index);
13694 else
13695 {
13696 tree low_bound = array_ref_low_bound (exp);
13697 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13698
13699 /* Optimize the special case of a zero lower bound.
13700
13701 We convert the low_bound to sizetype to avoid some problems
13702 with constant folding. (E.g. suppose the lower bound is 1,
13703 and its mode is QI. Without the conversion, (ARRAY
13704 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13705 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13706 if (! integer_zerop (low_bound))
13707 index = size_diffop_loc (loc, index,
13708 fold_convert_loc (loc, sizetype, low_bound));
13709
13710 string = exp1;
13711 }
13712
13713 if (string
13714 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13715 && TREE_CODE (string) == STRING_CST
13716 && TREE_CODE (index) == INTEGER_CST
13717 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13718 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13719 == MODE_INT)
13720 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13721 return build_int_cst_type (TREE_TYPE (exp),
13722 (TREE_STRING_POINTER (string)
13723 [TREE_INT_CST_LOW (index)]));
13724 }
13725 return NULL;
13726 }
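/* Example (editor's illustration): for "abc"[1], EXP is an ARRAY_REF of
   a STRING_CST with index 1, and the function returns the INTEGER_CST
   98 ('b'); an out-of-range index such as "abc"[7] fails the length
   check and NULL is returned.  */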
13727
13728 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13729 an integer constant, real, or fixed-point constant.
13730
13731 TYPE is the type of the result. */
13732
13733 static tree
13734 fold_negate_const (tree arg0, tree type)
13735 {
13736 tree t = NULL_TREE;
13737
13738 switch (TREE_CODE (arg0))
13739 {
13740 case INTEGER_CST:
13741 {
13742 bool overflow;
13743 wide_int val = wi::neg (arg0, &overflow);
13744 t = force_fit_type (type, val, 1,
13745 (overflow | TREE_OVERFLOW (arg0))
13746 && !TYPE_UNSIGNED (type));
13747 break;
13748 }
13749
13750 case REAL_CST:
13751 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13752 break;
13753
13754 case FIXED_CST:
13755 {
13756 FIXED_VALUE_TYPE f;
13757 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13758 &(TREE_FIXED_CST (arg0)), NULL,
13759 TYPE_SATURATING (type));
13760 t = build_fixed (type, f);
13761 /* Propagate overflow flags. */
13762 if (overflow_p | TREE_OVERFLOW (arg0))
13763 TREE_OVERFLOW (t) = 1;
13764 break;
13765 }
13766
13767 default:
13768 gcc_unreachable ();
13769 }
13770
13771 return t;
13772 }
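/* Example (editor's sketch): negating the INTEGER_CST INT_MIN in a
   signed 32-bit type wraps back to INT_MIN; wi::neg reports the
   overflow and force_fit_type sets TREE_OVERFLOW on the result, which
   tells later folders the negation was not exact.  */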
13773
13774 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13775 an integer constant or real constant.
13776
13777 TYPE is the type of the result. */
13778
13779 tree
13780 fold_abs_const (tree arg0, tree type)
13781 {
13782 tree t = NULL_TREE;
13783
13784 switch (TREE_CODE (arg0))
13785 {
13786 case INTEGER_CST:
13787 {
13788 /* If the value is unsigned or non-negative, then the absolute value
13789 is the same as the ordinary value. */
13790 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13791 t = arg0;
13792
13793 /* If the value is negative, then the absolute value is
13794 its negation. */
13795 else
13796 {
13797 bool overflow;
13798 wide_int val = wi::neg (arg0, &overflow);
13799 t = force_fit_type (type, val, -1,
13800 overflow | TREE_OVERFLOW (arg0));
13801 }
13802 }
13803 break;
13804
13805 case REAL_CST:
13806 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13807 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13808 else
13809 t = arg0;
13810 break;
13811
13812 default:
13813 gcc_unreachable ();
13814 }
13815
13816 return t;
13817 }
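
/* Illustrative example (editor's note, not part of the original source):
   fold_abs_const on the signed constant -5 yields 5; on the most
   negative value of the type the negation overflows and the result
   carries TREE_OVERFLOW.  Unsigned or non-negative inputs are returned
   unchanged.  */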
13818
13819 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13820 constant. TYPE is the type of the result. */
13821
13822 static tree
13823 fold_not_const (const_tree arg0, tree type)
13824 {
13825 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13826
13827 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13828 }
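
/* Illustrative example (editor's note, not part of the original source):
   fold_not_const on the signed constant 0 yields -1 (all bits set), and
   on 5 in an 8-bit unsigned type yields 250; any TREE_OVERFLOW flag on
   the input is propagated to the result.  */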
13829
13830 /* Given CODE, a relational operator, the target type TYPE, and two
13831 constant operands OP0 and OP1, return the result of the
13832 relational operation. If the result is not a compile time
13833 constant, then return NULL_TREE. */
13834
13835 static tree
13836 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13837 {
13838 int result, invert;
13839
13840 /* From here on, the only cases we handle are when the result is
13841 known to be a constant. */
13842
13843 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13844 {
13845 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13846 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13847
13848 /* Handle the cases where either operand is a NaN. */
13849 if (real_isnan (c0) || real_isnan (c1))
13850 {
13851 switch (code)
13852 {
13853 case EQ_EXPR:
13854 case ORDERED_EXPR:
13855 result = 0;
13856 break;
13857
13858 case NE_EXPR:
13859 case UNORDERED_EXPR:
13860 case UNLT_EXPR:
13861 case UNLE_EXPR:
13862 case UNGT_EXPR:
13863 case UNGE_EXPR:
13864 case UNEQ_EXPR:
13865 result = 1;
13866 break;
13867
13868 case LT_EXPR:
13869 case LE_EXPR:
13870 case GT_EXPR:
13871 case GE_EXPR:
13872 case LTGT_EXPR:
13873 if (flag_trapping_math)
13874 return NULL_TREE;
13875 result = 0;
13876 break;
13877
13878 default:
13879 gcc_unreachable ();
13880 }
13881
13882 return constant_boolean_node (result, type);
13883 }
13884
13885 return constant_boolean_node (real_compare (code, c0, c1), type);
13886 }
13887
13888 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13889 {
13890 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13891 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13892 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13893 }
13894
13895 /* Handle equality/inequality of complex constants. */
13896 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13897 {
13898 tree rcond = fold_relational_const (code, type,
13899 TREE_REALPART (op0),
13900 TREE_REALPART (op1));
13901 tree icond = fold_relational_const (code, type,
13902 TREE_IMAGPART (op0),
13903 TREE_IMAGPART (op1));
13904 if (code == EQ_EXPR)
13905 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13906 else if (code == NE_EXPR)
13907 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13908 else
13909 return NULL_TREE;
13910 }
13911
13912 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13913 {
13914 unsigned count = VECTOR_CST_NELTS (op0);
13915 tree *elts = XALLOCAVEC (tree, count);
13916 gcc_assert (VECTOR_CST_NELTS (op1) == count
13917 && TYPE_VECTOR_SUBPARTS (type) == count);
13918
13919 for (unsigned i = 0; i < count; i++)
13920 {
13921 tree elem_type = TREE_TYPE (type);
13922 tree elem0 = VECTOR_CST_ELT (op0, i);
13923 tree elem1 = VECTOR_CST_ELT (op1, i);
13924
13925 tree tem = fold_relational_const (code, elem_type,
13926 elem0, elem1);
13927
13928 if (tem == NULL_TREE)
13929 return NULL_TREE;
13930
13931 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13932 }
13933
13934 return build_vector (type, elts);
13935 }
13936
13937 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13938
13939 To compute GT, swap the arguments and do LT.
13940 To compute GE, do LT and invert the result.
13941 To compute LE, swap the arguments, do LT and invert the result.
13942 To compute NE, do EQ and invert the result.
13943
13944 Therefore, the code below must handle only EQ and LT. */
13945
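  /* Worked example (editor's illustration): the constant comparison
     3 > 2 is swapped below into 2 < 3, and 3 >= 2 is rewritten as the
     inverse of 3 < 2, so only tree_int_cst_lt and tree_int_cst_equal
     are ever consulted.  */
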
13946 if (code == LE_EXPR || code == GT_EXPR)
13947 {
13948 std::swap (op0, op1);
13949 code = swap_tree_comparison (code);
13950 }
13951
13952 /* Note that it is safe to invert for real values here because we
13953 have already handled the one case where it matters. */
13954
13955 invert = 0;
13956 if (code == NE_EXPR || code == GE_EXPR)
13957 {
13958 invert = 1;
13959 code = invert_tree_comparison (code, false);
13960 }
13961
13962 /* Compute a result for LT or EQ if args permit;
13963 otherwise return NULL_TREE. */
13964 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13965 {
13966 if (code == EQ_EXPR)
13967 result = tree_int_cst_equal (op0, op1);
13968 else
13969 result = tree_int_cst_lt (op0, op1);
13970 }
13971 else
13972 return NULL_TREE;
13973
13974 if (invert)
13975 result ^= 1;
13976 return constant_boolean_node (result, type);
13977 }
13978
13979 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13980 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13981 itself. */
13982
13983 tree
13984 fold_build_cleanup_point_expr (tree type, tree expr)
13985 {
13986 /* If the expression does not have side effects then we don't have to wrap
13987 it with a cleanup point expression. */
13988 if (!TREE_SIDE_EFFECTS (expr))
13989 return expr;
13990
13991 /* If the expression is a return, check whether the expression inside the
13992 return, or the right-hand side of the modify expression inside the
13993 return, has no side effects. If either has none, we don't need to
13994 wrap the expression in a cleanup point expression. Note we don't check
13995 the left-hand side of the modify because it should always be a return decl. */
13996 if (TREE_CODE (expr) == RETURN_EXPR)
13997 {
13998 tree op = TREE_OPERAND (expr, 0);
13999 if (!op || !TREE_SIDE_EFFECTS (op))
14000 return expr;
14001 op = TREE_OPERAND (op, 1);
14002 if (!TREE_SIDE_EFFECTS (op))
14003 return expr;
14004 }
14005
14006 return build1 (CLEANUP_POINT_EXPR, type, expr);
14007 }
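
/* Illustrative example (editor's note, not part of the original source):
   for "return <retval> = x" the right-hand side x has no side effects,
   so the RETURN_EXPR is returned unwrapped; for "return <retval> = f ()"
   the right-hand side is a call with side effects, so the whole
   expression is wrapped in a CLEANUP_POINT_EXPR.  */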
14008
14009 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14010 of an indirection through OP0, or NULL_TREE if no simplification is
14011 possible. */
14012
14013 tree
14014 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14015 {
14016 tree sub = op0;
14017 tree subtype;
14018
14019 STRIP_NOPS (sub);
14020 subtype = TREE_TYPE (sub);
14021 if (!POINTER_TYPE_P (subtype))
14022 return NULL_TREE;
14023
14024 if (TREE_CODE (sub) == ADDR_EXPR)
14025 {
14026 tree op = TREE_OPERAND (sub, 0);
14027 tree optype = TREE_TYPE (op);
14028 /* *&CONST_DECL -> the value of the const decl. */
14029 if (TREE_CODE (op) == CONST_DECL)
14030 return DECL_INITIAL (op);
14031 /* *&p => p; make sure to handle *&"str"[cst] here. */
14032 if (type == optype)
14033 {
14034 tree fop = fold_read_from_constant_string (op);
14035 if (fop)
14036 return fop;
14037 else
14038 return op;
14039 }
14040 /* *(foo *)&fooarray => fooarray[0] */
14041 else if (TREE_CODE (optype) == ARRAY_TYPE
14042 && type == TREE_TYPE (optype)
14043 && (!in_gimple_form
14044 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14045 {
14046 tree type_domain = TYPE_DOMAIN (optype);
14047 tree min_val = size_zero_node;
14048 if (type_domain && TYPE_MIN_VALUE (type_domain))
14049 min_val = TYPE_MIN_VALUE (type_domain);
14050 if (in_gimple_form
14051 && TREE_CODE (min_val) != INTEGER_CST)
14052 return NULL_TREE;
14053 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14054 NULL_TREE, NULL_TREE);
14055 }
14056 /* *(foo *)&complexfoo => __real__ complexfoo */
14057 else if (TREE_CODE (optype) == COMPLEX_TYPE
14058 && type == TREE_TYPE (optype))
14059 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14060 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14061 else if (TREE_CODE (optype) == VECTOR_TYPE
14062 && type == TREE_TYPE (optype))
14063 {
14064 tree part_width = TYPE_SIZE (type);
14065 tree index = bitsize_int (0);
14066 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14067 }
14068 }
14069
14070 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14071 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14072 {
14073 tree op00 = TREE_OPERAND (sub, 0);
14074 tree op01 = TREE_OPERAND (sub, 1);
14075
14076 STRIP_NOPS (op00);
14077 if (TREE_CODE (op00) == ADDR_EXPR)
14078 {
14079 tree op00type;
14080 op00 = TREE_OPERAND (op00, 0);
14081 op00type = TREE_TYPE (op00);
14082
14083 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14084 if (TREE_CODE (op00type) == VECTOR_TYPE
14085 && type == TREE_TYPE (op00type))
14086 {
14087 HOST_WIDE_INT offset = tree_to_shwi (op01);
14088 tree part_width = TYPE_SIZE (type);
14089 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
14090 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14091 tree index = bitsize_int (indexi);
14092
14093 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14094 return fold_build3_loc (loc,
14095 BIT_FIELD_REF, type, op00,
14096 part_width, index);
14097
14098 }
14099 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14100 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14101 && type == TREE_TYPE (op00type))
14102 {
14103 tree size = TYPE_SIZE_UNIT (type);
14104 if (tree_int_cst_equal (size, op01))
14105 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14106 }
14107 /* ((foo *)&fooarray)[1] => fooarray[1] */
14108 else if (TREE_CODE (op00type) == ARRAY_TYPE
14109 && type == TREE_TYPE (op00type))
14110 {
14111 tree type_domain = TYPE_DOMAIN (op00type);
14112 tree min_val = size_zero_node;
14113 if (type_domain && TYPE_MIN_VALUE (type_domain))
14114 min_val = TYPE_MIN_VALUE (type_domain);
14115 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14116 TYPE_SIZE_UNIT (type));
14117 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14118 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14119 NULL_TREE, NULL_TREE);
14120 }
14121 }
14122 }
14123
14124 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14125 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14126 && type == TREE_TYPE (TREE_TYPE (subtype))
14127 && (!in_gimple_form
14128 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14129 {
14130 tree type_domain;
14131 tree min_val = size_zero_node;
14132 sub = build_fold_indirect_ref_loc (loc, sub);
14133 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14134 if (type_domain && TYPE_MIN_VALUE (type_domain))
14135 min_val = TYPE_MIN_VALUE (type_domain);
14136 if (in_gimple_form
14137 && TREE_CODE (min_val) != INTEGER_CST)
14138 return NULL_TREE;
14139 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14140 NULL_TREE);
14141 }
14142
14143 return NULL_TREE;
14144 }
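
/* Illustrative summary (editor's note, not part of the original source):
   given "int a[4];" on a target where sizeof (int) == 4, the expression
   *(int *)&a folds to a[0] via the ARRAY_TYPE case above, and
   *((int *)&a + 4) folds to a[1], the byte offset 4 being divided by
   TYPE_SIZE_UNIT with EXACT_DIV_EXPR.  */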
14145
14146 /* Builds an expression for an indirection through T, simplifying some
14147 cases. */
14148
14149 tree
14150 build_fold_indirect_ref_loc (location_t loc, tree t)
14151 {
14152 tree type = TREE_TYPE (TREE_TYPE (t));
14153 tree sub = fold_indirect_ref_1 (loc, type, t);
14154
14155 if (sub)
14156 return sub;
14157
14158 return build1_loc (loc, INDIRECT_REF, type, t);
14159 }
14160
14161 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14162
14163 tree
14164 fold_indirect_ref_loc (location_t loc, tree t)
14165 {
14166 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14167
14168 if (sub)
14169 return sub;
14170 else
14171 return t;
14172 }
14173
14174 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14175 whose result is ignored. The type of the returned tree need not be
14176 the same as the original expression. */
14177
14178 tree
14179 fold_ignored_result (tree t)
14180 {
14181 if (!TREE_SIDE_EFFECTS (t))
14182 return integer_zero_node;
14183
14184 for (;;)
14185 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14186 {
14187 case tcc_unary:
14188 t = TREE_OPERAND (t, 0);
14189 break;
14190
14191 case tcc_binary:
14192 case tcc_comparison:
14193 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14194 t = TREE_OPERAND (t, 0);
14195 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14196 t = TREE_OPERAND (t, 1);
14197 else
14198 return t;
14199 break;
14200
14201 case tcc_expression:
14202 switch (TREE_CODE (t))
14203 {
14204 case COMPOUND_EXPR:
14205 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14206 return t;
14207 t = TREE_OPERAND (t, 0);
14208 break;
14209
14210 case COND_EXPR:
14211 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14212 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14213 return t;
14214 t = TREE_OPERAND (t, 0);
14215 break;
14216
14217 default:
14218 return t;
14219 }
14220 break;
14221
14222 default:
14223 return t;
14224 }
14225 }
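
/* Illustrative example (editor's note, not part of the original source):
   for the ignored COMPOUND_EXPR (f (), x + 1), the second operand has
   no side effects, so the loop above strips the expression down to the
   call f (); an expression with no side effects at all is replaced by
   integer_zero_node.  */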
14226
14227 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14228
14229 tree
14230 round_up_loc (location_t loc, tree value, unsigned int divisor)
14231 {
14232 tree div = NULL_TREE;
14233
14234 if (divisor == 1)
14235 return value;
14236
14237 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14238 have to do anything. Only do this when VALUE is not a constant,
14239 because for a constant this check is more expensive than simply
14240 performing the rounding. */
14241 if (TREE_CODE (value) != INTEGER_CST)
14242 {
14243 div = build_int_cst (TREE_TYPE (value), divisor);
14244
14245 if (multiple_of_p (TREE_TYPE (value), value, div))
14246 return value;
14247 }
14248
14249 /* If divisor is a power of two, simplify this to bit manipulation. */
14250 if (divisor == (divisor & -divisor))
14251 {
14252 if (TREE_CODE (value) == INTEGER_CST)
14253 {
14254 wide_int val = value;
14255 bool overflow_p;
14256
14257 if ((val & (divisor - 1)) == 0)
14258 return value;
14259
14260 overflow_p = TREE_OVERFLOW (value);
14261 val += divisor - 1;
14262 val &= - (int) divisor;
14263 if (val == 0)
14264 overflow_p = true;
14265
14266 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14267 }
14268 else
14269 {
14270 tree t;
14271
14272 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14273 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14274 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14275 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14276 }
14277 }
14278 else
14279 {
14280 if (!div)
14281 div = build_int_cst (TREE_TYPE (value), divisor);
14282 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14283 value = size_binop_loc (loc, MULT_EXPR, value, div);
14284 }
14285
14286 return value;
14287 }
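
/* Worked example (editor's illustration, not part of the original
   source): for a power-of-two divisor the rounding is pure bit
   manipulation:

     round_up (13, 8) => (13 + 7) & -8 => 20 & -8 => 16
     round_up (16, 8) => 16 (already a multiple, returned unchanged)

   A non-power-of-two divisor falls back to CEIL_DIV_EXPR followed by
   MULT_EXPR, e.g. round_up (13, 6) => ceil_div (13, 6) * 6 => 18.  */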
14288
14289 /* Likewise, but round down. */
14290
14291 tree
14292 round_down_loc (location_t loc, tree value, int divisor)
14293 {
14294 tree div = NULL_TREE;
14295
14296 gcc_assert (divisor > 0);
14297 if (divisor == 1)
14298 return value;
14299
14300 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14301 have to do anything. Only do this when VALUE is not a constant,
14302 because for a constant this check is more expensive than simply
14303 performing the rounding. */
14304 if (TREE_CODE (value) != INTEGER_CST)
14305 {
14306 div = build_int_cst (TREE_TYPE (value), divisor);
14307
14308 if (multiple_of_p (TREE_TYPE (value), value, div))
14309 return value;
14310 }
14311
14312 /* If divisor is a power of two, simplify this to bit manipulation. */
14313 if (divisor == (divisor & -divisor))
14314 {
14315 tree t;
14316
14317 t = build_int_cst (TREE_TYPE (value), -divisor);
14318 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14319 }
14320 else
14321 {
14322 if (!div)
14323 div = build_int_cst (TREE_TYPE (value), divisor);
14324 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14325 value = size_binop_loc (loc, MULT_EXPR, value, div);
14326 }
14327
14328 return value;
14329 }
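
/* Worked example (editor's illustration, not part of the original
   source): round_down (13, 8) => 13 & -8 => 8, while a divisor such as
   6 takes the FLOOR_DIV_EXPR path: floor_div (13, 6) * 6 => 2 * 6
   => 12.  */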
14330
14331 /* Returns the pointer to the base of the object addressed by EXP and
14332 extracts the information about the offset of the access, storing it
14333 in *PBITPOS and *POFFSET. */
14334
14335 static tree
14336 split_address_to_core_and_offset (tree exp,
14337 HOST_WIDE_INT *pbitpos, tree *poffset)
14338 {
14339 tree core;
14340 machine_mode mode;
14341 int unsignedp, reversep, volatilep;
14342 HOST_WIDE_INT bitsize;
14343 location_t loc = EXPR_LOCATION (exp);
14344
14345 if (TREE_CODE (exp) == ADDR_EXPR)
14346 {
14347 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14348 poffset, &mode, &unsignedp, &reversep,
14349 &volatilep, false);
14350 core = build_fold_addr_expr_loc (loc, core);
14351 }
14352 else
14353 {
14354 core = exp;
14355 *pbitpos = 0;
14356 *poffset = NULL_TREE;
14357 }
14358
14359 return core;
14360 }
14361
14362 /* Returns true if addresses of E1 and E2 differ by a constant, false
14363 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14364
14365 bool
14366 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14367 {
14368 tree core1, core2;
14369 HOST_WIDE_INT bitpos1, bitpos2;
14370 tree toffset1, toffset2, tdiff, type;
14371
14372 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14373 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14374
14375 if (bitpos1 % BITS_PER_UNIT != 0
14376 || bitpos2 % BITS_PER_UNIT != 0
14377 || !operand_equal_p (core1, core2, 0))
14378 return false;
14379
14380 if (toffset1 && toffset2)
14381 {
14382 type = TREE_TYPE (toffset1);
14383 if (type != TREE_TYPE (toffset2))
14384 toffset2 = fold_convert (type, toffset2);
14385
14386 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14387 if (!cst_and_fits_in_hwi (tdiff))
14388 return false;
14389
14390 *diff = int_cst_value (tdiff);
14391 }
14392 else if (toffset1 || toffset2)
14393 {
14394 /* If only one of the offsets is non-constant, the difference cannot
14395 be a constant. */
14396 return false;
14397 }
14398 else
14399 *diff = 0;
14400
14401 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14402 return true;
14403 }
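
/* Illustrative example (editor's note, not part of the original source):
   for "int a[8];" with 32-bit int, &a[5] and &a[2] split to the same
   core &a with bit positions 160 and 64, so *DIFF is set to 12 and true
   is returned; comparing &a[i] (I non-constant) against &a[2] leaves
   one offset non-constant, so the function returns false.  */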
14404
14405 /* Return OFF converted to a pointer offset type suitable as offset for
14406 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14407 tree
14408 convert_to_ptrofftype_loc (location_t loc, tree off)
14409 {
14410 return fold_convert_loc (loc, sizetype, off);
14411 }
14412
14413 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14414 tree
14415 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14416 {
14417 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14418 ptr, convert_to_ptrofftype_loc (loc, off));
14419 }
14420
14421 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14422 tree
14423 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14424 {
14425 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14426 ptr, size_int (off));
14427 }
14428
14429 /* Return a char pointer for a C string if it is a string constant
14430 or the sum of a string constant and an integer constant. */
14431
14432 const char *
14433 c_getstr (tree src)
14434 {
14435 tree offset_node;
14436
14437 src = string_constant (src, &offset_node);
14438 if (src == 0)
14439 return 0;
14440
14441 if (offset_node == 0)
14442 return TREE_STRING_POINTER (src);
14443 else if (!tree_fits_uhwi_p (offset_node)
14444 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14445 return 0;
14446
14447 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
14448 }