1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision etc. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
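/* Illustrative use of these entry points (a hypothetical caller, not
   code from this file):

     tree four = size_int (4);
     tree sum = size_binop (PLUS_EXPR, four, size_int (8));

   SUM is then a sizetype INTEGER_CST holding 12, computed at compile
   time instead of being emitted as a runtime addition.  */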
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "basic-block.h"
62 #include "tree-ssa-alias.h"
63 #include "internal-fn.h"
64 #include "tree-eh.h"
65 #include "gimple-expr.h"
66 #include "is-a.h"
67 #include "gimple.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
71 #include "builtins.h"
72
73 /* Nonzero if we are folding constants inside an initializer; zero
74 otherwise. */
75 int folding_initializer = 0;
76
77 /* The following constants represent a bit based encoding of GCC's
78 comparison operators. This encoding simplifies transformations
79 on relational comparison operators, such as AND and OR. */
80 enum comparison_code {
81 COMPCODE_FALSE = 0,
82 COMPCODE_LT = 1,
83 COMPCODE_EQ = 2,
84 COMPCODE_LE = 3,
85 COMPCODE_GT = 4,
86 COMPCODE_LTGT = 5,
87 COMPCODE_GE = 6,
88 COMPCODE_ORD = 7,
89 COMPCODE_UNORD = 8,
90 COMPCODE_UNLT = 9,
91 COMPCODE_UNEQ = 10,
92 COMPCODE_UNLE = 11,
93 COMPCODE_UNGT = 12,
94 COMPCODE_NE = 13,
95 COMPCODE_UNGE = 14,
96 COMPCODE_TRUE = 15
97 };
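/* With this encoding, combining comparisons reduces to bitwise
   operations on the codes.  For example:

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE   (1 | 2 == 3)
     COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ   (3 & 6 == 2)
     COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD == COMPCODE_NE   (13)

   so "a <= b AND a >= b" can be folded to "a == b" by AND-ing the
   codes of the two comparisons.  */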
98
99 static bool negate_mathfn_p (enum built_in_function);
100 static bool negate_expr_p (tree);
101 static tree negate_expr (tree);
102 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
103 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
104 static tree const_binop (enum tree_code, tree, tree);
105 static enum comparison_code comparison_to_compcode (enum tree_code);
106 static enum tree_code compcode_to_comparison (enum comparison_code);
107 static int operand_equal_for_comparison_p (tree, tree, tree);
108 static int twoval_comparison_p (tree, tree *, tree *, int *);
109 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
110 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
111 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
112 static tree make_bit_field_ref (location_t, tree, tree,
113 HOST_WIDE_INT, HOST_WIDE_INT, int);
114 static tree optimize_bit_field_compare (location_t, enum tree_code,
115 tree, tree, tree);
116 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
117 HOST_WIDE_INT *,
118 enum machine_mode *, int *, int *,
119 tree *, tree *);
120 static tree sign_bit_p (tree, const_tree);
121 static int simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree optimize_minmax_comparison (location_t, enum tree_code,
130 tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
133 static tree fold_binary_op_with_conditional_arg (location_t,
134 enum tree_code, tree,
135 tree, tree,
136 tree, tree, int);
137 static tree fold_mathfn_compare (location_t,
138 enum built_in_function, enum tree_code,
139 tree, tree, tree);
140 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (const_tree, const_tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (const_tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static tree fold_convert_const (enum tree_code, tree, tree);
147
148 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
149 Otherwise, return LOC. */
150
151 static location_t
152 expr_location_or (tree t, location_t loc)
153 {
154 location_t tloc = EXPR_LOCATION (t);
155 return tloc == UNKNOWN_LOCATION ? loc : tloc;
156 }
157
158 /* Similar to protected_set_expr_location, but never modify X in place;
159 if the location can and needs to be set, unshare X first. */
160
161 static inline tree
162 protected_set_expr_location_unshare (tree x, location_t loc)
163 {
164 if (CAN_HAVE_LOCATION_P (x)
165 && EXPR_LOCATION (x) != loc
166 && !(TREE_CODE (x) == SAVE_EXPR
167 || TREE_CODE (x) == TARGET_EXPR
168 || TREE_CODE (x) == BIND_EXPR))
169 {
170 x = copy_node (x);
171 SET_EXPR_LOCATION (x, loc);
172 }
173 return x;
174 }
175 \f
176 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
177 division and returns the quotient. Otherwise returns
178 NULL_TREE. */
179
180 tree
181 div_if_zero_remainder (const_tree arg1, const_tree arg2)
182 {
183 widest_int quo;
184
185 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
186 SIGNED, &quo))
187 return wide_int_to_tree (TREE_TYPE (arg1), quo);
188
189 return NULL_TREE;
190 }
191 \f
192 /* This is nonzero if we should defer warnings about undefined
193 overflow. This facility exists because these warnings are a
194 special case. The code to estimate loop iterations does not want
195 to issue any warnings, since it works with expressions which do not
196 occur in user code. Various bits of cleanup code call fold(), but
197 only use the result if it has certain characteristics (e.g., is a
198 constant); that code only wants to issue a warning if the result is
199 used. */
200
201 static int fold_deferring_overflow_warnings;
202
203 /* If a warning about undefined overflow is deferred, this is the
204 warning. Note that this may cause us to turn two warnings into
205 one, but that is fine since it is sufficient to only give one
206 warning per expression. */
207
208 static const char* fold_deferred_overflow_warning;
209
210 /* If a warning about undefined overflow is deferred, this is the
211 level at which the warning should be emitted. */
212
213 static enum warn_strict_overflow_code fold_deferred_overflow_code;
214
215 /* Start deferring overflow warnings. We could use a stack here to
216 permit nested calls, but at present it is not necessary. */
217
218 void
219 fold_defer_overflow_warnings (void)
220 {
221 ++fold_deferring_overflow_warnings;
222 }
223
224 /* Stop deferring overflow warnings. If there is a pending warning,
225 and ISSUE is true, then issue the warning if appropriate. STMT is
226 the statement with which the warning should be associated (used for
227 location information); STMT may be NULL. CODE is the level of the
228 warning--a warn_strict_overflow_code value. This function will use
229 the smaller of CODE and the deferred code when deciding whether to
230 issue the warning. CODE may be zero to mean to always use the
231 deferred code. */
232
233 void
234 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
235 {
236 const char *warnmsg;
237 location_t locus;
238
239 gcc_assert (fold_deferring_overflow_warnings > 0);
240 --fold_deferring_overflow_warnings;
241 if (fold_deferring_overflow_warnings > 0)
242 {
243 if (fold_deferred_overflow_warning != NULL
244 && code != 0
245 && code < (int) fold_deferred_overflow_code)
246 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
247 return;
248 }
249
250 warnmsg = fold_deferred_overflow_warning;
251 fold_deferred_overflow_warning = NULL;
252
253 if (!issue || warnmsg == NULL)
254 return;
255
256 if (gimple_no_warning_p (stmt))
257 return;
258
259 /* Use the smallest code level when deciding to issue the
260 warning. */
261 if (code == 0 || code > (int) fold_deferred_overflow_code)
262 code = fold_deferred_overflow_code;
263
264 if (!issue_strict_overflow_warning (code))
265 return;
266
267 if (stmt == NULL)
268 locus = input_location;
269 else
270 locus = gimple_location (stmt);
271 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
272 }
273
274 /* Stop deferring overflow warnings, ignoring any deferred
275 warnings. */
276
277 void
278 fold_undefer_and_ignore_overflow_warnings (void)
279 {
280 fold_undefer_overflow_warnings (false, NULL, 0);
281 }
282
283 /* Whether we are deferring overflow warnings. */
284
285 bool
286 fold_deferring_overflow_warnings_p (void)
287 {
288 return fold_deferring_overflow_warnings > 0;
289 }
290
291 /* This is called when we fold something based on the fact that signed
292 overflow is undefined. */
293
294 static void
295 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
296 {
297 if (fold_deferring_overflow_warnings > 0)
298 {
299 if (fold_deferred_overflow_warning == NULL
300 || wc < fold_deferred_overflow_code)
301 {
302 fold_deferred_overflow_warning = gmsgid;
303 fold_deferred_overflow_code = wc;
304 }
305 }
306 else if (issue_strict_overflow_warning (wc))
307 warning (OPT_Wstrict_overflow, gmsgid);
308 }
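/* A typical use of the deferral machinery looks like this (a
   hypothetical caller sketch; names other than the fold_* routines
   are invented):

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     bool keep = res != NULL_TREE && acceptable_p (res);
     fold_undefer_overflow_warnings (keep, stmt, 0);

   Callers that unconditionally discard the folded result can instead
   call fold_undefer_and_ignore_overflow_warnings ().  */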
309 \f
310 /* Return true if the built-in mathematical function specified by CODE
311 is odd, i.e. -f(x) == f(-x). */
312
313 static bool
314 negate_mathfn_p (enum built_in_function code)
315 {
316 switch (code)
317 {
318 CASE_FLT_FN (BUILT_IN_ASIN):
319 CASE_FLT_FN (BUILT_IN_ASINH):
320 CASE_FLT_FN (BUILT_IN_ATAN):
321 CASE_FLT_FN (BUILT_IN_ATANH):
322 CASE_FLT_FN (BUILT_IN_CASIN):
323 CASE_FLT_FN (BUILT_IN_CASINH):
324 CASE_FLT_FN (BUILT_IN_CATAN):
325 CASE_FLT_FN (BUILT_IN_CATANH):
326 CASE_FLT_FN (BUILT_IN_CBRT):
327 CASE_FLT_FN (BUILT_IN_CPROJ):
328 CASE_FLT_FN (BUILT_IN_CSIN):
329 CASE_FLT_FN (BUILT_IN_CSINH):
330 CASE_FLT_FN (BUILT_IN_CTAN):
331 CASE_FLT_FN (BUILT_IN_CTANH):
332 CASE_FLT_FN (BUILT_IN_ERF):
333 CASE_FLT_FN (BUILT_IN_LLROUND):
334 CASE_FLT_FN (BUILT_IN_LROUND):
335 CASE_FLT_FN (BUILT_IN_ROUND):
336 CASE_FLT_FN (BUILT_IN_SIN):
337 CASE_FLT_FN (BUILT_IN_SINH):
338 CASE_FLT_FN (BUILT_IN_TAN):
339 CASE_FLT_FN (BUILT_IN_TANH):
340 CASE_FLT_FN (BUILT_IN_TRUNC):
341 return true;
342
343 CASE_FLT_FN (BUILT_IN_LLRINT):
344 CASE_FLT_FN (BUILT_IN_LRINT):
345 CASE_FLT_FN (BUILT_IN_NEARBYINT):
346 CASE_FLT_FN (BUILT_IN_RINT):
347 return !flag_rounding_math;
348
349 default:
350 break;
351 }
352 return false;
353 }
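/* For example, sin is odd, so -sin(x) may be rewritten as sin(-x);
   fold_negate_expr below uses this to push a negation into the
   argument of such calls.  rint and friends qualify only when
   -frounding-math is off, since rounding towards +/-infinity is not
   symmetric about zero.  */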
354
355 /* Check whether we may negate an integer constant T without causing
356 overflow. */
357
358 bool
359 may_negate_without_overflow_p (const_tree t)
360 {
361 tree type;
362
363 gcc_assert (TREE_CODE (t) == INTEGER_CST);
364
365 type = TREE_TYPE (t);
366 if (TYPE_UNSIGNED (type))
367 return false;
368
369 return !wi::only_sign_bit_p (t);
370 }
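/* E.g. for a signed 32-bit type the only troublesome value is
   INT_MIN (-2147483648): its negation, 2147483648, is not
   representable, and INT_MIN is exactly the value with only the sign
   bit set that wi::only_sign_bit_p detects.  */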
371
372 /* Determine whether an expression T can be cheaply negated using
373 the function negate_expr without introducing undefined overflow. */
374
375 static bool
376 negate_expr_p (tree t)
377 {
378 tree type;
379
380 if (t == 0)
381 return false;
382
383 type = TREE_TYPE (t);
384
385 STRIP_SIGN_NOPS (t);
386 switch (TREE_CODE (t))
387 {
388 case INTEGER_CST:
389 if (TYPE_OVERFLOW_WRAPS (type))
390 return true;
391
392 /* Check that -CST will not overflow type. */
393 return may_negate_without_overflow_p (t);
394 case BIT_NOT_EXPR:
395 return (INTEGRAL_TYPE_P (type)
396 && TYPE_OVERFLOW_WRAPS (type));
397
398 case FIXED_CST:
399 case NEGATE_EXPR:
400 return true;
401
402 case REAL_CST:
403 /* We want to canonicalize to positive real constants. Pretend
404 that only negative ones can be easily negated. */
405 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
406
407 case COMPLEX_CST:
408 return negate_expr_p (TREE_REALPART (t))
409 && negate_expr_p (TREE_IMAGPART (t));
410
411 case VECTOR_CST:
412 {
413 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
414 return true;
415
416 int count = TYPE_VECTOR_SUBPARTS (type), i;
417
418 for (i = 0; i < count; i++)
419 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
420 return false;
421
422 return true;
423 }
424
425 case COMPLEX_EXPR:
426 return negate_expr_p (TREE_OPERAND (t, 0))
427 && negate_expr_p (TREE_OPERAND (t, 1));
428
429 case CONJ_EXPR:
430 return negate_expr_p (TREE_OPERAND (t, 0));
431
432 case PLUS_EXPR:
433 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
434 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
435 return false;
436 /* -(A + B) -> (-B) - A. */
437 if (negate_expr_p (TREE_OPERAND (t, 1))
438 && reorder_operands_p (TREE_OPERAND (t, 0),
439 TREE_OPERAND (t, 1)))
440 return true;
441 /* -(A + B) -> (-A) - B. */
442 return negate_expr_p (TREE_OPERAND (t, 0));
443
444 case MINUS_EXPR:
445 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
446 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
447 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
448 && reorder_operands_p (TREE_OPERAND (t, 0),
449 TREE_OPERAND (t, 1));
450
451 case MULT_EXPR:
452 if (TYPE_UNSIGNED (TREE_TYPE (t)))
453 break;
454
455 /* Fall through. */
456
457 case RDIV_EXPR:
458 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
459 return negate_expr_p (TREE_OPERAND (t, 1))
460 || negate_expr_p (TREE_OPERAND (t, 0));
461 break;
462
463 case TRUNC_DIV_EXPR:
464 case ROUND_DIV_EXPR:
465 case EXACT_DIV_EXPR:
466 /* In general we can't negate A / B, because if A is INT_MIN and
467 B is 1, we may turn this into INT_MIN / -1 which is undefined
468 and actually traps on some architectures. But if overflow is
469 undefined, we can negate, because - (INT_MIN / 1) is an
470 overflow. */
471 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
472 {
473 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
474 break;
475 /* If overflow is undefined then we have to be careful because
476 we ask whether it's ok to associate the negate with the
477 division which is not ok for example for
478 -((a - b) / c) where (-(a - b)) / c may invoke undefined
479 overflow because of negating INT_MIN. So do not use
480 negate_expr_p here but open-code the two important cases. */
481 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
482 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
483 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
484 return true;
485 }
486 else if (negate_expr_p (TREE_OPERAND (t, 0)))
487 return true;
488 return negate_expr_p (TREE_OPERAND (t, 1));
489
490 case NOP_EXPR:
491 /* Negate -((double)float) as (double)(-float). */
492 if (TREE_CODE (type) == REAL_TYPE)
493 {
494 tree tem = strip_float_extensions (t);
495 if (tem != t)
496 return negate_expr_p (tem);
497 }
498 break;
499
500 case CALL_EXPR:
501 /* Negate -f(x) as f(-x). */
502 if (negate_mathfn_p (builtin_mathfn_code (t)))
503 return negate_expr_p (CALL_EXPR_ARG (t, 0));
504 break;
505
506 case RSHIFT_EXPR:
507 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
508 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
509 {
510 tree op1 = TREE_OPERAND (t, 1);
511 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
512 return true;
513 }
514 break;
515
516 default:
517 break;
518 }
519 return false;
520 }
521
522 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
523 simplification is possible.
524 If negate_expr_p would return true for T, NULL_TREE will never be
525 returned. */
526
527 static tree
528 fold_negate_expr (location_t loc, tree t)
529 {
530 tree type = TREE_TYPE (t);
531 tree tem;
532
533 switch (TREE_CODE (t))
534 {
535 /* Convert - (~A) to A + 1. */
536 case BIT_NOT_EXPR:
537 if (INTEGRAL_TYPE_P (type))
538 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
539 build_one_cst (type));
540 break;
541
542 case INTEGER_CST:
543 tem = fold_negate_const (t, type);
544 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
545 || !TYPE_OVERFLOW_TRAPS (type))
546 return tem;
547 break;
548
549 case REAL_CST:
550 tem = fold_negate_const (t, type);
551 /* Two's complement FP formats, such as c4x, may overflow. */
552 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
553 return tem;
554 break;
555
556 case FIXED_CST:
557 tem = fold_negate_const (t, type);
558 return tem;
559
560 case COMPLEX_CST:
561 {
562 tree rpart = negate_expr (TREE_REALPART (t));
563 tree ipart = negate_expr (TREE_IMAGPART (t));
564
565 if ((TREE_CODE (rpart) == REAL_CST
566 && TREE_CODE (ipart) == REAL_CST)
567 || (TREE_CODE (rpart) == INTEGER_CST
568 && TREE_CODE (ipart) == INTEGER_CST))
569 return build_complex (type, rpart, ipart);
570 }
571 break;
572
573 case VECTOR_CST:
574 {
575 int count = TYPE_VECTOR_SUBPARTS (type), i;
576 tree *elts = XALLOCAVEC (tree, count);
577
578 for (i = 0; i < count; i++)
579 {
580 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
581 if (elts[i] == NULL_TREE)
582 return NULL_TREE;
583 }
584
585 return build_vector (type, elts);
586 }
587
588 case COMPLEX_EXPR:
589 if (negate_expr_p (t))
590 return fold_build2_loc (loc, COMPLEX_EXPR, type,
591 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
592 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
593 break;
594
595 case CONJ_EXPR:
596 if (negate_expr_p (t))
597 return fold_build1_loc (loc, CONJ_EXPR, type,
598 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
599 break;
600
601 case NEGATE_EXPR:
602 return TREE_OPERAND (t, 0);
603
604 case PLUS_EXPR:
605 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
606 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
607 {
608 /* -(A + B) -> (-B) - A. */
609 if (negate_expr_p (TREE_OPERAND (t, 1))
610 && reorder_operands_p (TREE_OPERAND (t, 0),
611 TREE_OPERAND (t, 1)))
612 {
613 tem = negate_expr (TREE_OPERAND (t, 1));
614 return fold_build2_loc (loc, MINUS_EXPR, type,
615 tem, TREE_OPERAND (t, 0));
616 }
617
618 /* -(A + B) -> (-A) - B. */
619 if (negate_expr_p (TREE_OPERAND (t, 0)))
620 {
621 tem = negate_expr (TREE_OPERAND (t, 0));
622 return fold_build2_loc (loc, MINUS_EXPR, type,
623 tem, TREE_OPERAND (t, 1));
624 }
625 }
626 break;
627
628 case MINUS_EXPR:
629 /* - (A - B) -> B - A */
630 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
631 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
632 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
633 return fold_build2_loc (loc, MINUS_EXPR, type,
634 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
635 break;
636
637 case MULT_EXPR:
638 if (TYPE_UNSIGNED (type))
639 break;
640
641 /* Fall through. */
642
643 case RDIV_EXPR:
644 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
645 {
646 tem = TREE_OPERAND (t, 1);
647 if (negate_expr_p (tem))
648 return fold_build2_loc (loc, TREE_CODE (t), type,
649 TREE_OPERAND (t, 0), negate_expr (tem));
650 tem = TREE_OPERAND (t, 0);
651 if (negate_expr_p (tem))
652 return fold_build2_loc (loc, TREE_CODE (t), type,
653 negate_expr (tem), TREE_OPERAND (t, 1));
654 }
655 break;
656
657 case TRUNC_DIV_EXPR:
658 case ROUND_DIV_EXPR:
659 case EXACT_DIV_EXPR:
660 /* In general we can't negate A / B, because if A is INT_MIN and
661 B is 1, we may turn this into INT_MIN / -1 which is undefined
662 and actually traps on some architectures. But if overflow is
663 undefined, we can negate, because - (INT_MIN / 1) is an
664 overflow. */
665 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
666 {
667 const char * const warnmsg = G_("assuming signed overflow does not "
668 "occur when negating a division");
669 tem = TREE_OPERAND (t, 1);
670 if (negate_expr_p (tem))
671 {
672 if (INTEGRAL_TYPE_P (type)
673 && (TREE_CODE (tem) != INTEGER_CST
674 || integer_onep (tem)))
675 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
676 return fold_build2_loc (loc, TREE_CODE (t), type,
677 TREE_OPERAND (t, 0), negate_expr (tem));
678 }
679 /* If overflow is undefined then we have to be careful because
680 we ask whether it's ok to associate the negate with the
681 division which is not ok for example for
682 -((a - b) / c) where (-(a - b)) / c may invoke undefined
683 overflow because of negating INT_MIN. So do not use
684 negate_expr_p here but open-code the two important cases. */
685 tem = TREE_OPERAND (t, 0);
686 if ((INTEGRAL_TYPE_P (type)
687 && (TREE_CODE (tem) == NEGATE_EXPR
688 || (TREE_CODE (tem) == INTEGER_CST
689 && may_negate_without_overflow_p (tem))))
690 || !INTEGRAL_TYPE_P (type))
691 return fold_build2_loc (loc, TREE_CODE (t), type,
692 negate_expr (tem), TREE_OPERAND (t, 1));
693 }
694 break;
695
696 case NOP_EXPR:
697 /* Convert -((double)float) into (double)(-float). */
698 if (TREE_CODE (type) == REAL_TYPE)
699 {
700 tem = strip_float_extensions (t);
701 if (tem != t && negate_expr_p (tem))
702 return fold_convert_loc (loc, type, negate_expr (tem));
703 }
704 break;
705
706 case CALL_EXPR:
707 /* Negate -f(x) as f(-x). */
708 if (negate_mathfn_p (builtin_mathfn_code (t))
709 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
710 {
711 tree fndecl, arg;
712
713 fndecl = get_callee_fndecl (t);
714 arg = negate_expr (CALL_EXPR_ARG (t, 0));
715 return build_call_expr_loc (loc, fndecl, 1, arg);
716 }
717 break;
718
719 case RSHIFT_EXPR:
720 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
721 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
722 {
723 tree op1 = TREE_OPERAND (t, 1);
724 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
725 {
726 tree ntype = TYPE_UNSIGNED (type)
727 ? signed_type_for (type)
728 : unsigned_type_for (type);
729 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
730 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
731 return fold_convert_loc (loc, type, temp);
732 }
733 }
734 break;
735
736 default:
737 break;
738 }
739
740 return NULL_TREE;
741 }
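/* Two of the cases above, written out concretely (C notation, for a
   32-bit int x):

     -(~a)       becomes  a + 1               (BIT_NOT_EXPR case,
                 since ~a == -a - 1)
     -(x >> 31)  becomes  (unsigned) x >> 31  (RSHIFT_EXPR case:
                 x >> 31 is 0 or -1, so its negation is 0 or 1,
                 i.e. a logical shift of the sign bit).  */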
742
743 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
744 negated in a simpler way. Also allow T to be NULL_TREE, in which case
745 return NULL_TREE. */
746
747 static tree
748 negate_expr (tree t)
749 {
750 tree type, tem;
751 location_t loc;
752
753 if (t == NULL_TREE)
754 return NULL_TREE;
755
756 loc = EXPR_LOCATION (t);
757 type = TREE_TYPE (t);
758 STRIP_SIGN_NOPS (t);
759
760 tem = fold_negate_expr (loc, t);
761 if (!tem)
762 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
763 return fold_convert_loc (loc, type, tem);
764 }
765 \f
766 /* Split a tree IN into constant, literal and variable parts that could be
767 combined with CODE to make IN. "constant" means an expression with
768 TREE_CONSTANT but that isn't an actual constant. CODE must be a
769 commutative arithmetic operation. Store the constant part into *CONP,
770 the literal in *LITP and return the variable part. If a part isn't
771 present, set it to null. If the tree does not decompose in this way,
772 return the entire tree as the variable part and the other parts as null.
773
774 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
775 case, we negate an operand that was subtracted. Except if it is a
776 literal for which we use *MINUS_LITP instead.
777
778 If NEGATE_P is true, we are negating all of IN, again except a literal
779 for which we use *MINUS_LITP instead.
780
781 If IN is itself a literal or constant, return it as appropriate.
782
783 Note that we do not guarantee that any of the three values will be the
784 same type as IN, but they will have the same signedness and mode. */
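/* For example, splitting IN = x + 4 with CODE == PLUS_EXPR stores 4
   in *LITP and returns x, while IN = x - 4 stores 4 in *MINUS_LITP
   instead.  A hypothetical caller sketch:

     tree con, lit, minus_lit;
     tree var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);

   after which associate_trees below can recombine the pieces.  */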
785
786 static tree
787 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
788 tree *minus_litp, int negate_p)
789 {
790 tree var = 0;
791
792 *conp = 0;
793 *litp = 0;
794 *minus_litp = 0;
795
796 /* Strip any conversions that don't change the machine mode or signedness. */
797 STRIP_SIGN_NOPS (in);
798
799 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
800 || TREE_CODE (in) == FIXED_CST)
801 *litp = in;
802 else if (TREE_CODE (in) == code
803 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
804 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
805 /* We can associate addition and subtraction together (even
806 though the C standard doesn't say so) for integers because
807 the value is not affected. For reals, the value might be
808 affected, so we can't. */
809 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
810 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
811 {
812 tree op0 = TREE_OPERAND (in, 0);
813 tree op1 = TREE_OPERAND (in, 1);
814 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
815 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
816
817 /* First see if either of the operands is a literal, then a constant. */
818 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
819 || TREE_CODE (op0) == FIXED_CST)
820 *litp = op0, op0 = 0;
821 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
822 || TREE_CODE (op1) == FIXED_CST)
823 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
824
825 if (op0 != 0 && TREE_CONSTANT (op0))
826 *conp = op0, op0 = 0;
827 else if (op1 != 0 && TREE_CONSTANT (op1))
828 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
829
830 /* If we haven't dealt with either operand, this is not a case we can
831 decompose. Otherwise, VAR is either of the ones remaining, if any. */
832 if (op0 != 0 && op1 != 0)
833 var = in;
834 else if (op0 != 0)
835 var = op0;
836 else
837 var = op1, neg_var_p = neg1_p;
838
839 /* Now do any needed negations. */
840 if (neg_litp_p)
841 *minus_litp = *litp, *litp = 0;
842 if (neg_conp_p)
843 *conp = negate_expr (*conp);
844 if (neg_var_p)
845 var = negate_expr (var);
846 }
847 else if (TREE_CODE (in) == BIT_NOT_EXPR
848 && code == PLUS_EXPR)
849 {
850 /* -X - 1 is folded to ~X, undo that here. */
851 *minus_litp = build_one_cst (TREE_TYPE (in));
852 var = negate_expr (TREE_OPERAND (in, 0));
853 }
854 else if (TREE_CONSTANT (in))
855 *conp = in;
856 else
857 var = in;
858
859 if (negate_p)
860 {
861 if (*litp)
862 *minus_litp = *litp, *litp = 0;
863 else if (*minus_litp)
864 *litp = *minus_litp, *minus_litp = 0;
865 *conp = negate_expr (*conp);
866 var = negate_expr (var);
867 }
868
869 return var;
870 }
871
872 /* Re-associate trees split by the above function. T1 and T2 are
873 either expressions to associate or null. Return the new
874 expression, if any. LOC is the location of the new expression. If
875 we build an operation, do it in TYPE and with CODE. */
876
877 static tree
878 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
879 {
880 if (t1 == 0)
881 return t2;
882 else if (t2 == 0)
883 return t1;
884
885 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
886 try to fold this since we will have infinite recursion. But do
887 deal with any NEGATE_EXPRs. */
888 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
889 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
890 {
891 if (code == PLUS_EXPR)
892 {
893 if (TREE_CODE (t1) == NEGATE_EXPR)
894 return build2_loc (loc, MINUS_EXPR, type,
895 fold_convert_loc (loc, type, t2),
896 fold_convert_loc (loc, type,
897 TREE_OPERAND (t1, 0)));
898 else if (TREE_CODE (t2) == NEGATE_EXPR)
899 return build2_loc (loc, MINUS_EXPR, type,
900 fold_convert_loc (loc, type, t1),
901 fold_convert_loc (loc, type,
902 TREE_OPERAND (t2, 0)));
903 else if (integer_zerop (t2))
904 return fold_convert_loc (loc, type, t1);
905 }
906 else if (code == MINUS_EXPR)
907 {
908 if (integer_zerop (t2))
909 return fold_convert_loc (loc, type, t1);
910 }
911
912 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
913 fold_convert_loc (loc, type, t2));
914 }
915
916 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
917 fold_convert_loc (loc, type, t2));
918 }
919 \f
920 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
921 for use in int_const_binop, size_binop and size_diffop. */
922
923 static bool
924 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
925 {
926 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
927 return false;
928 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
929 return false;
930
931 switch (code)
932 {
933 case LSHIFT_EXPR:
934 case RSHIFT_EXPR:
935 case LROTATE_EXPR:
936 case RROTATE_EXPR:
937 return true;
938
939 default:
940 break;
941 }
942
943 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
944 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
945 && TYPE_MODE (type1) == TYPE_MODE (type2);
946 }
947
948
949 /* Combine two integer constants ARG1 and ARG2 under operation CODE
950 to produce a new constant. Return NULL_TREE if we don't know how
951 to evaluate CODE at compile-time. */
952
953 static tree
954 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
955 int overflowable)
956 {
957 wide_int res;
958 tree t;
959 tree type = TREE_TYPE (arg1);
960 signop sign = TYPE_SIGN (type);
961 bool overflow = false;
962
963 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
964 TYPE_SIGN (TREE_TYPE (parg2)));
965
966 switch (code)
967 {
968 case BIT_IOR_EXPR:
969 res = wi::bit_or (arg1, arg2);
970 break;
971
972 case BIT_XOR_EXPR:
973 res = wi::bit_xor (arg1, arg2);
974 break;
975
976 case BIT_AND_EXPR:
977 res = wi::bit_and (arg1, arg2);
978 break;
979
980 case RSHIFT_EXPR:
981 case LSHIFT_EXPR:
982 if (wi::neg_p (arg2))
983 {
984 arg2 = -arg2;
985 if (code == RSHIFT_EXPR)
986 code = LSHIFT_EXPR;
987 else
988 code = RSHIFT_EXPR;
989 }
990
991 if (code == RSHIFT_EXPR)
992 /* It's unclear from the C standard whether shifts can overflow.
993 The following code ignores overflow; perhaps a C standard
994 interpretation ruling is needed. */
995 res = wi::rshift (arg1, arg2, sign);
996 else
997 res = wi::lshift (arg1, arg2);
998 break;
999
1000 case RROTATE_EXPR:
1001 case LROTATE_EXPR:
1002 if (wi::neg_p (arg2))
1003 {
1004 arg2 = -arg2;
1005 if (code == RROTATE_EXPR)
1006 code = LROTATE_EXPR;
1007 else
1008 code = RROTATE_EXPR;
1009 }
1010
1011 if (code == RROTATE_EXPR)
1012 res = wi::rrotate (arg1, arg2);
1013 else
1014 res = wi::lrotate (arg1, arg2);
1015 break;
1016
1017 case PLUS_EXPR:
1018 res = wi::add (arg1, arg2, sign, &overflow);
1019 break;
1020
1021 case MINUS_EXPR:
1022 res = wi::sub (arg1, arg2, sign, &overflow);
1023 break;
1024
1025 case MULT_EXPR:
1026 res = wi::mul (arg1, arg2, sign, &overflow);
1027 break;
1028
1029 case MULT_HIGHPART_EXPR:
1030 res = wi::mul_high (arg1, arg2, sign);
1031 break;
1032
1033 case TRUNC_DIV_EXPR:
1034 case EXACT_DIV_EXPR:
1035 if (arg2 == 0)
1036 return NULL_TREE;
1037 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1038 break;
1039
1040 case FLOOR_DIV_EXPR:
1041 if (arg2 == 0)
1042 return NULL_TREE;
1043 res = wi::div_floor (arg1, arg2, sign, &overflow);
1044 break;
1045
1046 case CEIL_DIV_EXPR:
1047 if (arg2 == 0)
1048 return NULL_TREE;
1049 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1050 break;
1051
1052 case ROUND_DIV_EXPR:
1053 if (arg2 == 0)
1054 return NULL_TREE;
1055 res = wi::div_round (arg1, arg2, sign, &overflow);
1056 break;
1057
1058 case TRUNC_MOD_EXPR:
1059 if (arg2 == 0)
1060 return NULL_TREE;
1061 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1062 break;
1063
1064 case FLOOR_MOD_EXPR:
1065 if (arg2 == 0)
1066 return NULL_TREE;
1067 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1068 break;
1069
1070 case CEIL_MOD_EXPR:
1071 if (arg2 == 0)
1072 return NULL_TREE;
1073 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1074 break;
1075
1076 case ROUND_MOD_EXPR:
1077 if (arg2 == 0)
1078 return NULL_TREE;
1079 res = wi::mod_round (arg1, arg2, sign, &overflow);
1080 break;
1081
1082 case MIN_EXPR:
1083 res = wi::min (arg1, arg2, sign);
1084 break;
1085
1086 case MAX_EXPR:
1087 res = wi::max (arg1, arg2, sign);
1088 break;
1089
1090 default:
1091 return NULL_TREE;
1092 }
1093
1094 t = force_fit_type (type, res, overflowable,
1095 (((sign == SIGNED || overflowable == -1)
1096 && overflow)
1097 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1098
1099 return t;
1100 }
1101
1102 tree
1103 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1104 {
1105 return int_const_binop_1 (code, arg1, arg2, 1);
1106 }
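/* For example (hypothetical caller), given two INTEGER_CSTs A and B
   of a 32-bit signed type,

     tree sum = int_const_binop (PLUS_EXPR, a, b);

   yields their wrapped sum with TREE_OVERFLOW set when the signed
   addition overflowed.  size_binop below instead calls
   int_const_binop_1 with OVERFLOWABLE == -1 so that overflow is
   recorded even for unsigned sizetype operands.  */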
1107
1108 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1109 constant. We assume ARG1 and ARG2 have the same data type, or at least
1110 are the same kind of constant and the same machine mode. Return NULL_TREE
1111 if combining the constants is not allowed in the current operating mode. */
1112
1113 static tree
1114 const_binop (enum tree_code code, tree arg1, tree arg2)
1115 {
1116 /* Sanity check for the recursive cases. */
1117 if (!arg1 || !arg2)
1118 return NULL_TREE;
1119
1120 STRIP_NOPS (arg1);
1121 STRIP_NOPS (arg2);
1122
1123 if (TREE_CODE (arg1) == INTEGER_CST)
1124 return int_const_binop (code, arg1, arg2);
1125
1126 if (TREE_CODE (arg1) == REAL_CST)
1127 {
1128 enum machine_mode mode;
1129 REAL_VALUE_TYPE d1;
1130 REAL_VALUE_TYPE d2;
1131 REAL_VALUE_TYPE value;
1132 REAL_VALUE_TYPE result;
1133 bool inexact;
1134 tree t, type;
1135
1136 /* The following codes are handled by real_arithmetic. */
1137 switch (code)
1138 {
1139 case PLUS_EXPR:
1140 case MINUS_EXPR:
1141 case MULT_EXPR:
1142 case RDIV_EXPR:
1143 case MIN_EXPR:
1144 case MAX_EXPR:
1145 break;
1146
1147 default:
1148 return NULL_TREE;
1149 }
1150
1151 d1 = TREE_REAL_CST (arg1);
1152 d2 = TREE_REAL_CST (arg2);
1153
1154 type = TREE_TYPE (arg1);
1155 mode = TYPE_MODE (type);
1156
1157 /* Don't perform the operation if we honor signaling NaNs and
1158 either operand is a NaN. */
1159 if (HONOR_SNANS (mode)
1160 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1161 return NULL_TREE;
1162
1163 /* Don't perform the operation if it would raise a division
1164 by zero exception. */
1165 if (code == RDIV_EXPR
1166 && REAL_VALUES_EQUAL (d2, dconst0)
1167 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1168 return NULL_TREE;
1169
1170 /* If either operand is a NaN, just return it. Otherwise, set up
1171 for floating-point trap; we return an overflow. */
1172 if (REAL_VALUE_ISNAN (d1))
1173 return arg1;
1174 else if (REAL_VALUE_ISNAN (d2))
1175 return arg2;
1176
1177 inexact = real_arithmetic (&value, code, &d1, &d2);
1178 real_convert (&result, mode, &value);
1179
1180 /* Don't constant fold this floating point operation if
1181 the result has overflowed and flag_trapping_math is set. */
1182 if (flag_trapping_math
1183 && MODE_HAS_INFINITIES (mode)
1184 && REAL_VALUE_ISINF (result)
1185 && !REAL_VALUE_ISINF (d1)
1186 && !REAL_VALUE_ISINF (d2))
1187 return NULL_TREE;
1188
1189 /* Don't constant fold this floating point operation if the
1190 result may depend upon the run-time rounding mode and
1191 flag_rounding_math is set, or if GCC's software emulation
1192 is unable to accurately represent the result. */
1193 if ((flag_rounding_math
1194 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1195 && (inexact || !real_identical (&result, &value)))
1196 return NULL_TREE;
1197
1198 t = build_real (type, result);
1199
1200 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1201 return t;
1202 }
1203
1204 if (TREE_CODE (arg1) == FIXED_CST)
1205 {
1206 FIXED_VALUE_TYPE f1;
1207 FIXED_VALUE_TYPE f2;
1208 FIXED_VALUE_TYPE result;
1209 tree t, type;
1210 int sat_p;
1211 bool overflow_p;
1212
1213 /* The following codes are handled by fixed_arithmetic. */
1214 switch (code)
1215 {
1216 case PLUS_EXPR:
1217 case MINUS_EXPR:
1218 case MULT_EXPR:
1219 case TRUNC_DIV_EXPR:
1220 f2 = TREE_FIXED_CST (arg2);
1221 break;
1222
1223 case LSHIFT_EXPR:
1224 case RSHIFT_EXPR:
1225 {
1226 wide_int w2 = arg2;
1227 f2.data.high = w2.elt (1);
1228 f2.data.low = w2.elt (0);
1229 f2.mode = SImode;
1230 }
1231 break;
1232
1233 default:
1234 return NULL_TREE;
1235 }
1236
1237 f1 = TREE_FIXED_CST (arg1);
1238 type = TREE_TYPE (arg1);
1239 sat_p = TYPE_SATURATING (type);
1240 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1241 t = build_fixed (type, result);
1242 /* Propagate overflow flags. */
1243 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1244 TREE_OVERFLOW (t) = 1;
1245 return t;
1246 }
1247
1248 if (TREE_CODE (arg1) == COMPLEX_CST)
1249 {
1250 tree type = TREE_TYPE (arg1);
1251 tree r1 = TREE_REALPART (arg1);
1252 tree i1 = TREE_IMAGPART (arg1);
1253 tree r2 = TREE_REALPART (arg2);
1254 tree i2 = TREE_IMAGPART (arg2);
1255 tree real, imag;
1256
1257 switch (code)
1258 {
1259 case PLUS_EXPR:
1260 case MINUS_EXPR:
1261 real = const_binop (code, r1, r2);
1262 imag = const_binop (code, i1, i2);
1263 break;
1264
1265 case MULT_EXPR:
1266 if (COMPLEX_FLOAT_TYPE_P (type))
1267 return do_mpc_arg2 (arg1, arg2, type,
1268 /* do_nonfinite= */ folding_initializer,
1269 mpc_mul);
1270
1271 real = const_binop (MINUS_EXPR,
1272 const_binop (MULT_EXPR, r1, r2),
1273 const_binop (MULT_EXPR, i1, i2));
1274 imag = const_binop (PLUS_EXPR,
1275 const_binop (MULT_EXPR, r1, i2),
1276 const_binop (MULT_EXPR, i1, r2));
1277 break;
1278
1279 case RDIV_EXPR:
1280 if (COMPLEX_FLOAT_TYPE_P (type))
1281 return do_mpc_arg2 (arg1, arg2, type,
1282 /* do_nonfinite= */ folding_initializer,
1283 mpc_div);
1284 /* Fallthru ... */
1285 case TRUNC_DIV_EXPR:
1286 case CEIL_DIV_EXPR:
1287 case FLOOR_DIV_EXPR:
1288 case ROUND_DIV_EXPR:
1289 if (flag_complex_method == 0)
1290 {
1291 /* Keep this algorithm in sync with
1292 tree-complex.c:expand_complex_div_straight().
1293
1294 Expand complex division to scalars, straightforward algorithm.
1295 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1296 t = br*br + bi*bi
1297 */
1298 tree magsquared
1299 = const_binop (PLUS_EXPR,
1300 const_binop (MULT_EXPR, r2, r2),
1301 const_binop (MULT_EXPR, i2, i2));
1302 tree t1
1303 = const_binop (PLUS_EXPR,
1304 const_binop (MULT_EXPR, r1, r2),
1305 const_binop (MULT_EXPR, i1, i2));
1306 tree t2
1307 = const_binop (MINUS_EXPR,
1308 const_binop (MULT_EXPR, i1, r2),
1309 const_binop (MULT_EXPR, r1, i2));
1310
1311 real = const_binop (code, t1, magsquared);
1312 imag = const_binop (code, t2, magsquared);
1313 }
1314 else
1315 {
1316 /* Keep this algorithm in sync with
1317 tree-complex.c:expand_complex_div_wide().
1318
1319 Expand complex division to scalars, modified algorithm to minimize
1320 overflow with wide input ranges. */
1321 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1322 fold_abs_const (r2, TREE_TYPE (type)),
1323 fold_abs_const (i2, TREE_TYPE (type)));
1324
1325 if (integer_nonzerop (compare))
1326 {
1327 /* In the TRUE branch, we compute
1328 ratio = br/bi;
1329 div = (br * ratio) + bi;
1330 tr = (ar * ratio) + ai;
1331 ti = (ai * ratio) - ar;
1332 tr = tr / div;
1333 ti = ti / div; */
1334 tree ratio = const_binop (code, r2, i2);
1335 tree div = const_binop (PLUS_EXPR, i2,
1336 const_binop (MULT_EXPR, r2, ratio));
1337 real = const_binop (MULT_EXPR, r1, ratio);
1338 real = const_binop (PLUS_EXPR, real, i1);
1339 real = const_binop (code, real, div);
1340
1341 imag = const_binop (MULT_EXPR, i1, ratio);
1342 imag = const_binop (MINUS_EXPR, imag, r1);
1343 imag = const_binop (code, imag, div);
1344 }
1345 else
1346 {
1347 /* In the FALSE branch, we compute
1348 ratio = bi/br;
1349 div = (bi * ratio) + br;
1350 tr = (ai * ratio) + ar;
1351 ti = ai - (ar * ratio);
1352 tr = tr / div;
1353 ti = ti / div; */
1354 tree ratio = const_binop (code, i2, r2);
1355 tree div = const_binop (PLUS_EXPR, r2,
1356 const_binop (MULT_EXPR, i2, ratio));
1357
1358 real = const_binop (MULT_EXPR, i1, ratio);
1359 real = const_binop (PLUS_EXPR, real, r1);
1360 real = const_binop (code, real, div);
1361
1362 imag = const_binop (MULT_EXPR, r1, ratio);
1363 imag = const_binop (MINUS_EXPR, i1, imag);
1364 imag = const_binop (code, imag, div);
1365 }
1366 }
1367 break;
1368
1369 default:
1370 return NULL_TREE;
1371 }
1372
1373 if (real && imag)
1374 return build_complex (type, real, imag);
1375 }
1376
1377 if (TREE_CODE (arg1) == VECTOR_CST
1378 && TREE_CODE (arg2) == VECTOR_CST)
1379 {
1380 tree type = TREE_TYPE (arg1);
1381 int count = TYPE_VECTOR_SUBPARTS (type), i;
1382 tree *elts = XALLOCAVEC (tree, count);
1383
1384 for (i = 0; i < count; i++)
1385 {
1386 tree elem1 = VECTOR_CST_ELT (arg1, i);
1387 tree elem2 = VECTOR_CST_ELT (arg2, i);
1388
1389 elts[i] = const_binop (code, elem1, elem2);
1390
1391 /* It is possible that const_binop cannot handle the given
1392 code and returns NULL_TREE. */
1393 if (elts[i] == NULL_TREE)
1394 return NULL_TREE;
1395 }
1396
1397 return build_vector (type, elts);
1398 }
1399
1400 /* Shifts allow a scalar offset for a vector. */
1401 if (TREE_CODE (arg1) == VECTOR_CST
1402 && TREE_CODE (arg2) == INTEGER_CST)
1403 {
1404 tree type = TREE_TYPE (arg1);
1405 int count = TYPE_VECTOR_SUBPARTS (type), i;
1406 tree *elts = XALLOCAVEC (tree, count);
1407
1408 if (code == VEC_LSHIFT_EXPR
1409 || code == VEC_RSHIFT_EXPR)
1410 {
1411 if (!tree_fits_uhwi_p (arg2))
1412 return NULL_TREE;
1413
1414 unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
1415 unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
1416 unsigned HOST_WIDE_INT innerc
1417 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
1418 if (shiftc >= outerc || (shiftc % innerc) != 0)
1419 return NULL_TREE;
1420 int offset = shiftc / innerc;
1421 /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
1422 For reductions, the compiler always emits VEC_RSHIFT_EXPR;
1423 for !BYTES_BIG_ENDIAN it picks the first vector element, but
1424 for BYTES_BIG_ENDIAN the last element of the vector. */
1425 if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
1426 offset = -offset;
1427 tree zero = build_zero_cst (TREE_TYPE (type));
1428 for (i = 0; i < count; i++)
1429 {
1430 if (i + offset < 0 || i + offset >= count)
1431 elts[i] = zero;
1432 else
1433 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1434 }
1435 }
1436 else
1437 for (i = 0; i < count; i++)
1438 {
1439 tree elem1 = VECTOR_CST_ELT (arg1, i);
1440
1441 elts[i] = const_binop (code, elem1, arg2);
1442
1443 /* It is possible that const_binop cannot handle the given
1444 code and returns NULL_TREE. */
1445 if (elts[i] == NULL_TREE)
1446 return NULL_TREE;
1447 }
1448
1449 return build_vector (type, elts);
1450 }
1451 return NULL_TREE;
1452 }
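/* As an example of the whole-vector shift handling above, a
   VEC_RSHIFT_EXPR of the constant vector {1, 2, 3, 4} by two element
   widths on a !BYTES_BIG_ENDIAN target gives OFFSET == 2 and thus
   {3, 4, 0, 0}: shifts work at element granularity, which is why
   SHIFTC must be a multiple of the element size in bits.  */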
1453
1454 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1455 indicates which particular sizetype to create. */
1456
1457 tree
1458 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1459 {
1460 return build_int_cst (sizetype_tab[(int) kind], number);
1461 }
1462 \f
1463 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1464 is a tree code. The type of the result is taken from the operands.
1465 Both must be equivalent integer types, ala int_binop_types_match_p.
1466 If the operands are constant, so is the result. */
1467
1468 tree
1469 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1470 {
1471 tree type = TREE_TYPE (arg0);
1472
1473 if (arg0 == error_mark_node || arg1 == error_mark_node)
1474 return error_mark_node;
1475
1476 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1477 TREE_TYPE (arg1)));
1478
1479 /* Handle the special case of two integer constants faster. */
1480 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1481 {
1482 /* And some specific cases even faster than that. */
1483 if (code == PLUS_EXPR)
1484 {
1485 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1486 return arg1;
1487 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1488 return arg0;
1489 }
1490 else if (code == MINUS_EXPR)
1491 {
1492 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1493 return arg0;
1494 }
1495 else if (code == MULT_EXPR)
1496 {
1497 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1498 return arg1;
1499 }
1500
1501 /* Handle general case of two integer constants. For sizetype
1502 constant calculations we always want to know about overflow,
1503 even in the unsigned case. */
1504 return int_const_binop_1 (code, arg0, arg1, -1);
1505 }
1506
1507 return fold_build2_loc (loc, code, type, arg0, arg1);
1508 }
1509
1510 /* Given two values, either both of sizetype or both of bitsizetype,
1511 compute the difference between the two values. Return the value
1512 in signed type corresponding to the type of the operands. */
1513
1514 tree
1515 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1516 {
1517 tree type = TREE_TYPE (arg0);
1518 tree ctype;
1519
1520 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1521 TREE_TYPE (arg1)));
1522
1523 /* If the type is already signed, just do the simple thing. */
1524 if (!TYPE_UNSIGNED (type))
1525 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1526
1527 if (type == sizetype)
1528 ctype = ssizetype;
1529 else if (type == bitsizetype)
1530 ctype = sbitsizetype;
1531 else
1532 ctype = signed_type_for (type);
1533
1534 /* If either operand is not a constant, do the conversions to the signed
1535 type and subtract. The hardware will do the right thing with any
1536 overflow in the subtraction. */
1537 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1538 return size_binop_loc (loc, MINUS_EXPR,
1539 fold_convert_loc (loc, ctype, arg0),
1540 fold_convert_loc (loc, ctype, arg1));
1541
1542 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1543 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1544 overflow) and negate (which can't either). Special-case a result
1545 of zero while we're here. */
1546 if (tree_int_cst_equal (arg0, arg1))
1547 return build_int_cst (ctype, 0);
1548 else if (tree_int_cst_lt (arg1, arg0))
1549 return fold_convert_loc (loc, ctype,
1550 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1551 else
1552 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1553 fold_convert_loc (loc, ctype,
1554 size_binop_loc (loc,
1555 MINUS_EXPR,
1556 arg1, arg0)));
1557 }
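/* For example, applied to the sizetype constants 3 and 5 this
   returns the ssizetype constant -2; subtracting in the unsigned
   sizetype first would instead produce a huge positive value.  */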
1558 \f
1559 /* A subroutine of fold_convert_const handling conversions of an
1560 INTEGER_CST to another integer type. */
1561
1562 static tree
1563 fold_convert_const_int_from_int (tree type, const_tree arg1)
1564 {
1565 /* Given an integer constant, make a new constant with the new type,
1566 appropriately sign-extended or truncated. Use widest_int
1567 so that any extension is done according to ARG1's type. */
1568 return force_fit_type (type, wi::to_widest (arg1),
1569 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1570 TREE_OVERFLOW (arg1));
1571 }
1572
1573 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1574 to an integer type. */
1575
1576 static tree
1577 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1578 {
1579 bool overflow = false;
1580 tree t;
1581
1582 /* The following code implements the floating point to integer
1583 conversion rules required by the Java Language Specification,
1584 that IEEE NaNs are mapped to zero and values that overflow
1585 the target precision saturate, i.e. values greater than
1586 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1587 are mapped to INT_MIN. These semantics are allowed by the
1588 C and C++ standards that simply state that the behavior of
1589 FP-to-integer conversion is unspecified upon overflow. */
1590
1591 wide_int val;
1592 REAL_VALUE_TYPE r;
1593 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1594
1595 switch (code)
1596 {
1597 case FIX_TRUNC_EXPR:
1598 real_trunc (&r, VOIDmode, &x);
1599 break;
1600
1601 default:
1602 gcc_unreachable ();
1603 }
1604
1605 /* If R is NaN, return zero and show we have an overflow. */
1606 if (REAL_VALUE_ISNAN (r))
1607 {
1608 overflow = true;
1609 val = wi::zero (TYPE_PRECISION (type));
1610 }
1611
1612 /* See if R is less than the lower bound or greater than the
1613 upper bound. */
1614
1615 if (! overflow)
1616 {
1617 tree lt = TYPE_MIN_VALUE (type);
1618 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1619 if (REAL_VALUES_LESS (r, l))
1620 {
1621 overflow = true;
1622 val = lt;
1623 }
1624 }
1625
1626 if (! overflow)
1627 {
1628 tree ut = TYPE_MAX_VALUE (type);
1629 if (ut)
1630 {
1631 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1632 if (REAL_VALUES_LESS (u, r))
1633 {
1634 overflow = true;
1635 val = ut;
1636 }
1637 }
1638 }
1639
1640 if (! overflow)
1641 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1642
1643 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1644 return t;
1645 }
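/* Concretely, converting the REAL_CST 1.0e30 to a 32-bit int yields
   INT_MAX with TREE_OVERFLOW set, -1.0e30 yields INT_MIN, and a NaN
   yields 0, matching the saturating semantics described above.  */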
1646
1647 /* A subroutine of fold_convert_const handling conversions of a
1648 FIXED_CST to an integer type. */
1649
1650 static tree
1651 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1652 {
1653 tree t;
1654 double_int temp, temp_trunc;
1655 unsigned int mode;
1656
1657 /* Right shift FIXED_CST to temp by fbit. */
1658 temp = TREE_FIXED_CST (arg1).data;
1659 mode = TREE_FIXED_CST (arg1).mode;
1660 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1661 {
1662 temp = temp.rshift (GET_MODE_FBIT (mode),
1663 HOST_BITS_PER_DOUBLE_INT,
1664 SIGNED_FIXED_POINT_MODE_P (mode));
1665
1666 /* Left shift temp to temp_trunc by fbit. */
1667 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1668 HOST_BITS_PER_DOUBLE_INT,
1669 SIGNED_FIXED_POINT_MODE_P (mode));
1670 }
1671 else
1672 {
1673 temp = double_int_zero;
1674 temp_trunc = double_int_zero;
1675 }
1676
1677 /* If FIXED_CST is negative, we need to round the value toward 0.
1678 We do this by adding 1 to TEMP when the discarded fractional bits are nonzero. */
1679 if (SIGNED_FIXED_POINT_MODE_P (mode)
1680 && temp_trunc.is_negative ()
1681 && TREE_FIXED_CST (arg1).data != temp_trunc)
1682 temp += double_int_one;
1683
1684 /* Given a fixed-point constant, make a new constant with the new type,
1685 appropriately sign-extended or truncated. */
1686 t = force_fit_type (type, temp, -1,
1687 (temp.is_negative ()
1688 && (TYPE_UNSIGNED (type)
1689 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1690 | TREE_OVERFLOW (arg1));
1691
1692 return t;
1693 }
1694
1695 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1696 to another floating point type. */
1697
1698 static tree
1699 fold_convert_const_real_from_real (tree type, const_tree arg1)
1700 {
1701 REAL_VALUE_TYPE value;
1702 tree t;
1703
1704 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1705 t = build_real (type, value);
1706
1707 /* If converting an infinity or NAN to a representation that doesn't
1708 have one, set the overflow bit so that we can produce some kind of
1709 error message at the appropriate point if necessary. It's not the
1710 most user-friendly message, but it's better than nothing. */
1711 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1712 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1713 TREE_OVERFLOW (t) = 1;
1714 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1715 && !MODE_HAS_NANS (TYPE_MODE (type)))
1716 TREE_OVERFLOW (t) = 1;
1717 /* Regular overflow, conversion produced an infinity in a mode that
1718 can't represent them. */
1719 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1720 && REAL_VALUE_ISINF (value)
1721 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1722 TREE_OVERFLOW (t) = 1;
1723 else
1724 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1725 return t;
1726 }
1727
1728 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1729 to a floating point type. */
1730
1731 static tree
1732 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1733 {
1734 REAL_VALUE_TYPE value;
1735 tree t;
1736
1737 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1738 t = build_real (type, value);
1739
1740 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1741 return t;
1742 }
1743
1744 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1745 to another fixed-point type. */
1746
1747 static tree
1748 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1749 {
1750 FIXED_VALUE_TYPE value;
1751 tree t;
1752 bool overflow_p;
1753
1754 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1755 TYPE_SATURATING (type));
1756 t = build_fixed (type, value);
1757
1758 /* Propagate overflow flags. */
1759 if (overflow_p | TREE_OVERFLOW (arg1))
1760 TREE_OVERFLOW (t) = 1;
1761 return t;
1762 }
1763
1764 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1765 to a fixed-point type. */
1766
1767 static tree
1768 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1769 {
1770 FIXED_VALUE_TYPE value;
1771 tree t;
1772 bool overflow_p;
1773 double_int di;
1774
1775 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
1776
1777 di.low = TREE_INT_CST_ELT (arg1, 0);
1778 if (TREE_INT_CST_NUNITS (arg1) == 1)
1779 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
1780 else
1781 di.high = TREE_INT_CST_ELT (arg1, 1);
1782
1783 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
1784 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1785 TYPE_SATURATING (type));
1786 t = build_fixed (type, value);
1787
1788 /* Propagate overflow flags. */
1789 if (overflow_p | TREE_OVERFLOW (arg1))
1790 TREE_OVERFLOW (t) = 1;
1791 return t;
1792 }
1793
1794 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1795 to a fixed-point type. */
1796
1797 static tree
1798 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1799 {
1800 FIXED_VALUE_TYPE value;
1801 tree t;
1802 bool overflow_p;
1803
1804 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1805 &TREE_REAL_CST (arg1),
1806 TYPE_SATURATING (type));
1807 t = build_fixed (type, value);
1808
1809 /* Propagate overflow flags. */
1810 if (overflow_p | TREE_OVERFLOW (arg1))
1811 TREE_OVERFLOW (t) = 1;
1812 return t;
1813 }
1814
1815 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1816 type TYPE. If no simplification can be done return NULL_TREE. */
1817
1818 static tree
1819 fold_convert_const (enum tree_code code, tree type, tree arg1)
1820 {
1821 if (TREE_TYPE (arg1) == type)
1822 return arg1;
1823
1824 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1825 || TREE_CODE (type) == OFFSET_TYPE)
1826 {
1827 if (TREE_CODE (arg1) == INTEGER_CST)
1828 return fold_convert_const_int_from_int (type, arg1);
1829 else if (TREE_CODE (arg1) == REAL_CST)
1830 return fold_convert_const_int_from_real (code, type, arg1);
1831 else if (TREE_CODE (arg1) == FIXED_CST)
1832 return fold_convert_const_int_from_fixed (type, arg1);
1833 }
1834 else if (TREE_CODE (type) == REAL_TYPE)
1835 {
1836 if (TREE_CODE (arg1) == INTEGER_CST)
1837 return build_real_from_int_cst (type, arg1);
1838 else if (TREE_CODE (arg1) == REAL_CST)
1839 return fold_convert_const_real_from_real (type, arg1);
1840 else if (TREE_CODE (arg1) == FIXED_CST)
1841 return fold_convert_const_real_from_fixed (type, arg1);
1842 }
1843 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1844 {
1845 if (TREE_CODE (arg1) == FIXED_CST)
1846 return fold_convert_const_fixed_from_fixed (type, arg1);
1847 else if (TREE_CODE (arg1) == INTEGER_CST)
1848 return fold_convert_const_fixed_from_int (type, arg1);
1849 else if (TREE_CODE (arg1) == REAL_CST)
1850 return fold_convert_const_fixed_from_real (type, arg1);
1851 }
1852 return NULL_TREE;
1853 }
1854
1855 /* Construct a vector of zero elements of vector type TYPE. */
1856
1857 static tree
1858 build_zero_vector (tree type)
1859 {
1860 tree t;
1861
1862 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1863 return build_vector_from_val (type, t);
1864 }
1865
1866 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1867
1868 bool
1869 fold_convertible_p (const_tree type, const_tree arg)
1870 {
1871 tree orig = TREE_TYPE (arg);
1872
1873 if (type == orig)
1874 return true;
1875
1876 if (TREE_CODE (arg) == ERROR_MARK
1877 || TREE_CODE (type) == ERROR_MARK
1878 || TREE_CODE (orig) == ERROR_MARK)
1879 return false;
1880
1881 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1882 return true;
1883
1884 switch (TREE_CODE (type))
1885 {
1886 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1887 case POINTER_TYPE: case REFERENCE_TYPE:
1888 case OFFSET_TYPE:
1889 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1890 || TREE_CODE (orig) == OFFSET_TYPE)
1891 return true;
1892 return (TREE_CODE (orig) == VECTOR_TYPE
1893 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1894
1895 case REAL_TYPE:
1896 case FIXED_POINT_TYPE:
1897 case COMPLEX_TYPE:
1898 case VECTOR_TYPE:
1899 case VOID_TYPE:
1900 return TREE_CODE (type) == TREE_CODE (orig);
1901
1902 default:
1903 return false;
1904 }
1905 }
1906
1907 /* Convert expression ARG to type TYPE. Used by the middle-end for
1908 simple conversions in preference to calling the front-end's convert. */
1909
1910 tree
1911 fold_convert_loc (location_t loc, tree type, tree arg)
1912 {
1913 tree orig = TREE_TYPE (arg);
1914 tree tem;
1915
1916 if (type == orig)
1917 return arg;
1918
1919 if (TREE_CODE (arg) == ERROR_MARK
1920 || TREE_CODE (type) == ERROR_MARK
1921 || TREE_CODE (orig) == ERROR_MARK)
1922 return error_mark_node;
1923
1924 switch (TREE_CODE (type))
1925 {
1926 case POINTER_TYPE:
1927 case REFERENCE_TYPE:
1928 /* Handle conversions between pointers to different address spaces. */
1929 if (POINTER_TYPE_P (orig)
1930 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1931 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1932 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1933 /* fall through */
1934
1935 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1936 case OFFSET_TYPE:
1937 if (TREE_CODE (arg) == INTEGER_CST)
1938 {
1939 tem = fold_convert_const (NOP_EXPR, type, arg);
1940 if (tem != NULL_TREE)
1941 return tem;
1942 }
1943 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1944 || TREE_CODE (orig) == OFFSET_TYPE)
1945 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1946 if (TREE_CODE (orig) == COMPLEX_TYPE)
1947 return fold_convert_loc (loc, type,
1948 fold_build1_loc (loc, REALPART_EXPR,
1949 TREE_TYPE (orig), arg));
1950 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1951 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1952 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1953
1954 case REAL_TYPE:
1955 if (TREE_CODE (arg) == INTEGER_CST)
1956 {
1957 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1958 if (tem != NULL_TREE)
1959 return tem;
1960 }
1961 else if (TREE_CODE (arg) == REAL_CST)
1962 {
1963 tem = fold_convert_const (NOP_EXPR, type, arg);
1964 if (tem != NULL_TREE)
1965 return tem;
1966 }
1967 else if (TREE_CODE (arg) == FIXED_CST)
1968 {
1969 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1970 if (tem != NULL_TREE)
1971 return tem;
1972 }
1973
1974 switch (TREE_CODE (orig))
1975 {
1976 case INTEGER_TYPE:
1977 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1978 case POINTER_TYPE: case REFERENCE_TYPE:
1979 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1980
1981 case REAL_TYPE:
1982 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1983
1984 case FIXED_POINT_TYPE:
1985 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1986
1987 case COMPLEX_TYPE:
1988 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1989 return fold_convert_loc (loc, type, tem);
1990
1991 default:
1992 gcc_unreachable ();
1993 }
1994
1995 case FIXED_POINT_TYPE:
1996 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1997 || TREE_CODE (arg) == REAL_CST)
1998 {
1999 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2000 if (tem != NULL_TREE)
2001 goto fold_convert_exit;
2002 }
2003
2004 switch (TREE_CODE (orig))
2005 {
2006 case FIXED_POINT_TYPE:
2007 case INTEGER_TYPE:
2008 case ENUMERAL_TYPE:
2009 case BOOLEAN_TYPE:
2010 case REAL_TYPE:
2011 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2012
2013 case COMPLEX_TYPE:
2014 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2015 return fold_convert_loc (loc, type, tem);
2016
2017 default:
2018 gcc_unreachable ();
2019 }
2020
2021 case COMPLEX_TYPE:
2022 switch (TREE_CODE (orig))
2023 {
2024 case INTEGER_TYPE:
2025 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2026 case POINTER_TYPE: case REFERENCE_TYPE:
2027 case REAL_TYPE:
2028 case FIXED_POINT_TYPE:
2029 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2030 fold_convert_loc (loc, TREE_TYPE (type), arg),
2031 fold_convert_loc (loc, TREE_TYPE (type),
2032 integer_zero_node));
2033 case COMPLEX_TYPE:
2034 {
2035 tree rpart, ipart;
2036
2037 if (TREE_CODE (arg) == COMPLEX_EXPR)
2038 {
2039 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2040 TREE_OPERAND (arg, 0));
2041 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2042 TREE_OPERAND (arg, 1));
2043 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2044 }
2045
2046 arg = save_expr (arg);
2047 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2048 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2049 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2050 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2051 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2052 }
2053
2054 default:
2055 gcc_unreachable ();
2056 }
2057
2058 case VECTOR_TYPE:
2059 if (integer_zerop (arg))
2060 return build_zero_vector (type);
2061 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2062 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2063 || TREE_CODE (orig) == VECTOR_TYPE);
2064 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2065
2066 case VOID_TYPE:
2067 tem = fold_ignored_result (arg);
2068 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2069
2070 default:
2071 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2072 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2073 gcc_unreachable ();
2074 }
2075 fold_convert_exit:
2076 protected_set_expr_location_unshare (tem, loc);
2077 return tem;
2078 }
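
/* Editor's note: a source-level sketch, not part of the original
   source, of the COMPLEX_TYPE arms above.  Converting a complex value
   to a scalar type goes through REALPART_EXPR, i.e. something like

     double _Complex z;
     int i = z;           (folds as i = (int) __real__ z)

   which matches C99's rule that the imaginary part is discarded on
   conversion to a real type.  */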
2079 \f
2080 /* Return false if expr can be assumed not to be an lvalue, true
2081 otherwise. */
2082
2083 static bool
2084 maybe_lvalue_p (const_tree x)
2085 {
2086 /* We only need to wrap lvalue tree codes. */
2087 switch (TREE_CODE (x))
2088 {
2089 case VAR_DECL:
2090 case PARM_DECL:
2091 case RESULT_DECL:
2092 case LABEL_DECL:
2093 case FUNCTION_DECL:
2094 case SSA_NAME:
2095
2096 case COMPONENT_REF:
2097 case MEM_REF:
2098 case INDIRECT_REF:
2099 case ARRAY_REF:
2100 case ARRAY_RANGE_REF:
2101 case BIT_FIELD_REF:
2102 case OBJ_TYPE_REF:
2103
2104 case REALPART_EXPR:
2105 case IMAGPART_EXPR:
2106 case PREINCREMENT_EXPR:
2107 case PREDECREMENT_EXPR:
2108 case SAVE_EXPR:
2109 case TRY_CATCH_EXPR:
2110 case WITH_CLEANUP_EXPR:
2111 case COMPOUND_EXPR:
2112 case MODIFY_EXPR:
2113 case TARGET_EXPR:
2114 case COND_EXPR:
2115 case BIND_EXPR:
2116 break;
2117
2118 default:
2119 /* Assume the worst for front-end tree codes. */
2120 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2121 break;
2122 return false;
2123 }
2124
2125 return true;
2126 }
2127
2128 /* Return an expr equal to X but certainly not valid as an lvalue. */
2129
2130 tree
2131 non_lvalue_loc (location_t loc, tree x)
2132 {
2133 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2134 us. */
2135 if (in_gimple_form)
2136 return x;
2137
2138 if (! maybe_lvalue_p (x))
2139 return x;
2140 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2141 }
2142
2143 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2144 Zero means allow extended lvalues. */
2145
2146 int pedantic_lvalues;
2147
2148 /* When pedantic, return an expr equal to X but certainly not valid as a
2149 pedantic lvalue. Otherwise, return X. */
2150
2151 static tree
2152 pedantic_non_lvalue_loc (location_t loc, tree x)
2153 {
2154 if (pedantic_lvalues)
2155 return non_lvalue_loc (loc, x);
2156
2157 return protected_set_expr_location_unshare (x, loc);
2158 }
2159 \f
2160 /* Given a tree comparison code, return the code that is the logical inverse.
2161 It is generally not safe to do this for floating-point comparisons, except
2162 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2163 ERROR_MARK in this case. */
2164
2165 enum tree_code
2166 invert_tree_comparison (enum tree_code code, bool honor_nans)
2167 {
2168 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2169 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2170 return ERROR_MARK;
2171
2172 switch (code)
2173 {
2174 case EQ_EXPR:
2175 return NE_EXPR;
2176 case NE_EXPR:
2177 return EQ_EXPR;
2178 case GT_EXPR:
2179 return honor_nans ? UNLE_EXPR : LE_EXPR;
2180 case GE_EXPR:
2181 return honor_nans ? UNLT_EXPR : LT_EXPR;
2182 case LT_EXPR:
2183 return honor_nans ? UNGE_EXPR : GE_EXPR;
2184 case LE_EXPR:
2185 return honor_nans ? UNGT_EXPR : GT_EXPR;
2186 case LTGT_EXPR:
2187 return UNEQ_EXPR;
2188 case UNEQ_EXPR:
2189 return LTGT_EXPR;
2190 case UNGT_EXPR:
2191 return LE_EXPR;
2192 case UNGE_EXPR:
2193 return LT_EXPR;
2194 case UNLT_EXPR:
2195 return GE_EXPR;
2196 case UNLE_EXPR:
2197 return GT_EXPR;
2198 case ORDERED_EXPR:
2199 return UNORDERED_EXPR;
2200 case UNORDERED_EXPR:
2201 return ORDERED_EXPR;
2202 default:
2203 gcc_unreachable ();
2204 }
2205 }
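
/* Editor's note: an illustrative check, not part of the original
   source, of why HONOR_NANS matters above.  When an operand is a NaN,
   !(x < y) does not imply x >= y, so the safe inverse of LT_EXPR is
   UNGE_EXPR ("unordered or >=") rather than GE_EXPR.  */

static void
invert_comparison_demo (double x, double y)
{
  /* Trichotomy plus the unordered case: whenever x < y is false,
     either the operands are unordered (a NaN) or x >= y holds.  */
  if (!(x < y))
    gcc_assert (__builtin_isunordered (x, y) || x >= y);
}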
2206
2207 /* Similar, but return the comparison that results if the operands are
2208 swapped. This is safe for floating-point. */
2209
2210 enum tree_code
2211 swap_tree_comparison (enum tree_code code)
2212 {
2213 switch (code)
2214 {
2215 case EQ_EXPR:
2216 case NE_EXPR:
2217 case ORDERED_EXPR:
2218 case UNORDERED_EXPR:
2219 case LTGT_EXPR:
2220 case UNEQ_EXPR:
2221 return code;
2222 case GT_EXPR:
2223 return LT_EXPR;
2224 case GE_EXPR:
2225 return LE_EXPR;
2226 case LT_EXPR:
2227 return GT_EXPR;
2228 case LE_EXPR:
2229 return GE_EXPR;
2230 case UNGT_EXPR:
2231 return UNLT_EXPR;
2232 case UNGE_EXPR:
2233 return UNLE_EXPR;
2234 case UNLT_EXPR:
2235 return UNGT_EXPR;
2236 case UNLE_EXPR:
2237 return UNGE_EXPR;
2238 default:
2239 gcc_unreachable ();
2240 }
2241 }
2242
2243
2244 /* Convert a comparison tree code from an enum tree_code representation
2245 into a compcode bit-based encoding. This function is the inverse of
2246 compcode_to_comparison. */
2247
2248 static enum comparison_code
2249 comparison_to_compcode (enum tree_code code)
2250 {
2251 switch (code)
2252 {
2253 case LT_EXPR:
2254 return COMPCODE_LT;
2255 case EQ_EXPR:
2256 return COMPCODE_EQ;
2257 case LE_EXPR:
2258 return COMPCODE_LE;
2259 case GT_EXPR:
2260 return COMPCODE_GT;
2261 case NE_EXPR:
2262 return COMPCODE_NE;
2263 case GE_EXPR:
2264 return COMPCODE_GE;
2265 case ORDERED_EXPR:
2266 return COMPCODE_ORD;
2267 case UNORDERED_EXPR:
2268 return COMPCODE_UNORD;
2269 case UNLT_EXPR:
2270 return COMPCODE_UNLT;
2271 case UNEQ_EXPR:
2272 return COMPCODE_UNEQ;
2273 case UNLE_EXPR:
2274 return COMPCODE_UNLE;
2275 case UNGT_EXPR:
2276 return COMPCODE_UNGT;
2277 case LTGT_EXPR:
2278 return COMPCODE_LTGT;
2279 case UNGE_EXPR:
2280 return COMPCODE_UNGE;
2281 default:
2282 gcc_unreachable ();
2283 }
2284 }
2285
2286 /* Convert a compcode bit-based encoding of a comparison operator back
2287 to GCC's enum tree_code representation. This function is the
2288 inverse of comparison_to_compcode. */
2289
2290 static enum tree_code
2291 compcode_to_comparison (enum comparison_code code)
2292 {
2293 switch (code)
2294 {
2295 case COMPCODE_LT:
2296 return LT_EXPR;
2297 case COMPCODE_EQ:
2298 return EQ_EXPR;
2299 case COMPCODE_LE:
2300 return LE_EXPR;
2301 case COMPCODE_GT:
2302 return GT_EXPR;
2303 case COMPCODE_NE:
2304 return NE_EXPR;
2305 case COMPCODE_GE:
2306 return GE_EXPR;
2307 case COMPCODE_ORD:
2308 return ORDERED_EXPR;
2309 case COMPCODE_UNORD:
2310 return UNORDERED_EXPR;
2311 case COMPCODE_UNLT:
2312 return UNLT_EXPR;
2313 case COMPCODE_UNEQ:
2314 return UNEQ_EXPR;
2315 case COMPCODE_UNLE:
2316 return UNLE_EXPR;
2317 case COMPCODE_UNGT:
2318 return UNGT_EXPR;
2319 case COMPCODE_LTGT:
2320 return LTGT_EXPR;
2321 case COMPCODE_UNGE:
2322 return UNGE_EXPR;
2323 default:
2324 gcc_unreachable ();
2325 }
2326 }
2327
2328 /* Return a tree for the comparison which is the combination of
2329 doing the AND or OR (depending on CODE) of the two operations LCODE
2330 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2331 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2332 if this makes the transformation invalid. */
2333
2334 tree
2335 combine_comparisons (location_t loc,
2336 enum tree_code code, enum tree_code lcode,
2337 enum tree_code rcode, tree truth_type,
2338 tree ll_arg, tree lr_arg)
2339 {
2340 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2341 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2342 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2343 int compcode;
2344
2345 switch (code)
2346 {
2347 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2348 compcode = lcompcode & rcompcode;
2349 break;
2350
2351 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2352 compcode = lcompcode | rcompcode;
2353 break;
2354
2355 default:
2356 return NULL_TREE;
2357 }
2358
2359 if (!honor_nans)
2360 {
2361 /* Eliminate unordered comparisons, as well as LTGT and ORD
2362 which are not used unless the mode has NaNs. */
2363 compcode &= ~COMPCODE_UNORD;
2364 if (compcode == COMPCODE_LTGT)
2365 compcode = COMPCODE_NE;
2366 else if (compcode == COMPCODE_ORD)
2367 compcode = COMPCODE_TRUE;
2368 }
2369 else if (flag_trapping_math)
2370 {
2371 /* Check that the original operation and the optimized ones will trap
2372 under the same condition. */
2373 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2374 && (lcompcode != COMPCODE_EQ)
2375 && (lcompcode != COMPCODE_ORD);
2376 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2377 && (rcompcode != COMPCODE_EQ)
2378 && (rcompcode != COMPCODE_ORD);
2379 bool trap = (compcode & COMPCODE_UNORD) == 0
2380 && (compcode != COMPCODE_EQ)
2381 && (compcode != COMPCODE_ORD);
2382
2383 /* In a short-circuited boolean expression the LHS might be
2384 such that the RHS, if evaluated, will never trap. For
2385 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2386 if neither x nor y is NaN. (This is a mixed blessing: for
2387 example, the expression above will never trap, hence
2388 optimizing it to x < y would be invalid). */
2389 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2390 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2391 rtrap = false;
2392
2393 /* If the comparison was short-circuited, and only the RHS
2394 trapped, we may now generate a spurious trap. */
2395 if (rtrap && !ltrap
2396 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2397 return NULL_TREE;
2398
2399 /* If we changed the conditions that cause a trap, we lose. */
2400 if ((ltrap || rtrap) != trap)
2401 return NULL_TREE;
2402 }
2403
2404 if (compcode == COMPCODE_TRUE)
2405 return constant_boolean_node (true, truth_type);
2406 else if (compcode == COMPCODE_FALSE)
2407 return constant_boolean_node (false, truth_type);
2408 else
2409 {
2410 enum tree_code tcode;
2411
2412 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2413 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2414 }
2415 }
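
/* Editor's note: a worked example, not part of the original source.
   Because the compcode encoding gives LT, EQ, GT and "unordered" one
   bit each, the AND/OR in combine_comparisons is plain bit
   arithmetic.  Combining (a < b) with (a == b) under TRUTH_OR_EXPR
   computes

     comparison_to_compcode (LT_EXPR) | comparison_to_compcode (EQ_EXPR)
       == COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE

   and compcode_to_comparison turns that back into a single a <= b.  */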
2416 \f
2417 /* Return nonzero if two operands (typically of the same tree node)
2418 are necessarily equal. If either argument has side-effects this
2419 function returns zero. FLAGS modifies behavior as follows:
2420
2421 If OEP_ONLY_CONST is set, only return nonzero for constants.
2422 This function tests whether the operands are indistinguishable;
2423 it does not test whether they are equal using C's == operation.
2424 The distinction is important for IEEE floating point, because
2425 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2426 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2427
2428 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2429 even though it may hold multiple values during a function.
2430 This is because a GCC tree node guarantees that nothing else is
2431 executed between the evaluation of its "operands" (which may often
2432 be evaluated in arbitrary order). Hence if the operands themselves
2433 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2434 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2435 unset means assuming isochronic (or instantaneous) tree equivalence.
2436 Unless comparing arbitrary expression trees, such as from different
2437 statements, this flag can usually be left unset.
2438
2439 If OEP_PURE_SAME is set, then pure functions with identical arguments
2440 are considered the same. It is used when the caller has other ways
2441 to ensure that global memory is unchanged in between. */
2442
2443 int
2444 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2445 {
2446 /* If either is ERROR_MARK, they aren't equal. */
2447 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2448 || TREE_TYPE (arg0) == error_mark_node
2449 || TREE_TYPE (arg1) == error_mark_node)
2450 return 0;
2451
2452 /* Similar, if either does not have a type (like a released SSA name),
2453 they aren't equal. */
2454 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2455 return 0;
2456
2457 /* Check equality of integer constants before bailing out due to
2458 precision differences. */
2459 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2460 return tree_int_cst_equal (arg0, arg1);
2461
2462 /* If the two types don't have the same signedness, then we can't consider
2463 them equal. We must check this before the STRIP_NOPS calls
2464 because they may change the signedness of the arguments. As pointers
2465 strictly don't have a signedness, require either two pointers or
2466 two non-pointers as well. */
2467 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2468 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2469 return 0;
2470
2471 /* We cannot consider pointers to different address spaces equal. */
2472 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2473 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2474 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2475 return 0;
2476
2477 /* If the two types don't have the same precision, then it is not safe
2478 to strip NOPs. */
2479 if (element_precision (TREE_TYPE (arg0))
2480 != element_precision (TREE_TYPE (arg1)))
2481 return 0;
2482
2483 STRIP_NOPS (arg0);
2484 STRIP_NOPS (arg1);
2485
2486 /* In case both args are comparisons but with different comparison
2487 code, try to swap the comparison operands of one arg to produce
2488 a match and compare that variant. */
2489 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2490 && COMPARISON_CLASS_P (arg0)
2491 && COMPARISON_CLASS_P (arg1))
2492 {
2493 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2494
2495 if (TREE_CODE (arg0) == swap_code)
2496 return operand_equal_p (TREE_OPERAND (arg0, 0),
2497 TREE_OPERAND (arg1, 1), flags)
2498 && operand_equal_p (TREE_OPERAND (arg0, 1),
2499 TREE_OPERAND (arg1, 0), flags);
2500 }
2501
2502 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2503 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2504 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2505 return 0;
2506
2507 /* This is needed for conversions and for COMPONENT_REF.
2508 Might as well play it safe and always test this. */
2509 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2510 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2511 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2512 return 0;
2513
2514 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2515 We don't care about side effects in that case because the SAVE_EXPR
2516 takes care of that for us. In all other cases, two expressions are
2517 equal if they have no side effects. If we have two identical
2518 expressions with side effects that should be treated the same due
2519 to the only side effects being identical SAVE_EXPR's, that will
2520 be detected in the recursive calls below.
2521 If we are taking an invariant address of two identical objects
2522 they are necessarily equal as well. */
2523 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2524 && (TREE_CODE (arg0) == SAVE_EXPR
2525 || (flags & OEP_CONSTANT_ADDRESS_OF)
2526 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2527 return 1;
2528
2529 /* Next handle constant cases, those for which we can return 1 even
2530 if ONLY_CONST is set. */
2531 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2532 switch (TREE_CODE (arg0))
2533 {
2534 case INTEGER_CST:
2535 return tree_int_cst_equal (arg0, arg1);
2536
2537 case FIXED_CST:
2538 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2539 TREE_FIXED_CST (arg1));
2540
2541 case REAL_CST:
2542 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2543 TREE_REAL_CST (arg1)))
2544 return 1;
2545
2546
2547 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2548 {
2549 /* If we do not distinguish between signed and unsigned zero,
2550 consider them equal. */
2551 if (real_zerop (arg0) && real_zerop (arg1))
2552 return 1;
2553 }
2554 return 0;
2555
2556 case VECTOR_CST:
2557 {
2558 unsigned i;
2559
2560 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2561 return 0;
2562
2563 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2564 {
2565 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2566 VECTOR_CST_ELT (arg1, i), flags))
2567 return 0;
2568 }
2569 return 1;
2570 }
2571
2572 case COMPLEX_CST:
2573 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2574 flags)
2575 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2576 flags));
2577
2578 case STRING_CST:
2579 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2580 && ! memcmp (TREE_STRING_POINTER (arg0),
2581 TREE_STRING_POINTER (arg1),
2582 TREE_STRING_LENGTH (arg0)));
2583
2584 case ADDR_EXPR:
2585 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2586 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2587 ? OEP_CONSTANT_ADDRESS_OF : 0);
2588 default:
2589 break;
2590 }
2591
2592 if (flags & OEP_ONLY_CONST)
2593 return 0;
2594
2595 /* Define macros to test an operand from arg0 and arg1 for equality and a
2596 variant that allows null and views null as being different from any
2597 non-null value. In the latter case, if either is null, then both
2598 must be; otherwise, do the normal comparison. */
2599 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2600 TREE_OPERAND (arg1, N), flags)
2601
2602 #define OP_SAME_WITH_NULL(N) \
2603 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2604 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2605
2606 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2607 {
2608 case tcc_unary:
2609 /* Two conversions are equal only if signedness and modes match. */
2610 switch (TREE_CODE (arg0))
2611 {
2612 CASE_CONVERT:
2613 case FIX_TRUNC_EXPR:
2614 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2615 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2616 return 0;
2617 break;
2618 default:
2619 break;
2620 }
2621
2622 return OP_SAME (0);
2623
2624
2625 case tcc_comparison:
2626 case tcc_binary:
2627 if (OP_SAME (0) && OP_SAME (1))
2628 return 1;
2629
2630 /* For commutative ops, allow the other order. */
2631 return (commutative_tree_code (TREE_CODE (arg0))
2632 && operand_equal_p (TREE_OPERAND (arg0, 0),
2633 TREE_OPERAND (arg1, 1), flags)
2634 && operand_equal_p (TREE_OPERAND (arg0, 1),
2635 TREE_OPERAND (arg1, 0), flags));
2636
2637 case tcc_reference:
2638 /* If either of the pointer (or reference) expressions we are
2639 dereferencing contain a side effect, these cannot be equal,
2640 but their addresses can be. */
2641 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2642 && (TREE_SIDE_EFFECTS (arg0)
2643 || TREE_SIDE_EFFECTS (arg1)))
2644 return 0;
2645
2646 switch (TREE_CODE (arg0))
2647 {
2648 case INDIRECT_REF:
2649 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2650 return OP_SAME (0);
2651
2652 case REALPART_EXPR:
2653 case IMAGPART_EXPR:
2654 return OP_SAME (0);
2655
2656 case TARGET_MEM_REF:
2657 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2658 /* Require equal extra operands and then fall through to MEM_REF
2659 handling of the two common operands. */
2660 if (!OP_SAME_WITH_NULL (2)
2661 || !OP_SAME_WITH_NULL (3)
2662 || !OP_SAME_WITH_NULL (4))
2663 return 0;
2664 /* Fallthru. */
2665 case MEM_REF:
2666 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2667 /* Require equal access sizes, and similar pointer types.
2668 We can have incomplete types for array references of
2669 variable-sized arrays from the Fortran frontend
2670 though. Also verify the types are compatible. */
2671 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2672 || (TYPE_SIZE (TREE_TYPE (arg0))
2673 && TYPE_SIZE (TREE_TYPE (arg1))
2674 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2675 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2676 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2677 && alias_ptr_types_compatible_p
2678 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2679 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2680 && OP_SAME (0) && OP_SAME (1));
2681
2682 case ARRAY_REF:
2683 case ARRAY_RANGE_REF:
2684 /* Operands 2 and 3 may be null.
2685 Compare the array index by value if it is constant first as we
2686 may have different types but same value here. */
2687 if (!OP_SAME (0))
2688 return 0;
2689 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2690 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2691 TREE_OPERAND (arg1, 1))
2692 || OP_SAME (1))
2693 && OP_SAME_WITH_NULL (2)
2694 && OP_SAME_WITH_NULL (3));
2695
2696 case COMPONENT_REF:
2697 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2698 may be NULL when we're called to compare MEM_EXPRs. */
2699 if (!OP_SAME_WITH_NULL (0)
2700 || !OP_SAME (1))
2701 return 0;
2702 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2703 return OP_SAME_WITH_NULL (2);
2704
2705 case BIT_FIELD_REF:
2706 if (!OP_SAME (0))
2707 return 0;
2708 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2709 return OP_SAME (1) && OP_SAME (2);
2710
2711 default:
2712 return 0;
2713 }
2714
2715 case tcc_expression:
2716 switch (TREE_CODE (arg0))
2717 {
2718 case ADDR_EXPR:
2719 case TRUTH_NOT_EXPR:
2720 return OP_SAME (0);
2721
2722 case TRUTH_ANDIF_EXPR:
2723 case TRUTH_ORIF_EXPR:
2724 return OP_SAME (0) && OP_SAME (1);
2725
2726 case FMA_EXPR:
2727 case WIDEN_MULT_PLUS_EXPR:
2728 case WIDEN_MULT_MINUS_EXPR:
2729 if (!OP_SAME (2))
2730 return 0;
2731 /* The multiplication operands are commutative. */
2732 /* FALLTHRU */
2733
2734 case TRUTH_AND_EXPR:
2735 case TRUTH_OR_EXPR:
2736 case TRUTH_XOR_EXPR:
2737 if (OP_SAME (0) && OP_SAME (1))
2738 return 1;
2739
2740 /* Otherwise take into account this is a commutative operation. */
2741 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2742 TREE_OPERAND (arg1, 1), flags)
2743 && operand_equal_p (TREE_OPERAND (arg0, 1),
2744 TREE_OPERAND (arg1, 0), flags));
2745
2746 case COND_EXPR:
2747 case VEC_COND_EXPR:
2748 case DOT_PROD_EXPR:
2749 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2750
2751 default:
2752 return 0;
2753 }
2754
2755 case tcc_vl_exp:
2756 switch (TREE_CODE (arg0))
2757 {
2758 case CALL_EXPR:
2759 /* If the CALL_EXPRs call different functions, then they
2760 clearly cannot be equal. */
2761 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2762 flags))
2763 return 0;
2764
2765 {
2766 unsigned int cef = call_expr_flags (arg0);
2767 if (flags & OEP_PURE_SAME)
2768 cef &= ECF_CONST | ECF_PURE;
2769 else
2770 cef &= ECF_CONST;
2771 if (!cef)
2772 return 0;
2773 }
2774
2775 /* Now see if all the arguments are the same. */
2776 {
2777 const_call_expr_arg_iterator iter0, iter1;
2778 const_tree a0, a1;
2779 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2780 a1 = first_const_call_expr_arg (arg1, &iter1);
2781 a0 && a1;
2782 a0 = next_const_call_expr_arg (&iter0),
2783 a1 = next_const_call_expr_arg (&iter1))
2784 if (! operand_equal_p (a0, a1, flags))
2785 return 0;
2786
2787 /* If we get here and both argument lists are exhausted
2788 then the CALL_EXPRs are equal. */
2789 return ! (a0 || a1);
2790 }
2791 default:
2792 return 0;
2793 }
2794
2795 case tcc_declaration:
2796 /* Consider __builtin_sqrt equal to sqrt. */
2797 return (TREE_CODE (arg0) == FUNCTION_DECL
2798 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2799 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2800 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2801
2802 default:
2803 return 0;
2804 }
2805
2806 #undef OP_SAME
2807 #undef OP_SAME_WITH_NULL
2808 }
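
/* Editor's note: an illustrative check, not part of the original
   source, of the IEEE distinction documented above operand_equal_p:
   -0.0 and 0.0 are equal under C's == yet remain distinguishable,
   which is why OEP_ONLY_CONST asks for indistinguishability rather
   than numeric equality.  */

static void
signed_zero_demo (void)
{
  double pz = 0.0, nz = -0.0;
  gcc_assert (pz == nz);                    /* equal under == ...  */
  gcc_assert (__builtin_signbit (nz) != 0); /* ... but told apart.  */
}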
2809 \f
2810 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2811 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2812
2813 When in doubt, return 0. */
2814
2815 static int
2816 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2817 {
2818 int unsignedp1, unsignedpo;
2819 tree primarg0, primarg1, primother;
2820 unsigned int correct_width;
2821
2822 if (operand_equal_p (arg0, arg1, 0))
2823 return 1;
2824
2825 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2826 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2827 return 0;
2828
2829 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2830 and see if the inner values are the same. This removes any
2831 signedness comparison, which doesn't matter here. */
2832 primarg0 = arg0, primarg1 = arg1;
2833 STRIP_NOPS (primarg0);
2834 STRIP_NOPS (primarg1);
2835 if (operand_equal_p (primarg0, primarg1, 0))
2836 return 1;
2837
2838 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2839 actual comparison operand, ARG0.
2840
2841 First throw away any conversions to wider types
2842 already present in the operands. */
2843
2844 primarg1 = get_narrower (arg1, &unsignedp1);
2845 primother = get_narrower (other, &unsignedpo);
2846
2847 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2848 if (unsignedp1 == unsignedpo
2849 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2850 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2851 {
2852 tree type = TREE_TYPE (arg0);
2853
2854 /* Make sure shorter operand is extended the right way
2855 to match the longer operand. */
2856 primarg1 = fold_convert (signed_or_unsigned_type_for
2857 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2858
2859 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2860 return 1;
2861 }
2862
2863 return 0;
2864 }
2865 \f
2866 /* See if ARG is an expression that is either a comparison or is performing
2867 arithmetic on comparisons. The comparisons must only be comparing
2868 two different values, which will be stored in *CVAL1 and *CVAL2; if
2869 they are nonzero it means that some operands have already been found.
2870 No variables may be used anywhere else in the expression except in the
2871 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2872 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2873
2874 If this is true, return 1. Otherwise, return zero. */
2875
2876 static int
2877 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2878 {
2879 enum tree_code code = TREE_CODE (arg);
2880 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2881
2882 /* We can handle some of the tcc_expression cases here. */
2883 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2884 tclass = tcc_unary;
2885 else if (tclass == tcc_expression
2886 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2887 || code == COMPOUND_EXPR))
2888 tclass = tcc_binary;
2889
2890 else if (tclass == tcc_expression && code == SAVE_EXPR
2891 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2892 {
2893 /* If we've already found a CVAL1 or CVAL2, this expression is
2894 too complex to handle. */
2895 if (*cval1 || *cval2)
2896 return 0;
2897
2898 tclass = tcc_unary;
2899 *save_p = 1;
2900 }
2901
2902 switch (tclass)
2903 {
2904 case tcc_unary:
2905 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2906
2907 case tcc_binary:
2908 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2909 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2910 cval1, cval2, save_p));
2911
2912 case tcc_constant:
2913 return 1;
2914
2915 case tcc_expression:
2916 if (code == COND_EXPR)
2917 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2918 cval1, cval2, save_p)
2919 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2920 cval1, cval2, save_p)
2921 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2922 cval1, cval2, save_p));
2923 return 0;
2924
2925 case tcc_comparison:
2926 /* First see if we can handle the first operand, then the second. For
2927 the second operand, we know *CVAL1 can't be zero. It must be that
2928 one side of the comparison is each of the values; test for the
2929 case where this isn't true by failing if the two operands
2930 are the same. */
2931
2932 if (operand_equal_p (TREE_OPERAND (arg, 0),
2933 TREE_OPERAND (arg, 1), 0))
2934 return 0;
2935
2936 if (*cval1 == 0)
2937 *cval1 = TREE_OPERAND (arg, 0);
2938 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2939 ;
2940 else if (*cval2 == 0)
2941 *cval2 = TREE_OPERAND (arg, 0);
2942 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2943 ;
2944 else
2945 return 0;
2946
2947 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2948 ;
2949 else if (*cval2 == 0)
2950 *cval2 = TREE_OPERAND (arg, 1);
2951 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2952 ;
2953 else
2954 return 0;
2955
2956 return 1;
2957
2958 default:
2959 return 0;
2960 }
2961 }
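
/* Editor's note: an illustrative example, not part of the original
   source.  An expression such as (x < y) & (y >= x) satisfies
   twoval_comparison_p: every variable occurs only inside the
   comparisons, so the caller ends up with *cval1 == x and
   *cval2 == y and can then re-evaluate the tree with eval_subst.  */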
2962 \f
2963 /* ARG is a tree that is known to contain just arithmetic operations and
2964 comparisons. Evaluate the operations in the tree substituting NEW0 for
2965 any occurrence of OLD0 as an operand of a comparison and likewise for
2966 NEW1 and OLD1. */
2967
2968 static tree
2969 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2970 tree old1, tree new1)
2971 {
2972 tree type = TREE_TYPE (arg);
2973 enum tree_code code = TREE_CODE (arg);
2974 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2975
2976 /* We can handle some of the tcc_expression cases here. */
2977 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2978 tclass = tcc_unary;
2979 else if (tclass == tcc_expression
2980 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2981 tclass = tcc_binary;
2982
2983 switch (tclass)
2984 {
2985 case tcc_unary:
2986 return fold_build1_loc (loc, code, type,
2987 eval_subst (loc, TREE_OPERAND (arg, 0),
2988 old0, new0, old1, new1));
2989
2990 case tcc_binary:
2991 return fold_build2_loc (loc, code, type,
2992 eval_subst (loc, TREE_OPERAND (arg, 0),
2993 old0, new0, old1, new1),
2994 eval_subst (loc, TREE_OPERAND (arg, 1),
2995 old0, new0, old1, new1));
2996
2997 case tcc_expression:
2998 switch (code)
2999 {
3000 case SAVE_EXPR:
3001 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3002 old1, new1);
3003
3004 case COMPOUND_EXPR:
3005 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3006 old1, new1);
3007
3008 case COND_EXPR:
3009 return fold_build3_loc (loc, code, type,
3010 eval_subst (loc, TREE_OPERAND (arg, 0),
3011 old0, new0, old1, new1),
3012 eval_subst (loc, TREE_OPERAND (arg, 1),
3013 old0, new0, old1, new1),
3014 eval_subst (loc, TREE_OPERAND (arg, 2),
3015 old0, new0, old1, new1));
3016 default:
3017 break;
3018 }
3019 /* Fall through - ??? */
3020
3021 case tcc_comparison:
3022 {
3023 tree arg0 = TREE_OPERAND (arg, 0);
3024 tree arg1 = TREE_OPERAND (arg, 1);
3025
3026 /* We need to check both for exact equality and tree equality. The
3027 former will be true if the operand has a side-effect. In that
3028 case, we know the operand occurred exactly once. */
3029
3030 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3031 arg0 = new0;
3032 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3033 arg0 = new1;
3034
3035 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3036 arg1 = new0;
3037 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3038 arg1 = new1;
3039
3040 return fold_build2_loc (loc, code, type, arg0, arg1);
3041 }
3042
3043 default:
3044 return arg;
3045 }
3046 }
3047 \f
3048 /* Return a tree for the case when the result of an expression is RESULT
3049 converted to TYPE and OMITTED was previously an operand of the expression
3050 but is now not needed (e.g., we folded OMITTED * 0).
3051
3052 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3053 the conversion of RESULT to TYPE. */
3054
3055 tree
3056 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3057 {
3058 tree t = fold_convert_loc (loc, type, result);
3059
3060 /* If the resulting operand is an empty statement, just return the omitted
3061 statement cast to void. */
3062 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3063 return build1_loc (loc, NOP_EXPR, void_type_node,
3064 fold_ignored_result (omitted));
3065
3066 if (TREE_SIDE_EFFECTS (omitted))
3067 return build2_loc (loc, COMPOUND_EXPR, type,
3068 fold_ignored_result (omitted), t);
3069
3070 return non_lvalue_loc (loc, t);
3071 }
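
/* Editor's sketch, not part of the original source: at the C level,
   folding E * 0 where E has side effects must still evaluate E, so
   omit_one_operand_loc builds the equivalent of a comma expression
   rather than a bare constant.  Roughly:  */

static int
omit_operand_sketch (int *p)
{
  /* What the fold amounts to for (*p)++ * 0: evaluate the omitted
     operand for its side effect, then yield the folded result.  */
  return ((*p)++, 0);
}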
3072
3073 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3074
3075 static tree
3076 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3077 tree omitted)
3078 {
3079 tree t = fold_convert_loc (loc, type, result);
3080
3081 /* If the resulting operand is an empty statement, just return the omitted
3082 statement casted to void. */
3083 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3084 return build1_loc (loc, NOP_EXPR, void_type_node,
3085 fold_ignored_result (omitted));
3086
3087 if (TREE_SIDE_EFFECTS (omitted))
3088 return build2_loc (loc, COMPOUND_EXPR, type,
3089 fold_ignored_result (omitted), t);
3090
3091 return pedantic_non_lvalue_loc (loc, t);
3092 }
3093
3094 /* Return a tree for the case when the result of an expression is RESULT
3095 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3096 of the expression but are now not needed.
3097
3098 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3099 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3100 evaluated before OMITTED2. Otherwise, if neither has side effects,
3101 just do the conversion of RESULT to TYPE. */
3102
3103 tree
3104 omit_two_operands_loc (location_t loc, tree type, tree result,
3105 tree omitted1, tree omitted2)
3106 {
3107 tree t = fold_convert_loc (loc, type, result);
3108
3109 if (TREE_SIDE_EFFECTS (omitted2))
3110 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3111 if (TREE_SIDE_EFFECTS (omitted1))
3112 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3113
3114 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3115 }
3116
3117 \f
3118 /* Return a simplified tree node for the truth-negation of ARG. This
3119 never alters ARG itself. We assume that ARG is an operation that
3120 returns a truth value (0 or 1).
3121
3122 FIXME: one would think we would fold the result, but it causes
3123 problems with the dominator optimizer. */
3124
3125 static tree
3126 fold_truth_not_expr (location_t loc, tree arg)
3127 {
3128 tree type = TREE_TYPE (arg);
3129 enum tree_code code = TREE_CODE (arg);
3130 location_t loc1, loc2;
3131
3132 /* If this is a comparison, we can simply invert it, except for
3133 floating-point non-equality comparisons, in which case we just
3134 enclose a TRUTH_NOT_EXPR around what we have. */
3135
3136 if (TREE_CODE_CLASS (code) == tcc_comparison)
3137 {
3138 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3139 if (FLOAT_TYPE_P (op_type)
3140 && flag_trapping_math
3141 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3142 && code != NE_EXPR && code != EQ_EXPR)
3143 return NULL_TREE;
3144
3145 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3146 if (code == ERROR_MARK)
3147 return NULL_TREE;
3148
3149 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3150 TREE_OPERAND (arg, 1));
3151 }
3152
3153 switch (code)
3154 {
3155 case INTEGER_CST:
3156 return constant_boolean_node (integer_zerop (arg), type);
3157
3158 case TRUTH_AND_EXPR:
3159 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3160 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3161 return build2_loc (loc, TRUTH_OR_EXPR, type,
3162 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3163 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3164
3165 case TRUTH_OR_EXPR:
3166 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3167 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3168 return build2_loc (loc, TRUTH_AND_EXPR, type,
3169 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3170 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3171
3172 case TRUTH_XOR_EXPR:
3173 /* Here we can invert either operand. We invert the first operand
3174 unless the second operand is a TRUTH_NOT_EXPR in which case our
3175 result is the XOR of the first operand with the inside of the
3176 negation of the second operand. */
3177
3178 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3179 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3180 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3181 else
3182 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3183 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3184 TREE_OPERAND (arg, 1));
3185
3186 case TRUTH_ANDIF_EXPR:
3187 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3188 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3189 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3190 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3191 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3192
3193 case TRUTH_ORIF_EXPR:
3194 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3195 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3196 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3197 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3198 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3199
3200 case TRUTH_NOT_EXPR:
3201 return TREE_OPERAND (arg, 0);
3202
3203 case COND_EXPR:
3204 {
3205 tree arg1 = TREE_OPERAND (arg, 1);
3206 tree arg2 = TREE_OPERAND (arg, 2);
3207
3208 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3209 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3210
3211 /* A COND_EXPR may have a throw as one operand, which
3212 then has void type. Just leave void operands
3213 as they are. */
3214 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3215 VOID_TYPE_P (TREE_TYPE (arg1))
3216 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3217 VOID_TYPE_P (TREE_TYPE (arg2))
3218 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3219 }
3220
3221 case COMPOUND_EXPR:
3222 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3223 return build2_loc (loc, COMPOUND_EXPR, type,
3224 TREE_OPERAND (arg, 0),
3225 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3226
3227 case NON_LVALUE_EXPR:
3228 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3229 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3230
3231 CASE_CONVERT:
3232 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3233 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3234
3235 /* ... fall through ... */
3236
3237 case FLOAT_EXPR:
3238 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3239 return build1_loc (loc, TREE_CODE (arg), type,
3240 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3241
3242 case BIT_AND_EXPR:
3243 if (!integer_onep (TREE_OPERAND (arg, 1)))
3244 return NULL_TREE;
3245 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3246
3247 case SAVE_EXPR:
3248 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3249
3250 case CLEANUP_POINT_EXPR:
3251 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3252 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3253 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3254
3255 default:
3256 return NULL_TREE;
3257 }
3258 }
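
/* Editor's note: an illustrative check, not part of the original
   source.  The TRUTH_AND_EXPR/TRUTH_OR_EXPR cases above are De
   Morgan's laws, verified here over all boolean inputs.  */

static void
demorgan_demo (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      gcc_assert (!(a && b) == (!a || !b)
		  && !(a || b) == (!a && !b));
}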
3259
3260 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3261 assume that ARG is an operation that returns a truth value (0 or 1
3262 for scalars, 0 or -1 for vectors). Return the folded expression if
3263 folding is successful. Otherwise, return NULL_TREE. */
3264
3265 static tree
3266 fold_invert_truthvalue (location_t loc, tree arg)
3267 {
3268 tree type = TREE_TYPE (arg);
3269 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3270 ? BIT_NOT_EXPR
3271 : TRUTH_NOT_EXPR,
3272 type, arg);
3273 }
3274
3275 /* Return a simplified tree node for the truth-negation of ARG. This
3276 never alters ARG itself. We assume that ARG is an operation that
3277 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3278
3279 tree
3280 invert_truthvalue_loc (location_t loc, tree arg)
3281 {
3282 if (TREE_CODE (arg) == ERROR_MARK)
3283 return arg;
3284
3285 tree type = TREE_TYPE (arg);
3286 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3287 ? BIT_NOT_EXPR
3288 : TRUTH_NOT_EXPR,
3289 type, arg);
3290 }
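
/* Editor's note: an illustrative check, not part of the original
   source.  Vector truth values are 0 or -1 (all bits set), so the
   BIT_NOT_EXPR chosen above really is truth negation for them.  */

static void
vector_truth_not_demo (void)
{
  gcc_assert (~0 == -1 && ~(-1) == 0);
}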
3291
3292 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3293 operands are another bit-wise operation with a common input. If so,
3294 distribute the bit operations to save an operation and possibly two if
3295 constants are involved. For example, convert
3296 (A | B) & (A | C) into A | (B & C)
3297 Further simplification will occur if B and C are constants.
3298
3299 If this optimization cannot be done, 0 will be returned. */
3300
3301 static tree
3302 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3303 tree arg0, tree arg1)
3304 {
3305 tree common;
3306 tree left, right;
3307
3308 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3309 || TREE_CODE (arg0) == code
3310 || (TREE_CODE (arg0) != BIT_AND_EXPR
3311 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3312 return 0;
3313
3314 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3315 {
3316 common = TREE_OPERAND (arg0, 0);
3317 left = TREE_OPERAND (arg0, 1);
3318 right = TREE_OPERAND (arg1, 1);
3319 }
3320 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3321 {
3322 common = TREE_OPERAND (arg0, 0);
3323 left = TREE_OPERAND (arg0, 1);
3324 right = TREE_OPERAND (arg1, 0);
3325 }
3326 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3327 {
3328 common = TREE_OPERAND (arg0, 1);
3329 left = TREE_OPERAND (arg0, 0);
3330 right = TREE_OPERAND (arg1, 1);
3331 }
3332 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3333 {
3334 common = TREE_OPERAND (arg0, 1);
3335 left = TREE_OPERAND (arg0, 0);
3336 right = TREE_OPERAND (arg1, 0);
3337 }
3338 else
3339 return 0;
3340
3341 common = fold_convert_loc (loc, type, common);
3342 left = fold_convert_loc (loc, type, left);
3343 right = fold_convert_loc (loc, type, right);
3344 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3345 fold_build2_loc (loc, code, type, left, right));
3346 }
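
/* Editor's note: an illustrative check, not part of the original
   source.  The transformation above is the distributive law of
   boolean algebra; a brute-force verification over small bit
   patterns:  */

static void
distribute_bit_demo (void)
{
  for (unsigned int a = 0; a < 8; a++)
    for (unsigned int b = 0; b < 8; b++)
      for (unsigned int c = 0; c < 8; c++)
	gcc_assert (((a | b) & (a | c)) == (a | (b & c)));
}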
3347
3348 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3349 with code CODE. This optimization is unsafe. */
3350 static tree
3351 distribute_real_division (location_t loc, enum tree_code code, tree type,
3352 tree arg0, tree arg1)
3353 {
3354 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3355 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3356
3357 /* (A / C) +- (B / C) -> (A +- B) / C. */
3358 if (mul0 == mul1
3359 && operand_equal_p (TREE_OPERAND (arg0, 1),
3360 TREE_OPERAND (arg1, 1), 0))
3361 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3362 fold_build2_loc (loc, code, type,
3363 TREE_OPERAND (arg0, 0),
3364 TREE_OPERAND (arg1, 0)),
3365 TREE_OPERAND (arg0, 1));
3366
3367 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3368 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3369 TREE_OPERAND (arg1, 0), 0)
3370 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3371 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3372 {
3373 REAL_VALUE_TYPE r0, r1;
3374 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3375 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3376 if (!mul0)
3377 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3378 if (!mul1)
3379 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3380 real_arithmetic (&r0, code, &r0, &r1);
3381 return fold_build2_loc (loc, MULT_EXPR, type,
3382 TREE_OPERAND (arg0, 0),
3383 build_real (type, r0));
3384 }
3385
3386 return NULL_TREE;
3387 }
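
/* Editor's sketch, not part of the original source: why the comment
   above calls this unsafe.  Each floating-point division rounds
   separately, so (A / C) + (B / C) and (A + B) / C need not round to
   the same value; callers are expected to guard this rewrite with
   unsafe-math-style flags.  */

static int
rdiv_distribute_demo (double a, double b, double c)
{
  /* May return 0 for some IEEE inputs: the two forms are not
     guaranteed to be bit-identical.  */
  return a / c + b / c == (a + b) / c;
}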
3388 \f
3389 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3390 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3391
3392 static tree
3393 make_bit_field_ref (location_t loc, tree inner, tree type,
3394 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3395 {
3396 tree result, bftype;
3397
3398 if (bitpos == 0)
3399 {
3400 tree size = TYPE_SIZE (TREE_TYPE (inner));
3401 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3402 || POINTER_TYPE_P (TREE_TYPE (inner)))
3403 && tree_fits_shwi_p (size)
3404 && tree_to_shwi (size) == bitsize)
3405 return fold_convert_loc (loc, type, inner);
3406 }
3407
3408 bftype = type;
3409 if (TYPE_PRECISION (bftype) != bitsize
3410 || TYPE_UNSIGNED (bftype) == !unsignedp)
3411 bftype = build_nonstandard_integer_type (bitsize, 0);
3412
3413 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3414 size_int (bitsize), bitsize_int (bitpos));
3415
3416 if (bftype != type)
3417 result = fold_convert_loc (loc, type, result);
3418
3419 return result;
3420 }
3421
3422 /* Optimize a bit-field compare.
3423
3424 There are two cases: First is a compare against a constant and the
3425 second is a comparison of two items where the fields are at the same
3426 bit position relative to the start of a chunk (byte, halfword, word)
3427 large enough to contain it. In these cases we can avoid the shift
3428 implicit in bitfield extractions.
3429
3430 For constants, we emit a compare of the shifted constant with the
3431 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3432 compared. For two fields at the same position, we do the ANDs with the
3433 similar mask and compare the result of the ANDs.
3434
3435 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3436 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3437 are the left and right operands of the comparison, respectively.
3438
3439 If the optimization described above can be done, we return the resulting
3440 tree. Otherwise we return zero. */
3441
3442 static tree
3443 optimize_bit_field_compare (location_t loc, enum tree_code code,
3444 tree compare_type, tree lhs, tree rhs)
3445 {
3446 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3447 tree type = TREE_TYPE (lhs);
3448 tree unsigned_type;
3449 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3450 enum machine_mode lmode, rmode, nmode;
3451 int lunsignedp, runsignedp;
3452 int lvolatilep = 0, rvolatilep = 0;
3453 tree linner, rinner = NULL_TREE;
3454 tree mask;
3455 tree offset;
3456
3457 /* Get all the information about the extractions being done. If the bit size
3458 is the same as the size of the underlying object, we aren't doing an
3459 extraction at all and so can do nothing. We also don't want to
3460 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3461 then will no longer be able to replace it. */
3462 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3463 &lunsignedp, &lvolatilep, false);
3464 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3465 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3466 return 0;
3467
3468 if (!const_p)
3469 {
3470 /* If this is not a constant, we can only do something if bit positions,
3471 sizes, and signedness are the same. */
3472 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3473 &runsignedp, &rvolatilep, false);
3474
3475 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3476 || lunsignedp != runsignedp || offset != 0
3477 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3478 return 0;
3479 }
3480
3481 /* See if we can find a mode to refer to this field. We should be able to,
3482 but fail if we can't. */
3483 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3484 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3485 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3486 TYPE_ALIGN (TREE_TYPE (rinner))),
3487 word_mode, false);
3488 if (nmode == VOIDmode)
3489 return 0;
3490
3491 /* Set signed and unsigned types of the precision of this mode for the
3492 shifts below. */
3493 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3494
3495 /* Compute the bit position and size for the new reference and our offset
3496 within it. If the new reference is the same size as the original, we
3497 won't optimize anything, so return zero. */
3498 nbitsize = GET_MODE_BITSIZE (nmode);
3499 nbitpos = lbitpos & ~ (nbitsize - 1);
3500 lbitpos -= nbitpos;
3501 if (nbitsize == lbitsize)
3502 return 0;
3503
3504 if (BYTES_BIG_ENDIAN)
3505 lbitpos = nbitsize - lbitsize - lbitpos;
3506
3507 /* Make the mask to be used against the extracted field. */
3508 mask = build_int_cst_type (unsigned_type, -1);
3509 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3510 mask = const_binop (RSHIFT_EXPR, mask,
3511 size_int (nbitsize - lbitsize - lbitpos));
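  /* Editor's note (worked example, not in the original source): with
     nbitsize == 32, lbitsize == 5 and lbitpos == 9 the two shifts give

       0xffffffff << 27 == 0xf8000000
       0xf8000000 >> 18 == 0x00003e00

     i.e. five one bits covering exactly bit positions 9..13.  */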
3512
3513 if (! const_p)
3514 /* If not comparing with constant, just rework the comparison
3515 and return. */
3516 return fold_build2_loc (loc, code, compare_type,
3517 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3518 make_bit_field_ref (loc, linner,
3519 unsigned_type,
3520 nbitsize, nbitpos,
3521 1),
3522 mask),
3523 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3524 make_bit_field_ref (loc, rinner,
3525 unsigned_type,
3526 nbitsize, nbitpos,
3527 1),
3528 mask));
3529
3530 /* Otherwise, we are handling the constant case. See if the constant is too
3531 big for the field. Warn and return a tree for 0 (false) if so. We do
3532 this not only for its own sake, but to avoid having to test for this
3533 error case below. If we didn't, we might generate wrong code.
3534
3535 For unsigned fields, the constant shifted right by the field length should
3536 be all zero. For signed fields, the high-order bits should agree with
3537 the sign bit. */
3538
3539 if (lunsignedp)
3540 {
3541 if (wi::lrshift (rhs, lbitsize) != 0)
3542 {
3543 warning (0, "comparison is always %d due to width of bit-field",
3544 code == NE_EXPR);
3545 return constant_boolean_node (code == NE_EXPR, compare_type);
3546 }
3547 }
3548 else
3549 {
3550 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3551 if (tem != 0 && tem != -1)
3552 {
3553 warning (0, "comparison is always %d due to width of bit-field",
3554 code == NE_EXPR);
3555 return constant_boolean_node (code == NE_EXPR, compare_type);
3556 }
3557 }
3558
3559 /* Single-bit compares should always be against zero. */
3560 if (lbitsize == 1 && ! integer_zerop (rhs))
3561 {
3562 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3563 rhs = build_int_cst (type, 0);
3564 }
3565
3566 /* Make a new bitfield reference, shift the constant over the
3567 appropriate number of bits and mask it with the computed mask
3568 (in case this was a signed field). If we changed it, make a new one. */
3569 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3570
3571 rhs = const_binop (BIT_AND_EXPR,
3572 const_binop (LSHIFT_EXPR,
3573 fold_convert_loc (loc, unsigned_type, rhs),
3574 size_int (lbitpos)),
3575 mask);
3576
3577 lhs = build2_loc (loc, code, compare_type,
3578 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3579 return lhs;
3580 }
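
/* Editor's note: a source-level sketch, not part of the original
   source, of the constant case above.  Assuming a 5-bit field placed
   at bits 9..13 of a 32-bit chunk (matching the worked shift example
   in the function), a comparison such as

     struct S { unsigned pad : 9; unsigned f : 5; } s;
     ... s.f == 3 ...

   is rewritten into one masked word compare, roughly

     (CHUNK & 0x3e00) == (3 << 9)

   with no shift needed to extract the field.  */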
3581 \f
3582 /* Subroutine for fold_truth_andor_1: decode a field reference.
3583
3584 If EXP is a comparison reference, we return the innermost reference.
3585
3586 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3587 set to the starting bit number.
3588
3589 If the innermost field can be completely contained in a mode-sized
3590 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3591
3592 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3593 otherwise it is not changed.
3594
3595 *PUNSIGNEDP is set to the signedness of the field.
3596
3597 *PMASK is set to the mask used. This is either contained in a
3598 BIT_AND_EXPR or derived from the width of the field.
3599
3600 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3601
3602 Return 0 if this is not a component reference or is one that we can't
3603 do anything with. */
3604
3605 static tree
3606 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3607 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3608 int *punsignedp, int *pvolatilep,
3609 tree *pmask, tree *pand_mask)
3610 {
3611 tree outer_type = 0;
3612 tree and_mask = 0;
3613 tree mask, inner, offset;
3614 tree unsigned_type;
3615 unsigned int precision;
3616
3617 /* All the optimizations using this function assume integer fields.
3618 There are problems with FP fields since the type_for_size call
3619 below can fail for, e.g., XFmode. */
3620 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3621 return 0;
3622
3623 /* We are interested in the bare arrangement of bits, so strip everything
3624 that doesn't affect the machine mode. However, record the type of the
3625 outermost expression if it may matter below. */
3626 if (CONVERT_EXPR_P (exp)
3627 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3628 outer_type = TREE_TYPE (exp);
3629 STRIP_NOPS (exp);
3630
3631 if (TREE_CODE (exp) == BIT_AND_EXPR)
3632 {
3633 and_mask = TREE_OPERAND (exp, 1);
3634 exp = TREE_OPERAND (exp, 0);
3635 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3636 if (TREE_CODE (and_mask) != INTEGER_CST)
3637 return 0;
3638 }
3639
3640 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3641 punsignedp, pvolatilep, false);
3642 if ((inner == exp && and_mask == 0)
3643 || *pbitsize < 0 || offset != 0
3644 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3645 return 0;
3646
3647 /* If the number of bits in the reference is the same as the bitsize of
3648 the outer type, then the outer type gives the signedness. Otherwise
3649 (in case of a small bitfield) the signedness is unchanged. */
3650 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3651 *punsignedp = TYPE_UNSIGNED (outer_type);
3652
3653 /* Compute the mask to access the bitfield. */
3654 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3655 precision = TYPE_PRECISION (unsigned_type);
3656
3657 mask = build_int_cst_type (unsigned_type, -1);
3658
3659 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3660 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3661
3662 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3663 if (and_mask != 0)
3664 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3665 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3666
3667 *pmask = mask;
3668 *pand_mask = and_mask;
3669 return inner;
3670 }
3671
3672 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3673 bit positions and the type of MASK is signed. */
3674
3675 static int
3676 all_ones_mask_p (const_tree mask, unsigned int size)
3677 {
3678 tree type = TREE_TYPE (mask);
3679 unsigned int precision = TYPE_PRECISION (type);
3680
3681 /* If this function returns true when the type of the mask is
3682 UNSIGNED, then there will be errors. In particular see
3683 gcc.c-torture/execute/990326-1.c. There does not appear to be
3684 any documentation paper trail as to why this is so. But the pre
3685 wide-int code worked with that restriction and it has been preserved
3686 here. */
3687 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3688 return false;
3689
3690 return wi::mask (size, false, precision) == mask;
3691 }
3692
3693 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3694 represents the sign bit of EXP's type. If EXP represents a sign
3695 or zero extension, also test VAL against the unextended type.
3696 The return value is the (sub)expression whose sign bit is VAL,
3697 or NULL_TREE otherwise. */
3698
3699 static tree
3700 sign_bit_p (tree exp, const_tree val)
3701 {
3702 int width;
3703 tree t;
3704
3705 /* Tree EXP must have an integral type. */
3706 t = TREE_TYPE (exp);
3707 if (! INTEGRAL_TYPE_P (t))
3708 return NULL_TREE;
3709
3710 /* Tree VAL must be an integer constant. */
3711 if (TREE_CODE (val) != INTEGER_CST
3712 || TREE_OVERFLOW (val))
3713 return NULL_TREE;
3714
3715 width = TYPE_PRECISION (t);
3716 if (wi::only_sign_bit_p (val, width))
3717 return exp;
3718
3719 /* Handle extension from a narrower type. */
3720 if (TREE_CODE (exp) == NOP_EXPR
3721 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3722 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3723
3724 return NULL_TREE;
3725 }
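/* For example (an illustrative sketch, assuming a 32-bit int): the
   only 32-bit constant with just the sign bit set is 0x80000000, so
   sign_bit_p returns EXP for that VAL and NULL_TREE for, say,
   0x40000000 or 0x80000001.  */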
3726
3727 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3728 to be evaluated unconditionally. */
3729
3730 static int
3731 simple_operand_p (const_tree exp)
3732 {
3733 /* Strip any conversions that don't change the machine mode. */
3734 STRIP_NOPS (exp);
3735
3736 return (CONSTANT_CLASS_P (exp)
3737 || TREE_CODE (exp) == SSA_NAME
3738 || (DECL_P (exp)
3739 && ! TREE_ADDRESSABLE (exp)
3740 && ! TREE_THIS_VOLATILE (exp)
3741 && ! DECL_NONLOCAL (exp)
3742 /* Don't regard global variables as simple. They may be
3743 allocated in ways unknown to the compiler (shared memory,
3744 #pragma weak, etc). */
3745 && ! TREE_PUBLIC (exp)
3746 && ! DECL_EXTERNAL (exp)
3747 /* Weakrefs are not safe to be read, since they can be NULL.
3748 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3749 have DECL_WEAK flag set. */
3750 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3751 /* Loading a static variable is unduly expensive, but global
3752 registers aren't expensive. */
3753 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3754 }
3755
3756 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3757 to be evaluated unconditionally.
3758 In addition to simple_operand_p, we assume that comparisons, conversions,
3759 and logic-not operations are simple, if their operands are simple, too. */
3760
3761 static bool
3762 simple_operand_p_2 (tree exp)
3763 {
3764 enum tree_code code;
3765
3766 if (TREE_SIDE_EFFECTS (exp)
3767 || tree_could_trap_p (exp))
3768 return false;
3769
3770 while (CONVERT_EXPR_P (exp))
3771 exp = TREE_OPERAND (exp, 0);
3772
3773 code = TREE_CODE (exp);
3774
3775 if (TREE_CODE_CLASS (code) == tcc_comparison)
3776 return (simple_operand_p (TREE_OPERAND (exp, 0))
3777 && simple_operand_p (TREE_OPERAND (exp, 1)));
3778
3779 if (code == TRUTH_NOT_EXPR)
3780 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3781
3782 return simple_operand_p (exp);
3783 }
3784
3785 \f
3786 /* The following functions are subroutines to fold_range_test and allow it to
3787 try to change a logical combination of comparisons into a range test.
3788
3789 For example, both
3790 X == 2 || X == 3 || X == 4 || X == 5
3791 and
3792 X >= 2 && X <= 5
3793 are converted to
3794 (unsigned) (X - 2) <= 3
3795
3796 We describe each set of comparisons as being either inside or outside
3797 a range, using a variable named like IN_P, and then describe the
3798 range with a lower and upper bound. If one of the bounds is omitted,
3799 it represents either the highest or lowest value of the type.
3800
3801 In the comments below, we represent a range by two numbers in brackets
3802 preceded by a "+" to designate being inside that range, or a "-" to
3803 designate being outside that range, so the condition can be inverted by
3804 flipping the prefix. An omitted bound is represented by a "-". For
3805 example, "- [-, 10]" means being outside the range starting at the lowest
3806 possible value and ending at 10, in other words, being greater than 10.
3807 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3808 always false.
3809
3810 We set up things so that the missing bounds are handled in a consistent
3811 manner so neither a missing bound nor "true" and "false" need to be
3812 handled using a special case. */
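/* An illustrative sketch (not part of GCC) of the rewrite described
   above, in plain C: both functions below return the same value for
   every X, assuming a 32-bit int.  */

static int
range_test_expanded (int x)
{
  return x >= 2 && x <= 5;
}

static int
range_test_folded (int x)
{
  /* Shift the range down to start at zero; one unsigned compare
     then covers both bounds.  */
  return (unsigned) x - 2u <= 3u;
}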
3813
3814 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3815 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3816 and UPPER1_P are nonzero if the respective argument is an upper bound
3817 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3818 must be specified for a comparison. ARG1 will be converted to ARG0's
3819 type if both are specified. */
3820
3821 static tree
3822 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3823 tree arg1, int upper1_p)
3824 {
3825 tree tem;
3826 int result;
3827 int sgn0, sgn1;
3828
3829 /* If neither arg represents infinity, do the normal operation.
3830 Else, if not a comparison, return infinity. Else handle the special
3831 comparison rules. Note that most of the cases below won't occur, but
3832 are handled for consistency. */
3833
3834 if (arg0 != 0 && arg1 != 0)
3835 {
3836 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3837 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3838 STRIP_NOPS (tem);
3839 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3840 }
3841
3842 if (TREE_CODE_CLASS (code) != tcc_comparison)
3843 return 0;
3844
3845 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for an upper bound,
3846 and 0 for neither. In real mathematics we could not assume that
3847 open-ended ranges are comparable. But this is computer arithmetic, where
3848 numbers are finite, so we can substitute for any missing bound a value Z
3849 that is greater than any representable number. This permits
3850 us to treat unbounded ranges as equal. */
3851 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3852 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3853 switch (code)
3854 {
3855 case EQ_EXPR:
3856 result = sgn0 == sgn1;
3857 break;
3858 case NE_EXPR:
3859 result = sgn0 != sgn1;
3860 break;
3861 case LT_EXPR:
3862 result = sgn0 < sgn1;
3863 break;
3864 case LE_EXPR:
3865 result = sgn0 <= sgn1;
3866 break;
3867 case GT_EXPR:
3868 result = sgn0 > sgn1;
3869 break;
3870 case GE_EXPR:
3871 result = sgn0 >= sgn1;
3872 break;
3873 default:
3874 gcc_unreachable ();
3875 }
3876
3877 return constant_boolean_node (result, type);
3878 }
3879 \f
3880 /* Helper routine for make_range. Perform one step for it, return
3881 new expression if the loop should continue or NULL_TREE if it should
3882 stop. */
3883
3884 tree
3885 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3886 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3887 bool *strict_overflow_p)
3888 {
3889 tree arg0_type = TREE_TYPE (arg0);
3890 tree n_low, n_high, low = *p_low, high = *p_high;
3891 int in_p = *p_in_p, n_in_p;
3892
3893 switch (code)
3894 {
3895 case TRUTH_NOT_EXPR:
3896 /* We can only do something if the range is testing for zero. */
3897 if (low == NULL_TREE || high == NULL_TREE
3898 || ! integer_zerop (low) || ! integer_zerop (high))
3899 return NULL_TREE;
3900 *p_in_p = ! in_p;
3901 return arg0;
3902
3903 case EQ_EXPR: case NE_EXPR:
3904 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3905 /* We can only do something if the range is testing for zero
3906 and if the second operand is an integer constant. Note that
3907 saying something is "in" the range we make is done by
3908 complementing IN_P, since IN_P is set in the initial case of
3909 being not equal to zero; "out" is leaving it alone. */
3910 if (low == NULL_TREE || high == NULL_TREE
3911 || ! integer_zerop (low) || ! integer_zerop (high)
3912 || TREE_CODE (arg1) != INTEGER_CST)
3913 return NULL_TREE;
3914
3915 switch (code)
3916 {
3917 case NE_EXPR: /* - [c, c] */
3918 low = high = arg1;
3919 break;
3920 case EQ_EXPR: /* + [c, c] */
3921 in_p = ! in_p, low = high = arg1;
3922 break;
3923 case GT_EXPR: /* - [-, c] */
3924 low = 0, high = arg1;
3925 break;
3926 case GE_EXPR: /* + [c, -] */
3927 in_p = ! in_p, low = arg1, high = 0;
3928 break;
3929 case LT_EXPR: /* - [c, -] */
3930 low = arg1, high = 0;
3931 break;
3932 case LE_EXPR: /* + [-, c] */
3933 in_p = ! in_p, low = 0, high = arg1;
3934 break;
3935 default:
3936 gcc_unreachable ();
3937 }
3938
3939 /* If this is an unsigned comparison, we also know that EXP is
3940 greater than or equal to zero. We base the range tests we make
3941 on that fact, so we record it here so we can parse existing
3942 range tests. We test arg0_type since often the return type
3943 of, e.g. EQ_EXPR, is boolean. */
3944 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3945 {
3946 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3947 in_p, low, high, 1,
3948 build_int_cst (arg0_type, 0),
3949 NULL_TREE))
3950 return NULL_TREE;
3951
3952 in_p = n_in_p, low = n_low, high = n_high;
3953
3954 /* If the high bound is missing, but we have a nonzero low
3955 bound, reverse the range so it goes from zero to the low bound
3956 minus 1. */
3957 if (high == 0 && low && ! integer_zerop (low))
3958 {
3959 in_p = ! in_p;
3960 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3961 build_int_cst (TREE_TYPE (low), 1), 0);
3962 low = build_int_cst (arg0_type, 0);
3963 }
3964 }
3965
3966 *p_low = low;
3967 *p_high = high;
3968 *p_in_p = in_p;
3969 return arg0;
3970
3971 case NEGATE_EXPR:
3972 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3973 low and high are non-NULL, then normalize will DTRT. */
3974 if (!TYPE_UNSIGNED (arg0_type)
3975 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3976 {
3977 if (low == NULL_TREE)
3978 low = TYPE_MIN_VALUE (arg0_type);
3979 if (high == NULL_TREE)
3980 high = TYPE_MAX_VALUE (arg0_type);
3981 }
3982
3983 /* (-x) IN [a,b] -> x in [-b, -a] */
3984 n_low = range_binop (MINUS_EXPR, exp_type,
3985 build_int_cst (exp_type, 0),
3986 0, high, 1);
3987 n_high = range_binop (MINUS_EXPR, exp_type,
3988 build_int_cst (exp_type, 0),
3989 0, low, 0);
3990 if (n_high != 0 && TREE_OVERFLOW (n_high))
3991 return NULL_TREE;
3992 goto normalize;
3993
3994 case BIT_NOT_EXPR:
3995 /* ~ X -> -X - 1 */
3996 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3997 build_int_cst (exp_type, 1));
3998
3999 case PLUS_EXPR:
4000 case MINUS_EXPR:
4001 if (TREE_CODE (arg1) != INTEGER_CST)
4002 return NULL_TREE;
4003
4004 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4005 move a constant to the other side. */
4006 if (!TYPE_UNSIGNED (arg0_type)
4007 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4008 return NULL_TREE;
4009
4010 /* If EXP is signed, any overflow in the computation is undefined,
4011 so we don't worry about it so long as our computations on
4012 the bounds don't overflow. For unsigned, overflow is defined
4013 and this is exactly the right thing. */
4014 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4015 arg0_type, low, 0, arg1, 0);
4016 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4017 arg0_type, high, 1, arg1, 0);
4018 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4019 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4020 return NULL_TREE;
4021
4022 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4023 *strict_overflow_p = true;
4024
4025 normalize:
4026 /* Check for an unsigned range which has wrapped around the maximum
4027 value thus making n_high < n_low, and normalize it. */
4028 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4029 {
4030 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4031 build_int_cst (TREE_TYPE (n_high), 1), 0);
4032 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4033 build_int_cst (TREE_TYPE (n_low), 1), 0);
4034
4035 /* If the range is of the form +/- [ x+1, x ], we won't
4036 be able to normalize it. But then, it represents the
4037 whole range or the empty set, so make it
4038 +/- [ -, - ]. */
4039 if (tree_int_cst_equal (n_low, low)
4040 && tree_int_cst_equal (n_high, high))
4041 low = high = 0;
4042 else
4043 in_p = ! in_p;
4044 }
4045 else
4046 low = n_low, high = n_high;
4047
4048 *p_low = low;
4049 *p_high = high;
4050 *p_in_p = in_p;
4051 return arg0;
4052
4053 CASE_CONVERT:
4054 case NON_LVALUE_EXPR:
4055 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4056 return NULL_TREE;
4057
4058 if (! INTEGRAL_TYPE_P (arg0_type)
4059 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4060 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4061 return NULL_TREE;
4062
4063 n_low = low, n_high = high;
4064
4065 if (n_low != 0)
4066 n_low = fold_convert_loc (loc, arg0_type, n_low);
4067
4068 if (n_high != 0)
4069 n_high = fold_convert_loc (loc, arg0_type, n_high);
4070
4071 /* If we're converting arg0 from an unsigned type to the
4072 signed type of exp, we will be doing the comparison as unsigned.
4073 The tests above have already verified that LOW and HIGH
4074 are both positive.
4075
4076 So we have to ensure that we will handle large unsigned
4077 values the same way that the current signed bounds treat
4078 negative values. */
4079
4080 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4081 {
4082 tree high_positive;
4083 tree equiv_type;
4084 /* For fixed-point modes, we need to pass the saturating flag
4085 as the 2nd parameter. */
4086 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4087 equiv_type
4088 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4089 TYPE_SATURATING (arg0_type));
4090 else
4091 equiv_type
4092 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4093
4094 /* A range without an upper bound is, naturally, unbounded.
4095 Since convert would have cropped a very large value, use
4096 the max value for the destination type. */
4097 high_positive
4098 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4099 : TYPE_MAX_VALUE (arg0_type);
4100
4101 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4102 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4103 fold_convert_loc (loc, arg0_type,
4104 high_positive),
4105 build_int_cst (arg0_type, 1));
4106
4107 /* If the low bound is specified, "and" the range with the
4108 range for which the original unsigned value will be
4109 positive. */
4110 if (low != 0)
4111 {
4112 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4113 1, fold_convert_loc (loc, arg0_type,
4114 integer_zero_node),
4115 high_positive))
4116 return NULL_TREE;
4117
4118 in_p = (n_in_p == in_p);
4119 }
4120 else
4121 {
4122 /* Otherwise, "or" the range with the range of the input
4123 that will be interpreted as negative. */
4124 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4125 1, fold_convert_loc (loc, arg0_type,
4126 integer_zero_node),
4127 high_positive))
4128 return NULL_TREE;
4129
4130 in_p = (in_p != n_in_p);
4131 }
4132 }
4133
4134 *p_low = n_low;
4135 *p_high = n_high;
4136 *p_in_p = in_p;
4137 return arg0;
4138
4139 default:
4140 return NULL_TREE;
4141 }
4142 }
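/* Worked example of the MINUS_EXPR case and the normalization above
   (a sketch, not GCC output): for unsigned char X, "X - 10 <= 250"
   first yields X - 10 in + [0, 250] (the unsigned adjustment in the
   comparison step supplies the zero lower bound).  Adding the
   constant back gives bounds [10, 260 mod 256] == [10, 4]; since the
   new high bound is below the new low bound the range has wrapped,
   so it is replaced by its complement - [5, 9].  Indeed the only
   values of X for which X - 10 exceeds 250 are 5 through 9.  */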
4143
4144 /* Given EXP, a logical expression, set the range it is testing into
4145 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4146 actually being tested. *PLOW and *PHIGH will be made of the same
4147 type as the returned expression. If EXP is not a comparison, we
4148 will most likely not be returning a useful value and range. Set
4149 *STRICT_OVERFLOW_P to true if the return value is only valid
4150 because signed overflow is undefined; otherwise, do not change
4151 *STRICT_OVERFLOW_P. */
4152
4153 tree
4154 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4155 bool *strict_overflow_p)
4156 {
4157 enum tree_code code;
4158 tree arg0, arg1 = NULL_TREE;
4159 tree exp_type, nexp;
4160 int in_p;
4161 tree low, high;
4162 location_t loc = EXPR_LOCATION (exp);
4163
4164 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4165 and see if we can refine the range. Some of the cases below may not
4166 happen, but it doesn't seem worth worrying about this. We "continue"
4167 the outer loop when we've changed something; otherwise we "break"
4168 the switch, which will "break" the while. */
4169
4170 in_p = 0;
4171 low = high = build_int_cst (TREE_TYPE (exp), 0);
4172
4173 while (1)
4174 {
4175 code = TREE_CODE (exp);
4176 exp_type = TREE_TYPE (exp);
4177 arg0 = NULL_TREE;
4178
4179 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4180 {
4181 if (TREE_OPERAND_LENGTH (exp) > 0)
4182 arg0 = TREE_OPERAND (exp, 0);
4183 if (TREE_CODE_CLASS (code) == tcc_binary
4184 || TREE_CODE_CLASS (code) == tcc_comparison
4185 || (TREE_CODE_CLASS (code) == tcc_expression
4186 && TREE_OPERAND_LENGTH (exp) > 1))
4187 arg1 = TREE_OPERAND (exp, 1);
4188 }
4189 if (arg0 == NULL_TREE)
4190 break;
4191
4192 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4193 &high, &in_p, strict_overflow_p);
4194 if (nexp == NULL_TREE)
4195 break;
4196 exp = nexp;
4197 }
4198
4199 /* If EXP is a constant, we can evaluate whether this is true or false. */
4200 if (TREE_CODE (exp) == INTEGER_CST)
4201 {
4202 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4203 exp, 0, low, 0))
4204 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4205 exp, 1, high, 1)));
4206 low = high = 0;
4207 exp = 0;
4208 }
4209
4210 *pin_p = in_p, *plow = low, *phigh = high;
4211 return exp;
4212 }
4213 \f
4214 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4215 type, TYPE, return an expression to test if EXP is in (or out of, depending
4216 on IN_P) the range. Return 0 if the test couldn't be created. */
4217
4218 tree
4219 build_range_check (location_t loc, tree type, tree exp, int in_p,
4220 tree low, tree high)
4221 {
4222 tree etype = TREE_TYPE (exp), value;
4223
4224 #ifdef HAVE_canonicalize_funcptr_for_compare
4225 /* Disable this optimization for function pointer expressions
4226 on targets that require function pointer canonicalization. */
4227 if (HAVE_canonicalize_funcptr_for_compare
4228 && TREE_CODE (etype) == POINTER_TYPE
4229 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4230 return NULL_TREE;
4231 #endif
4232
4233 if (! in_p)
4234 {
4235 value = build_range_check (loc, type, exp, 1, low, high);
4236 if (value != 0)
4237 return invert_truthvalue_loc (loc, value);
4238
4239 return 0;
4240 }
4241
4242 if (low == 0 && high == 0)
4243 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4244
4245 if (low == 0)
4246 return fold_build2_loc (loc, LE_EXPR, type, exp,
4247 fold_convert_loc (loc, etype, high));
4248
4249 if (high == 0)
4250 return fold_build2_loc (loc, GE_EXPR, type, exp,
4251 fold_convert_loc (loc, etype, low));
4252
4253 if (operand_equal_p (low, high, 0))
4254 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4255 fold_convert_loc (loc, etype, low));
4256
4257 if (integer_zerop (low))
4258 {
4259 if (! TYPE_UNSIGNED (etype))
4260 {
4261 etype = unsigned_type_for (etype);
4262 high = fold_convert_loc (loc, etype, high);
4263 exp = fold_convert_loc (loc, etype, exp);
4264 }
4265 return build_range_check (loc, type, exp, 1, 0, high);
4266 }
4267
4268 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4269 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4270 {
4271 int prec = TYPE_PRECISION (etype);
4272
4273 if (wi::mask (prec - 1, false, prec) == high)
4274 {
4275 if (TYPE_UNSIGNED (etype))
4276 {
4277 tree signed_etype = signed_type_for (etype);
4278 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4279 etype
4280 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4281 else
4282 etype = signed_etype;
4283 exp = fold_convert_loc (loc, etype, exp);
4284 }
4285 return fold_build2_loc (loc, GT_EXPR, type, exp,
4286 build_int_cst (etype, 0));
4287 }
4288 }
4289
4290 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4291 This requires wrap-around arithmetic for the type of the expression.
4292 First make sure that arithmetic in this type is valid, then make sure
4293 that it wraps around. */
4294 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4295 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4296 TYPE_UNSIGNED (etype));
4297
4298 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4299 {
4300 tree utype, minv, maxv;
4301
4302 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4303 for the type in question, as we rely on this here. */
4304 utype = unsigned_type_for (etype);
4305 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4306 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4307 build_int_cst (TREE_TYPE (maxv), 1), 1);
4308 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4309
4310 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4311 minv, 1, maxv, 1)))
4312 etype = utype;
4313 else
4314 return 0;
4315 }
4316
4317 high = fold_convert_loc (loc, etype, high);
4318 low = fold_convert_loc (loc, etype, low);
4319 exp = fold_convert_loc (loc, etype, exp);
4320
4321 value = const_binop (MINUS_EXPR, high, low);
4322
4323
4324 if (POINTER_TYPE_P (etype))
4325 {
4326 if (value != 0 && !TREE_OVERFLOW (value))
4327 {
4328 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4329 return build_range_check (loc, type,
4330 fold_build_pointer_plus_loc (loc, exp, low),
4331 1, build_int_cst (etype, 0), value);
4332 }
4333 return 0;
4334 }
4335
4336 if (value != 0 && !TREE_OVERFLOW (value))
4337 return build_range_check (loc, type,
4338 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4339 1, build_int_cst (etype, 0), value);
4340
4341 return 0;
4342 }
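/* An illustrative sketch (not part of GCC) of the low == 1 special
   case above, assuming 8-bit chars: the unsigned values 1..127 are
   exactly the values that are positive when reinterpreted as signed
   char, so the two tests below agree for every C.  */

static int
range_check_expanded (unsigned char c)
{
  return c >= 1 && c <= 127;
}

static int
range_check_folded (unsigned char c)
{
  return (signed char) c > 0;
}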
4343 \f
4344 /* Return the predecessor of VAL in its type, handling the infinite case. */
4345
4346 static tree
4347 range_predecessor (tree val)
4348 {
4349 tree type = TREE_TYPE (val);
4350
4351 if (INTEGRAL_TYPE_P (type)
4352 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4353 return 0;
4354 else
4355 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4356 build_int_cst (TREE_TYPE (val), 1), 0);
4357 }
4358
4359 /* Return the successor of VAL in its type, handling the infinite case. */
4360
4361 static tree
4362 range_successor (tree val)
4363 {
4364 tree type = TREE_TYPE (val);
4365
4366 if (INTEGRAL_TYPE_P (type)
4367 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4368 return 0;
4369 else
4370 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4371 build_int_cst (TREE_TYPE (val), 1), 0);
4372 }
4373
4374 /* Given two ranges, see if we can merge them into one. Return 1 if we
4375 can, 0 if we can't. Set the output range into the specified parameters. */
4376
4377 bool
4378 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4379 tree high0, int in1_p, tree low1, tree high1)
4380 {
4381 int no_overlap;
4382 int subset;
4383 int temp;
4384 tree tem;
4385 int in_p;
4386 tree low, high;
4387 int lowequal = ((low0 == 0 && low1 == 0)
4388 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4389 low0, 0, low1, 0)));
4390 int highequal = ((high0 == 0 && high1 == 0)
4391 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4392 high0, 1, high1, 1)));
4393
4394 /* Make range 0 be the range that starts first, or ends last if they
4395 start at the same value. Swap them if it isn't. */
4396 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4397 low0, 0, low1, 0))
4398 || (lowequal
4399 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4400 high1, 1, high0, 1))))
4401 {
4402 temp = in0_p, in0_p = in1_p, in1_p = temp;
4403 tem = low0, low0 = low1, low1 = tem;
4404 tem = high0, high0 = high1, high1 = tem;
4405 }
4406
4407 /* Now flag two cases, whether the ranges are disjoint or whether the
4408 second range is totally subsumed in the first. Note that the tests
4409 below are simplified by the ones above. */
4410 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4411 high0, 1, low1, 0));
4412 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4413 high1, 1, high0, 1));
4414
4415 /* We now have four cases, depending on whether we are including or
4416 excluding the two ranges. */
4417 if (in0_p && in1_p)
4418 {
4419 /* If they don't overlap, the result is false. If the second range
4420 is a subset it is the result. Otherwise, the range is from the start
4421 of the second to the end of the first. */
4422 if (no_overlap)
4423 in_p = 0, low = high = 0;
4424 else if (subset)
4425 in_p = 1, low = low1, high = high1;
4426 else
4427 in_p = 1, low = low1, high = high0;
4428 }
4429
4430 else if (in0_p && ! in1_p)
4431 {
4432 /* If they don't overlap, the result is the first range. If they are
4433 equal, the result is false. If the second range is a subset of the
4434 first, and the ranges begin at the same place, we go from just after
4435 the end of the second range to the end of the first. If the second
4436 range is not a subset of the first, or if it is a subset and both
4437 ranges end at the same place, the range starts at the start of the
4438 first range and ends just before the second range.
4439 Otherwise, we can't describe this as a single range. */
4440 if (no_overlap)
4441 in_p = 1, low = low0, high = high0;
4442 else if (lowequal && highequal)
4443 in_p = 0, low = high = 0;
4444 else if (subset && lowequal)
4445 {
4446 low = range_successor (high1);
4447 high = high0;
4448 in_p = 1;
4449 if (low == 0)
4450 {
4451 /* We are in the weird situation where high0 > high1 but
4452 high1 has no successor. Punt. */
4453 return 0;
4454 }
4455 }
4456 else if (! subset || highequal)
4457 {
4458 low = low0;
4459 high = range_predecessor (low1);
4460 in_p = 1;
4461 if (high == 0)
4462 {
4463 /* low0 < low1 but low1 has no predecessor. Punt. */
4464 return 0;
4465 }
4466 }
4467 else
4468 return 0;
4469 }
4470
4471 else if (! in0_p && in1_p)
4472 {
4473 /* If they don't overlap, the result is the second range. If the second
4474 is a subset of the first, the result is false. Otherwise,
4475 the range starts just after the first range and ends at the
4476 end of the second. */
4477 if (no_overlap)
4478 in_p = 1, low = low1, high = high1;
4479 else if (subset || highequal)
4480 in_p = 0, low = high = 0;
4481 else
4482 {
4483 low = range_successor (high0);
4484 high = high1;
4485 in_p = 1;
4486 if (low == 0)
4487 {
4488 /* high1 > high0 but high0 has no successor. Punt. */
4489 return 0;
4490 }
4491 }
4492 }
4493
4494 else
4495 {
4496 /* The case where we are excluding both ranges. Here the complex case
4497 is if they don't overlap. In that case, the only time we have a
4498 range is if they are adjacent. If the second is a subset of the
4499 first, the result is the first. Otherwise, the range to exclude
4500 starts at the beginning of the first range and ends at the end of the
4501 second. */
4502 if (no_overlap)
4503 {
4504 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4505 range_successor (high0),
4506 1, low1, 0)))
4507 in_p = 0, low = low0, high = high1;
4508 else
4509 {
4510 /* Canonicalize - [min, x] into - [-, x]. */
4511 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4512 switch (TREE_CODE (TREE_TYPE (low0)))
4513 {
4514 case ENUMERAL_TYPE:
4515 if (TYPE_PRECISION (TREE_TYPE (low0))
4516 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4517 break;
4518 /* FALLTHROUGH */
4519 case INTEGER_TYPE:
4520 if (tree_int_cst_equal (low0,
4521 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4522 low0 = 0;
4523 break;
4524 case POINTER_TYPE:
4525 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4526 && integer_zerop (low0))
4527 low0 = 0;
4528 break;
4529 default:
4530 break;
4531 }
4532
4533 /* Canonicalize - [x, max] into - [x, -]. */
4534 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4535 switch (TREE_CODE (TREE_TYPE (high1)))
4536 {
4537 case ENUMERAL_TYPE:
4538 if (TYPE_PRECISION (TREE_TYPE (high1))
4539 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4540 break;
4541 /* FALLTHROUGH */
4542 case INTEGER_TYPE:
4543 if (tree_int_cst_equal (high1,
4544 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4545 high1 = 0;
4546 break;
4547 case POINTER_TYPE:
4548 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4549 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4550 high1, 1,
4551 build_int_cst (TREE_TYPE (high1), 1),
4552 1)))
4553 high1 = 0;
4554 break;
4555 default:
4556 break;
4557 }
4558
4559 /* The ranges might also be adjacent between the maximum and
4560 minimum values of the given type. For
4561 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4562 return + [x + 1, y - 1]. */
4563 if (low0 == 0 && high1 == 0)
4564 {
4565 low = range_successor (high0);
4566 high = range_predecessor (low1);
4567 if (low == 0 || high == 0)
4568 return 0;
4569
4570 in_p = 1;
4571 }
4572 else
4573 return 0;
4574 }
4575 }
4576 else if (subset)
4577 in_p = 0, low = low0, high = high0;
4578 else
4579 in_p = 0, low = low0, high = high1;
4580 }
4581
4582 *pin_p = in_p, *plow = low, *phigh = high;
4583 return 1;
4584 }
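/* Worked examples (sketches), treating the two ranges as conditions
   being ANDed, as fold_range_test arranges: merging + [2, 5] with
   + [4, 9] yields the overlap + [4, 5]; + [2, 5] with - [4, 9]
   yields + [2, 3]; the adjacent exclusions - [-, 3] and - [4, -]
   yield the always-false range - [-, -]; and - [-, 3] with - [8, -]
   yields + [4, 7].  */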
4585 \f
4586
4587 /* Subroutine of fold, looking inside expressions of the form
4588 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4589 of the COND_EXPR. This function is being used also to optimize
4590 A op B ? C : A, by reversing the comparison first.
4591
4592 Return a folded expression whose code is not a COND_EXPR
4593 anymore, or NULL_TREE if no folding opportunity is found. */
4594
4595 static tree
4596 fold_cond_expr_with_comparison (location_t loc, tree type,
4597 tree arg0, tree arg1, tree arg2)
4598 {
4599 enum tree_code comp_code = TREE_CODE (arg0);
4600 tree arg00 = TREE_OPERAND (arg0, 0);
4601 tree arg01 = TREE_OPERAND (arg0, 1);
4602 tree arg1_type = TREE_TYPE (arg1);
4603 tree tem;
4604
4605 STRIP_NOPS (arg1);
4606 STRIP_NOPS (arg2);
4607
4608 /* If we have A op 0 ? A : -A, consider applying the following
4609 transformations:
4610
4611 A == 0? A : -A same as -A
4612 A != 0? A : -A same as A
4613 A >= 0? A : -A same as abs (A)
4614 A > 0? A : -A same as abs (A)
4615 A <= 0? A : -A same as -abs (A)
4616 A < 0? A : -A same as -abs (A)
4617
4618 None of these transformations work for modes with signed
4619 zeros. If A is +/-0, the first two transformations will
4620 change the sign of the result (from +0 to -0, or vice
4621 versa). The last four will fix the sign of the result,
4622 even though the original expressions could be positive or
4623 negative, depending on the sign of A.
4624
4625 Note that all these transformations are correct if A is
4626 NaN, since the two alternatives (A and -A) are also NaNs. */
4627 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4628 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4629 ? real_zerop (arg01)
4630 : integer_zerop (arg01))
4631 && ((TREE_CODE (arg2) == NEGATE_EXPR
4632 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4633 /* In the case that A is of the form X-Y, '-A' (arg2) may
4634 have already been folded to Y-X, check for that. */
4635 || (TREE_CODE (arg1) == MINUS_EXPR
4636 && TREE_CODE (arg2) == MINUS_EXPR
4637 && operand_equal_p (TREE_OPERAND (arg1, 0),
4638 TREE_OPERAND (arg2, 1), 0)
4639 && operand_equal_p (TREE_OPERAND (arg1, 1),
4640 TREE_OPERAND (arg2, 0), 0))))
4641 switch (comp_code)
4642 {
4643 case EQ_EXPR:
4644 case UNEQ_EXPR:
4645 tem = fold_convert_loc (loc, arg1_type, arg1);
4646 return pedantic_non_lvalue_loc (loc,
4647 fold_convert_loc (loc, type,
4648 negate_expr (tem)));
4649 case NE_EXPR:
4650 case LTGT_EXPR:
4651 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4652 case UNGE_EXPR:
4653 case UNGT_EXPR:
4654 if (flag_trapping_math)
4655 break;
4656 /* Fall through. */
4657 case GE_EXPR:
4658 case GT_EXPR:
4659 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4660 arg1 = fold_convert_loc (loc, signed_type_for
4661 (TREE_TYPE (arg1)), arg1);
4662 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4663 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4664 case UNLE_EXPR:
4665 case UNLT_EXPR:
4666 if (flag_trapping_math)
4667 break;
/* Fall through. */
4668 case LE_EXPR:
4669 case LT_EXPR:
4670 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4671 arg1 = fold_convert_loc (loc, signed_type_for
4672 (TREE_TYPE (arg1)), arg1);
4673 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4674 return negate_expr (fold_convert_loc (loc, type, tem));
4675 default:
4676 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4677 break;
4678 }
4679
4680 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4681 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4682 both transformations are correct when A is NaN: A != 0
4683 is then true, and A == 0 is false. */
4684
4685 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4686 && integer_zerop (arg01) && integer_zerop (arg2))
4687 {
4688 if (comp_code == NE_EXPR)
4689 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4690 else if (comp_code == EQ_EXPR)
4691 return build_zero_cst (type);
4692 }
4693
4694 /* Try some transformations of A op B ? A : B.
4695
4696 A == B? A : B same as B
4697 A != B? A : B same as A
4698 A >= B? A : B same as max (A, B)
4699 A > B? A : B same as max (B, A)
4700 A <= B? A : B same as min (A, B)
4701 A < B? A : B same as min (B, A)
4702
4703 As above, these transformations don't work in the presence
4704 of signed zeros. For example, if A and B are zeros of
4705 opposite sign, the first two transformations will change
4706 the sign of the result. In the last four, the original
4707 expressions give different results for (A=+0, B=-0) and
4708 (A=-0, B=+0), but the transformed expressions do not.
4709
4710 The first two transformations are correct if either A or B
4711 is a NaN. In the first transformation, the condition will
4712 be false, and B will indeed be chosen. In the case of the
4713 second transformation, the condition A != B will be true,
4714 and A will be chosen.
4715
4716 The conversions to max() and min() are not correct if B is
4717 a number and A is not. The conditions in the original
4718 expressions will be false, so all four give B. The min()
4719 and max() versions would give a NaN instead. */
4720 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4721 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4722 /* Avoid these transformations if the COND_EXPR may be used
4723 as an lvalue in the C++ front-end. PR c++/19199. */
4724 && (in_gimple_form
4725 || VECTOR_TYPE_P (type)
4726 || (strcmp (lang_hooks.name, "GNU C++") != 0
4727 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4728 || ! maybe_lvalue_p (arg1)
4729 || ! maybe_lvalue_p (arg2)))
4730 {
4731 tree comp_op0 = arg00;
4732 tree comp_op1 = arg01;
4733 tree comp_type = TREE_TYPE (comp_op0);
4734
4735 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4736 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4737 {
4738 comp_type = type;
4739 comp_op0 = arg1;
4740 comp_op1 = arg2;
4741 }
4742
4743 switch (comp_code)
4744 {
4745 case EQ_EXPR:
4746 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4747 case NE_EXPR:
4748 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4749 case LE_EXPR:
4750 case LT_EXPR:
4751 case UNLE_EXPR:
4752 case UNLT_EXPR:
4753 /* In C++ a ?: expression can be an lvalue, so put the
4754 operand which will be used if they are equal first
4755 so that we can convert this back to the
4756 corresponding COND_EXPR. */
4757 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4758 {
4759 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4760 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4761 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4762 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4763 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4764 comp_op1, comp_op0);
4765 return pedantic_non_lvalue_loc (loc,
4766 fold_convert_loc (loc, type, tem));
4767 }
4768 break;
4769 case GE_EXPR:
4770 case GT_EXPR:
4771 case UNGE_EXPR:
4772 case UNGT_EXPR:
4773 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4774 {
4775 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4776 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4777 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4778 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4779 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4780 comp_op1, comp_op0);
4781 return pedantic_non_lvalue_loc (loc,
4782 fold_convert_loc (loc, type, tem));
4783 }
4784 break;
4785 case UNEQ_EXPR:
4786 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4787 return pedantic_non_lvalue_loc (loc,
4788 fold_convert_loc (loc, type, arg2));
4789 break;
4790 case LTGT_EXPR:
4791 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4792 return pedantic_non_lvalue_loc (loc,
4793 fold_convert_loc (loc, type, arg1));
4794 break;
4795 default:
4796 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4797 break;
4798 }
4799 }
4800
4801 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4802 we might still be able to simplify this. For example,
4803 if C1 is one less or one more than C2, this might have started
4804 out as a MIN or MAX and been transformed by this function.
4805 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4806
4807 if (INTEGRAL_TYPE_P (type)
4808 && TREE_CODE (arg01) == INTEGER_CST
4809 && TREE_CODE (arg2) == INTEGER_CST)
4810 switch (comp_code)
4811 {
4812 case EQ_EXPR:
4813 if (TREE_CODE (arg1) == INTEGER_CST)
4814 break;
4815 /* We can replace A with C1 in this case. */
4816 arg1 = fold_convert_loc (loc, type, arg01);
4817 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4818
4819 case LT_EXPR:
4820 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4821 MIN_EXPR, to preserve the signedness of the comparison. */
4822 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4823 OEP_ONLY_CONST)
4824 && operand_equal_p (arg01,
4825 const_binop (PLUS_EXPR, arg2,
4826 build_int_cst (type, 1)),
4827 OEP_ONLY_CONST))
4828 {
4829 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4830 fold_convert_loc (loc, TREE_TYPE (arg00),
4831 arg2));
4832 return pedantic_non_lvalue_loc (loc,
4833 fold_convert_loc (loc, type, tem));
4834 }
4835 break;
4836
4837 case LE_EXPR:
4838 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4839 as above. */
4840 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4841 OEP_ONLY_CONST)
4842 && operand_equal_p (arg01,
4843 const_binop (MINUS_EXPR, arg2,
4844 build_int_cst (type, 1)),
4845 OEP_ONLY_CONST))
4846 {
4847 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4848 fold_convert_loc (loc, TREE_TYPE (arg00),
4849 arg2));
4850 return pedantic_non_lvalue_loc (loc,
4851 fold_convert_loc (loc, type, tem));
4852 }
4853 break;
4854
4855 case GT_EXPR:
4856 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4857 MAX_EXPR, to preserve the signedness of the comparison. */
4858 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4859 OEP_ONLY_CONST)
4860 && operand_equal_p (arg01,
4861 const_binop (MINUS_EXPR, arg2,
4862 build_int_cst (type, 1)),
4863 OEP_ONLY_CONST))
4864 {
4865 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4866 fold_convert_loc (loc, TREE_TYPE (arg00),
4867 arg2));
4868 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4869 }
4870 break;
4871
4872 case GE_EXPR:
4873 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4874 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4875 OEP_ONLY_CONST)
4876 && operand_equal_p (arg01,
4877 const_binop (PLUS_EXPR, arg2,
4878 build_int_cst (type, 1)),
4879 OEP_ONLY_CONST))
4880 {
4881 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4882 fold_convert_loc (loc, TREE_TYPE (arg00),
4883 arg2));
4884 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4885 }
4886 break;
4887 case NE_EXPR:
4888 break;
4889 default:
4890 gcc_unreachable ();
4891 }
4892
4893 return NULL_TREE;
4894 }
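/* An illustrative sketch (not part of GCC) of one of the
   "A op 0 ? A : -A" rewrites above, for an integer type, where
   signed zeros and NaNs are not a concern: the two functions below
   are equivalent, matching the "A >= 0 ? A : -A same as abs (A)"
   row.  */

static int
cond_abs_expanded (int a)
{
  return a >= 0 ? a : -a;
}

static int
cond_abs_folded (int a)
{
  return __builtin_abs (a);
}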
4895
4896
4897 \f
4898 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4899 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4900 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4901 false) >= 2)
4902 #endif
4903
4904 /* EXP is some logical combination of boolean tests. See if we can
4905 merge it into some range test. Return the new tree if so. */
4906
4907 static tree
4908 fold_range_test (location_t loc, enum tree_code code, tree type,
4909 tree op0, tree op1)
4910 {
4911 int or_op = (code == TRUTH_ORIF_EXPR
4912 || code == TRUTH_OR_EXPR);
4913 int in0_p, in1_p, in_p;
4914 tree low0, low1, low, high0, high1, high;
4915 bool strict_overflow_p = false;
4916 tree tem, lhs, rhs;
4917 const char * const warnmsg = G_("assuming signed overflow does not occur "
4918 "when simplifying range test");
4919
4920 if (!INTEGRAL_TYPE_P (type))
4921 return 0;
4922
4923 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4924 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4925
4926 /* If this is an OR operation, invert both sides; we will invert
4927 again at the end. */
4928 if (or_op)
4929 in0_p = ! in0_p, in1_p = ! in1_p;
4930
4931 /* If both expressions are the same, if we can merge the ranges, and we
4932 can build the range test, return it or it inverted. If one of the
4933 ranges is always true or always false, consider it to be the same
4934 expression as the other. */
4935 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4936 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4937 in1_p, low1, high1)
4938 && 0 != (tem = (build_range_check (loc, type,
4939 lhs != 0 ? lhs
4940 : rhs != 0 ? rhs : integer_zero_node,
4941 in_p, low, high))))
4942 {
4943 if (strict_overflow_p)
4944 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4945 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4946 }
4947
4948 /* On machines where the branch cost is expensive, if this is a
4949 short-circuited branch and the underlying object on both sides
4950 is the same, make a non-short-circuit operation. */
4951 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4952 && lhs != 0 && rhs != 0
4953 && (code == TRUTH_ANDIF_EXPR
4954 || code == TRUTH_ORIF_EXPR)
4955 && operand_equal_p (lhs, rhs, 0))
4956 {
4957 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4958 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4959 which cases we can't do this. */
4960 if (simple_operand_p (lhs))
4961 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4962 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4963 type, op0, op1);
4964
4965 else if (!lang_hooks.decls.global_bindings_p ()
4966 && !CONTAINS_PLACEHOLDER_P (lhs))
4967 {
4968 tree common = save_expr (lhs);
4969
4970 if (0 != (lhs = build_range_check (loc, type, common,
4971 or_op ? ! in0_p : in0_p,
4972 low0, high0))
4973 && (0 != (rhs = build_range_check (loc, type, common,
4974 or_op ? ! in1_p : in1_p,
4975 low1, high1))))
4976 {
4977 if (strict_overflow_p)
4978 fold_overflow_warning (warnmsg,
4979 WARN_STRICT_OVERFLOW_COMPARISON);
4980 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4981 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4982 type, lhs, rhs);
4983 }
4984 }
4985 }
4986
4987 return 0;
4988 }
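/* An illustrative sketch (not part of GCC) of the classic result of
   this transformation: both functions below return the same value
   for every CH.  */

static int
is_digit_expanded (int ch)
{
  return ch >= '0' && ch <= '9';
}

static int
is_digit_folded (int ch)
{
  /* One unsigned compare replaces the short-circuit pair.  */
  return (unsigned) ch - '0' <= 9u;
}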
4989 \f
4990 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
4991 P-bit value. Arrange things so the extra bits will be set to zero if and
4992 only if C is sign-extended to its full width. If MASK is nonzero,
4993 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4994
4995 static tree
4996 unextend (tree c, int p, int unsignedp, tree mask)
4997 {
4998 tree type = TREE_TYPE (c);
4999 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5000 tree temp;
5001
5002 if (p == modesize || unsignedp)
5003 return c;
5004
5005 /* We work by getting just the sign bit into the low-order bit, then
5006 into the high-order bit, then sign-extend. We then XOR that value
5007 with C. */
5008 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5009
5010 /* We must use a signed type in order to get an arithmetic right shift.
5011 However, we must also avoid introducing accidental overflows, so that
5012 a subsequent call to integer_zerop will work. Hence we must
5013 do the type conversion here. At this point, the constant is either
5014 zero or one, and the conversion to a signed type can never overflow.
5015 We could get an overflow if this conversion is done anywhere else. */
5016 if (TYPE_UNSIGNED (type))
5017 temp = fold_convert (signed_type_for (type), temp);
5018
5019 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5020 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5021 if (mask != 0)
5022 temp = const_binop (BIT_AND_EXPR, temp,
5023 fold_convert (TREE_TYPE (c), mask));
5024 /* If necessary, convert the type back to match the type of C. */
5025 if (TYPE_UNSIGNED (type))
5026 temp = fold_convert (type, temp);
5027
5028 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5029 }
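/* Worked example (a sketch, assuming a 32-bit mode): for p == 4 and
   c == 0x0000000a (the 4-bit pattern 1010 with clear extra bits),
   the extracted sign bit 1 is shifted up to bit 31 and arithmetically
   shifted back down by 32 - 4 - 1 == 27, giving temp == 0xfffffff0;
   then 0x0000000a ^ 0xfffffff0 == 0xfffffffa, whose extra bits are
   set.  Had C arrived sign-extended as 0xfffffffa, the XOR would
   instead give 0x0000000a, with the extra bits zero.  */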
5030 \f
5031 /* For an expression that has the form
5032 (A && B) || ~B
5033 or
5034 (A || B) && ~B,
5035 we can drop one of the inner expressions and simplify to
5036 A || ~B
5037 or
5038 A && ~B
5039 LOC is the location of the resulting expression. OP is the inner
5040 logical operation; the left-hand side in the examples above, while CMPOP
5041 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5042 removing a condition that guards another, as in
5043 (A != NULL && A->...) || A == NULL
5044 which we must not transform. If RHS_ONLY is true, only eliminate the
5045 right-most operand of the inner logical operation. */
5046
5047 static tree
5048 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5049 bool rhs_only)
5050 {
5051 tree type = TREE_TYPE (cmpop);
5052 enum tree_code code = TREE_CODE (cmpop);
5053 enum tree_code truthop_code = TREE_CODE (op);
5054 tree lhs = TREE_OPERAND (op, 0);
5055 tree rhs = TREE_OPERAND (op, 1);
5056 tree orig_lhs = lhs, orig_rhs = rhs;
5057 enum tree_code rhs_code = TREE_CODE (rhs);
5058 enum tree_code lhs_code = TREE_CODE (lhs);
5059 enum tree_code inv_code;
5060
5061 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5062 return NULL_TREE;
5063
5064 if (TREE_CODE_CLASS (code) != tcc_comparison)
5065 return NULL_TREE;
5066
5067 if (rhs_code == truthop_code)
5068 {
5069 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5070 if (newrhs != NULL_TREE)
5071 {
5072 rhs = newrhs;
5073 rhs_code = TREE_CODE (rhs);
5074 }
5075 }
5076 if (lhs_code == truthop_code && !rhs_only)
5077 {
5078 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5079 if (newlhs != NULL_TREE)
5080 {
5081 lhs = newlhs;
5082 lhs_code = TREE_CODE (lhs);
5083 }
5084 }
5085
5086 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5087 if (inv_code == rhs_code
5088 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5089 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5090 return lhs;
5091 if (!rhs_only && inv_code == lhs_code
5092 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5093 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5094 return rhs;
5095 if (rhs != orig_rhs || lhs != orig_lhs)
5096 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5097 lhs, rhs);
5098 return NULL_TREE;
5099 }
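/* An illustrative sketch (not part of GCC), with hypothetical
   operands P and Q: in "(q != 0 && p > 0) || p <= 0" the inner
   "p > 0" is the inverse of the right-hand "p <= 0", so it can be
   dropped, giving "(q != 0) || p <= 0".  With RHS_ONLY set,
   "(a != NULL && a->f) || a == NULL" keeps the left-hand NULL test,
   since it guards the dereference.  */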
5100
5101 /* Find ways of folding logical expressions of LHS and RHS:
5102 Try to merge two comparisons to the same innermost item.
5103 Look for range tests like "ch >= '0' && ch <= '9'".
5104 Look for combinations of simple terms on machines with expensive branches
5105 and evaluate the RHS unconditionally.
5106
5107 For example, if we have p->a == 2 && p->b == 4 and we can make an
5108 object large enough to span both A and B, we can do this with a comparison
5109 against the object ANDed with the a mask.
5110
5111 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5112 operations to do this with one comparison.
5113
5114 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5115 function and the one above.
5116
5117 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5118 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5119
5120 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5121 two operands.
5122
5123 We return the simplified tree or 0 if no optimization is possible. */
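/* An illustrative sketch (not something GCC emits as C source): for
   struct s { unsigned char a, b; } *p on a little-endian target, the
   test "p->a == 2 && p->b == 4" can be performed as one wider load
   and compare, roughly

       (*(unsigned short *) p) == ((4 << 8) | 2)

   The trees built below express the same idea with explicit masks
   and shifted constants.  */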
5124
5125 static tree
5126 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5127 tree lhs, tree rhs)
5128 {
5129 /* If this is the "or" of two comparisons, we can do something if
5130 the comparisons are NE_EXPR. If this is the "and", we can do something
5131 if the comparisons are EQ_EXPR. I.e.,
5132 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5133
5134 WANTED_CODE is this operation code. For single bit fields, we can
5135 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5136 comparison for one-bit fields. */
5137
5138 enum tree_code wanted_code;
5139 enum tree_code lcode, rcode;
5140 tree ll_arg, lr_arg, rl_arg, rr_arg;
5141 tree ll_inner, lr_inner, rl_inner, rr_inner;
5142 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5143 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5144 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5145 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5146 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5147 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5148 enum machine_mode lnmode, rnmode;
5149 tree ll_mask, lr_mask, rl_mask, rr_mask;
5150 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5151 tree l_const, r_const;
5152 tree lntype, rntype, result;
5153 HOST_WIDE_INT first_bit, end_bit;
5154 int volatilep;
5155
5156 /* Start by getting the comparison codes. Fail if anything is volatile.
5157 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5158 it were surrounded with a NE_EXPR. */
5159
5160 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5161 return 0;
5162
5163 lcode = TREE_CODE (lhs);
5164 rcode = TREE_CODE (rhs);
5165
5166 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5167 {
5168 lhs = build2 (NE_EXPR, truth_type, lhs,
5169 build_int_cst (TREE_TYPE (lhs), 0));
5170 lcode = NE_EXPR;
5171 }
5172
5173 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5174 {
5175 rhs = build2 (NE_EXPR, truth_type, rhs,
5176 build_int_cst (TREE_TYPE (rhs), 0));
5177 rcode = NE_EXPR;
5178 }
5179
5180 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5181 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5182 return 0;
5183
5184 ll_arg = TREE_OPERAND (lhs, 0);
5185 lr_arg = TREE_OPERAND (lhs, 1);
5186 rl_arg = TREE_OPERAND (rhs, 0);
5187 rr_arg = TREE_OPERAND (rhs, 1);
5188
5189 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5190 if (simple_operand_p (ll_arg)
5191 && simple_operand_p (lr_arg))
5192 {
5193 if (operand_equal_p (ll_arg, rl_arg, 0)
5194 && operand_equal_p (lr_arg, rr_arg, 0))
5195 {
5196 result = combine_comparisons (loc, code, lcode, rcode,
5197 truth_type, ll_arg, lr_arg);
5198 if (result)
5199 return result;
5200 }
5201 else if (operand_equal_p (ll_arg, rr_arg, 0)
5202 && operand_equal_p (lr_arg, rl_arg, 0))
5203 {
5204 result = combine_comparisons (loc, code, lcode,
5205 swap_tree_comparison (rcode),
5206 truth_type, ll_arg, lr_arg);
5207 if (result)
5208 return result;
5209 }
5210 }
5211
5212 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5213 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5214
5215 /* If the RHS can be evaluated unconditionally and its operands are
5216 simple, it wins to evaluate the RHS unconditionally on machines
5217 with expensive branches. In this case, this isn't a comparison
5218 that can be merged. */
5219
5220 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5221 false) >= 2
5222 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5223 && simple_operand_p (rl_arg)
5224 && simple_operand_p (rr_arg))
5225 {
5226 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5227 if (code == TRUTH_OR_EXPR
5228 && lcode == NE_EXPR && integer_zerop (lr_arg)
5229 && rcode == NE_EXPR && integer_zerop (rr_arg)
5230 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5231 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5232 return build2_loc (loc, NE_EXPR, truth_type,
5233 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5234 ll_arg, rl_arg),
5235 build_int_cst (TREE_TYPE (ll_arg), 0));
5236
5237 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5238 if (code == TRUTH_AND_EXPR
5239 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5240 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5241 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5242 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5243 return build2_loc (loc, EQ_EXPR, truth_type,
5244 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5245 ll_arg, rl_arg),
5246 build_int_cst (TREE_TYPE (ll_arg), 0));
5247 }
5248
5249 /* See if the comparisons can be merged. Then get all the parameters for
5250 each side. */
5251
5252 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5253 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5254 return 0;
5255
5256 volatilep = 0;
5257 ll_inner = decode_field_reference (loc, ll_arg,
5258 &ll_bitsize, &ll_bitpos, &ll_mode,
5259 &ll_unsignedp, &volatilep, &ll_mask,
5260 &ll_and_mask);
5261 lr_inner = decode_field_reference (loc, lr_arg,
5262 &lr_bitsize, &lr_bitpos, &lr_mode,
5263 &lr_unsignedp, &volatilep, &lr_mask,
5264 &lr_and_mask);
5265 rl_inner = decode_field_reference (loc, rl_arg,
5266 &rl_bitsize, &rl_bitpos, &rl_mode,
5267 &rl_unsignedp, &volatilep, &rl_mask,
5268 &rl_and_mask);
5269 rr_inner = decode_field_reference (loc, rr_arg,
5270 &rr_bitsize, &rr_bitpos, &rr_mode,
5271 &rr_unsignedp, &volatilep, &rr_mask,
5272 &rr_and_mask);
5273
5274 /* The inner operation on the lhs of each comparison must be the
5275 same if we are to be able to do anything. Then see if we have
5276 constants. If not, the same must be true for
5277 the rhs's. */
5278 if (volatilep || ll_inner == 0 || rl_inner == 0
5279 || ! operand_equal_p (ll_inner, rl_inner, 0))
5280 return 0;
5281
5282 if (TREE_CODE (lr_arg) == INTEGER_CST
5283 && TREE_CODE (rr_arg) == INTEGER_CST)
5284 l_const = lr_arg, r_const = rr_arg;
5285 else if (lr_inner == 0 || rr_inner == 0
5286 || ! operand_equal_p (lr_inner, rr_inner, 0))
5287 return 0;
5288 else
5289 l_const = r_const = 0;
5290
5291 /* If either comparison code is not correct for our logical operation,
5292 fail. However, we can convert a one-bit comparison against zero into
5293 the opposite comparison against that bit being set in the field. */
5294
5295 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5296 if (lcode != wanted_code)
5297 {
5298 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5299 {
5300 /* Make the left operand unsigned, since we are only interested
5301 in the value of one bit. Otherwise we are doing the wrong
5302 thing below. */
5303 ll_unsignedp = 1;
5304 l_const = ll_mask;
5305 }
5306 else
5307 return 0;
5308 }
5309
5310 /* This is analogous to the code for l_const above. */
5311 if (rcode != wanted_code)
5312 {
5313 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5314 {
5315 rl_unsignedp = 1;
5316 r_const = rl_mask;
5317 }
5318 else
5319 return 0;
5320 }
5321
5322 /* See if we can find a mode that contains both fields being compared on
5323 the left. If we can't, fail. Otherwise, update all constants and masks
5324 to be relative to a field of that size. */
5325 first_bit = MIN (ll_bitpos, rl_bitpos);
5326 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5327 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5328 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5329 volatilep);
5330 if (lnmode == VOIDmode)
5331 return 0;
5332
5333 lnbitsize = GET_MODE_BITSIZE (lnmode);
5334 lnbitpos = first_bit & ~ (lnbitsize - 1);
5335 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5336 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5337
5338 if (BYTES_BIG_ENDIAN)
5339 {
5340 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5341 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5342 }
5343
5344 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5345 size_int (xll_bitpos));
5346 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5347 size_int (xrl_bitpos));
5348
5349 if (l_const)
5350 {
5351 l_const = fold_convert_loc (loc, lntype, l_const);
5352 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5353 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5354 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5355 fold_build1_loc (loc, BIT_NOT_EXPR,
5356 lntype, ll_mask))))
5357 {
5358 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5359
5360 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5361 }
5362 }
5363 if (r_const)
5364 {
5365 r_const = fold_convert_loc (loc, lntype, r_const);
5366 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5367 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5368 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5369 fold_build1_loc (loc, BIT_NOT_EXPR,
5370 lntype, rl_mask))))
5371 {
5372 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5373
5374 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5375 }
5376 }
5377
5378 /* If the right sides are not constant, do the same for them. Also,
5379 disallow this optimization if a size or signedness mismatch occurs
5380 between the left and right sides. */
5381 if (l_const == 0)
5382 {
5383 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5384 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5385 /* Make sure the two fields on the right
5386 correspond to the left without being swapped. */
5387 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5388 return 0;
5389
5390 first_bit = MIN (lr_bitpos, rr_bitpos);
5391 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5392 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5393 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5394 volatilep);
5395 if (rnmode == VOIDmode)
5396 return 0;
5397
5398 rnbitsize = GET_MODE_BITSIZE (rnmode);
5399 rnbitpos = first_bit & ~ (rnbitsize - 1);
5400 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5401 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5402
5403 if (BYTES_BIG_ENDIAN)
5404 {
5405 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5406 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5407 }
5408
5409 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5410 rntype, lr_mask),
5411 size_int (xlr_bitpos));
5412 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5413 rntype, rr_mask),
5414 size_int (xrr_bitpos));
5415
5416 /* Make a mask that corresponds to both fields being compared.
5417 Do this for both items being compared. If the operands are the
5418 same size and the bits being compared are in the same position
5419 then we can do this by masking both and comparing the masked
5420 results. */
5421 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5422 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5423 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5424 {
5425 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5426 ll_unsignedp || rl_unsignedp);
5427 if (! all_ones_mask_p (ll_mask, lnbitsize))
5428 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5429
5430 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5431 lr_unsignedp || rr_unsignedp);
5432 if (! all_ones_mask_p (lr_mask, rnbitsize))
5433 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5434
5435 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5436 }
5437
5438 /* There is still another way we can do something: If both pairs of
5439 fields being compared are adjacent, we may be able to make a wider
5440 field containing them both.
5441
5442 Note that we still must mask the lhs/rhs expressions. Furthermore,
5443 the mask must be shifted to account for the shift done by
5444 make_bit_field_ref. */
5445 if ((ll_bitsize + ll_bitpos == rl_bitpos
5446 && lr_bitsize + lr_bitpos == rr_bitpos)
5447 || (ll_bitpos == rl_bitpos + rl_bitsize
5448 && lr_bitpos == rr_bitpos + rr_bitsize))
5449 {
5450 tree type;
5451
5452 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5453 ll_bitsize + rl_bitsize,
5454 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5455 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5456 lr_bitsize + rr_bitsize,
5457 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5458
5459 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5460 size_int (MIN (xll_bitpos, xrl_bitpos)));
5461 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5462 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5463
5464 /* Convert to the smaller type before masking out unwanted bits. */
5465 type = lntype;
5466 if (lntype != rntype)
5467 {
5468 if (lnbitsize > rnbitsize)
5469 {
5470 lhs = fold_convert_loc (loc, rntype, lhs);
5471 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5472 type = rntype;
5473 }
5474 else if (lnbitsize < rnbitsize)
5475 {
5476 rhs = fold_convert_loc (loc, lntype, rhs);
5477 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5478 type = lntype;
5479 }
5480 }
5481
5482 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5483 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5484
5485 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5486 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5487
5488 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5489 }
5490
5491 return 0;
5492 }
5493
5494 /* Handle the case of comparisons with constants. If there is something in
5495 common between the masks, those bits of the constants must be the same.
5496 If not, the condition is always false. Test for this to avoid generating
5497 incorrect code below. */
5498 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5499 if (! integer_zerop (result)
5500 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5501 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5502 {
5503 if (wanted_code == NE_EXPR)
5504 {
5505 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5506 return constant_boolean_node (true, truth_type);
5507 }
5508 else
5509 {
5510 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5511 return constant_boolean_node (false, truth_type);
5512 }
5513 }
5514
5515 /* Construct the expression we will return. First get the component
5516 reference we will make. Unless the mask is all ones the width of
5517 that field, perform the mask operation. Then compare with the
5518 merged constant. */
5519 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5520 ll_unsignedp || rl_unsignedp);
5521
5522 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5523 if (! all_ones_mask_p (ll_mask, lnbitsize))
5524 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5525
5526 return build2_loc (loc, wanted_code, truth_type, result,
5527 const_binop (BIT_IOR_EXPR, l_const, r_const));
5528 }
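
/* For illustration, a sketch of the merging performed above, assuming
   a hypothetical struct whose two bit-fields share one byte and whose
   first field lands in the low-order bits:

     struct s { unsigned a : 4; unsigned b : 4; } x;

     x.a == 2 && x.b == 3

   can be folded into a single load, mask and compare, conceptually

     (byte_containing_x & 0xff) == 0x32

   where 0xff is ll_mask | rl_mask and 0x32 is l_const | r_const after
   each constant has been shifted to its field's position.  */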
5529 \f
5530 /* Optimize the comparison OP0 CODE OP1 (with result in TYPE), where
5531 OP0 is a MIN_EXPR or MAX_EXPR and OP1 is a constant. */
5532
5533 static tree
5534 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5535 tree op0, tree op1)
5536 {
5537 tree arg0 = op0;
5538 enum tree_code op_code;
5539 tree comp_const;
5540 tree minmax_const;
5541 int consts_equal, consts_lt;
5542 tree inner;
5543
5544 STRIP_SIGN_NOPS (arg0);
5545
5546 op_code = TREE_CODE (arg0);
5547 minmax_const = TREE_OPERAND (arg0, 1);
5548 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5549 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5550 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5551 inner = TREE_OPERAND (arg0, 0);
5552
5553 /* If something does not permit us to optimize, return NULL_TREE. */
5554 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5555 || TREE_CODE (comp_const) != INTEGER_CST
5556 || TREE_OVERFLOW (comp_const)
5557 || TREE_CODE (minmax_const) != INTEGER_CST
5558 || TREE_OVERFLOW (minmax_const))
5559 return NULL_TREE;
5560
5561 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5562 and GT_EXPR, doing the rest with recursive calls using logical
5563 simplifications. */
5564 switch (code)
5565 {
5566 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5567 {
5568 tree tem
5569 = optimize_minmax_comparison (loc,
5570 invert_tree_comparison (code, false),
5571 type, op0, op1);
5572 if (tem)
5573 return invert_truthvalue_loc (loc, tem);
5574 return NULL_TREE;
5575 }
5576
5577 case GE_EXPR:
5578 return
5579 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5580 optimize_minmax_comparison
5581 (loc, EQ_EXPR, type, arg0, comp_const),
5582 optimize_minmax_comparison
5583 (loc, GT_EXPR, type, arg0, comp_const));
5584
5585 case EQ_EXPR:
5586 if (op_code == MAX_EXPR && consts_equal)
5587 /* MAX (X, 0) == 0 -> X <= 0 */
5588 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5589
5590 else if (op_code == MAX_EXPR && consts_lt)
5591 /* MAX (X, 0) == 5 -> X == 5 */
5592 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5593
5594 else if (op_code == MAX_EXPR)
5595 /* MAX (X, 0) == -1 -> false */
5596 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5597
5598 else if (consts_equal)
5599 /* MIN (X, 0) == 0 -> X >= 0 */
5600 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5601
5602 else if (consts_lt)
5603 /* MIN (X, 0) == 5 -> false */
5604 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5605
5606 else
5607 /* MIN (X, 0) == -1 -> X == -1 */
5608 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5609
5610 case GT_EXPR:
5611 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5612 /* MAX (X, 0) > 0 -> X > 0
5613 MAX (X, 0) > 5 -> X > 5 */
5614 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5615
5616 else if (op_code == MAX_EXPR)
5617 /* MAX (X, 0) > -1 -> true */
5618 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5619
5620 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5621 /* MIN (X, 0) > 0 -> false
5622 MIN (X, 0) > 5 -> false */
5623 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5624
5625 else
5626 /* MIN (X, 0) > -1 -> X > -1 */
5627 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5628
5629 default:
5630 return NULL_TREE;
5631 }
5632 }
5633 \f
5634 /* T is an integer expression that is being multiplied, divided, or taken a
5635 modulus (CODE says which and what kind of divide or modulus) by a
5636 constant C. See if we can eliminate that operation by folding it with
5637 other operations already in T. WIDE_TYPE, if non-null, is a type that
5638 should be used for the computation if wider than our type.
5639
5640 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5641 (X * 2) + (Y * 4). We must, however, be assured that either the original
5642 expression would not overflow or that overflow is undefined for the type
5643 in the language in question.
5644
5645 If we return a non-null expression, it is an equivalent form of the
5646 original computation, but need not be in the original type.
5647
5648 We set *STRICT_OVERFLOW_P to true if the return value depends on
5649 signed overflow being undefined. Otherwise we do not change
5650 *STRICT_OVERFLOW_P. */
5651
5652 static tree
5653 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5654 bool *strict_overflow_p)
5655 {
5656 /* To avoid exponential search depth, refuse to allow recursion past
5657 three levels. Beyond that (1) it's highly unlikely that we'll find
5658 something interesting and (2) we've probably processed it before
5659 when we built the inner expression. */
5660
5661 static int depth;
5662 tree ret;
5663
5664 if (depth > 3)
5665 return NULL;
5666
5667 depth++;
5668 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5669 depth--;
5670
5671 return ret;
5672 }
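
/* For example, folding ((X * 8) + (Y * 16)) / 4 to (X * 2) + (Y * 4)
   is only valid when the original sum cannot wrap, so callers check
   the flag, roughly as sketched here (diagnostics simplified):

     bool strict_overflow_p = false;
     tree tem = extract_muldiv (op0, c, code, wide_type,
                                &strict_overflow_p);
     if (tem)
       {
         if (strict_overflow_p)
           fold_overflow_warning (("assuming signed overflow does not "
                                   "occur when simplifying "
                                   "multiplication or division"),
                                  WARN_STRICT_OVERFLOW_MISC);
         return fold_convert (ctype, tem);
       }
*/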
5673
5674 static tree
5675 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5676 bool *strict_overflow_p)
5677 {
5678 tree type = TREE_TYPE (t);
5679 enum tree_code tcode = TREE_CODE (t);
5680 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5681 > GET_MODE_SIZE (TYPE_MODE (type)))
5682 ? wide_type : type);
5683 tree t1, t2;
5684 int same_p = tcode == code;
5685 tree op0 = NULL_TREE, op1 = NULL_TREE;
5686 bool sub_strict_overflow_p;
5687
5688 /* Don't deal with constants of zero here; they confuse the code below. */
5689 if (integer_zerop (c))
5690 return NULL_TREE;
5691
5692 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5693 op0 = TREE_OPERAND (t, 0);
5694
5695 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5696 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5697
5698 /* Note that we need not handle conditional operations here since fold
5699 already handles those cases. So just do arithmetic here. */
5700 switch (tcode)
5701 {
5702 case INTEGER_CST:
5703 /* For a constant, we can always simplify if we are a multiply
5704 or (for divide and modulus) if it is a multiple of our constant. */
5705 if (code == MULT_EXPR
5706 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5707 return const_binop (code, fold_convert (ctype, t),
5708 fold_convert (ctype, c));
5709 break;
5710
5711 CASE_CONVERT: case NON_LVALUE_EXPR:
5712 /* If op0 is an expression ... */
5713 if ((COMPARISON_CLASS_P (op0)
5714 || UNARY_CLASS_P (op0)
5715 || BINARY_CLASS_P (op0)
5716 || VL_EXP_CLASS_P (op0)
5717 || EXPRESSION_CLASS_P (op0))
5718 /* ... and has wrapping overflow, and its type is smaller
5719 than ctype, then we cannot pass through as widening. */
5720 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5721 && (TYPE_PRECISION (ctype)
5722 > TYPE_PRECISION (TREE_TYPE (op0))))
5723 /* ... or this is a truncation (t is narrower than op0),
5724 then we cannot pass through this narrowing. */
5725 || (TYPE_PRECISION (type)
5726 < TYPE_PRECISION (TREE_TYPE (op0)))
5727 /* ... or signedness changes for division or modulus,
5728 then we cannot pass through this conversion. */
5729 || (code != MULT_EXPR
5730 && (TYPE_UNSIGNED (ctype)
5731 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5732 /* ... or has undefined overflow while the converted to
5733 type has not, we cannot do the operation in the inner type
5734 as that would introduce undefined overflow. */
5735 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5736 && !TYPE_OVERFLOW_UNDEFINED (type))))
5737 break;
5738
5739 /* Pass the constant down and see if we can make a simplification. If
5740 we can, replace this expression with the inner simplification for
5741 possible later conversion to our or some other type. */
5742 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5743 && TREE_CODE (t2) == INTEGER_CST
5744 && !TREE_OVERFLOW (t2)
5745 && (0 != (t1 = extract_muldiv (op0, t2, code,
5746 code == MULT_EXPR
5747 ? ctype : NULL_TREE,
5748 strict_overflow_p))))
5749 return t1;
5750 break;
5751
5752 case ABS_EXPR:
5753 /* If widening the type changes it from signed to unsigned, then we
5754 must avoid building ABS_EXPR itself as unsigned. */
5755 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5756 {
5757 tree cstype = (*signed_type_for) (ctype);
5758 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5759 != 0)
5760 {
5761 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5762 return fold_convert (ctype, t1);
5763 }
5764 break;
5765 }
5766 /* If the constant is negative, we cannot simplify this. */
5767 if (tree_int_cst_sgn (c) == -1)
5768 break;
5769 /* FALLTHROUGH */
5770 case NEGATE_EXPR:
5771 /* For division and modulus, type can't be unsigned, as e.g.
5772 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5773 For signed types, even with wrapping overflow, this is fine. */
5774 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5775 break;
5776 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5777 != 0)
5778 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5779 break;
5780
5781 case MIN_EXPR: case MAX_EXPR:
5782 /* If widening the type changes the signedness, then we can't perform
5783 this optimization as that changes the result. */
5784 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5785 break;
5786
5787 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5788 sub_strict_overflow_p = false;
5789 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5790 &sub_strict_overflow_p)) != 0
5791 && (t2 = extract_muldiv (op1, c, code, wide_type,
5792 &sub_strict_overflow_p)) != 0)
5793 {
5794 if (tree_int_cst_sgn (c) < 0)
5795 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5796 if (sub_strict_overflow_p)
5797 *strict_overflow_p = true;
5798 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5799 fold_convert (ctype, t2));
5800 }
5801 break;
5802
5803 case LSHIFT_EXPR: case RSHIFT_EXPR:
5804 /* If the second operand is constant, this is a multiplication
5805 or floor division by a power of two, so we can treat it that
5806 way unless the multiplier or divisor overflows. Signed
5807 left-shift overflow is implementation-defined rather than
5808 undefined in C90, so do not convert signed left shift into
5809 multiplication. */
5810 if (TREE_CODE (op1) == INTEGER_CST
5811 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5812 /* const_binop may not detect overflow correctly,
5813 so check for it explicitly here. */
5814 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5815 && 0 != (t1 = fold_convert (ctype,
5816 const_binop (LSHIFT_EXPR,
5817 size_one_node,
5818 op1)))
5819 && !TREE_OVERFLOW (t1))
5820 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5821 ? MULT_EXPR : FLOOR_DIV_EXPR,
5822 ctype,
5823 fold_convert (ctype, op0),
5824 t1),
5825 c, code, wide_type, strict_overflow_p);
5826 break;
5827
5828 case PLUS_EXPR: case MINUS_EXPR:
5829 /* See if we can eliminate the operation on both sides. If we can, we
5830 can return a new PLUS or MINUS. If we can't, the only remaining
5831 cases where we can do anything are if the second operand is a
5832 constant. */
5833 sub_strict_overflow_p = false;
5834 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5835 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5836 if (t1 != 0 && t2 != 0
5837 && (code == MULT_EXPR
5838 /* If not multiplication, we can only do this if both operands
5839 are divisible by c. */
5840 || (multiple_of_p (ctype, op0, c)
5841 && multiple_of_p (ctype, op1, c))))
5842 {
5843 if (sub_strict_overflow_p)
5844 *strict_overflow_p = true;
5845 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5846 fold_convert (ctype, t2));
5847 }
5848
5849 /* If this was a subtraction, negate OP1 and set it to be an addition.
5850 This simplifies the logic below. */
5851 if (tcode == MINUS_EXPR)
5852 {
5853 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5854 /* If OP1 was not easily negatable, the constant may be OP0. */
5855 if (TREE_CODE (op0) == INTEGER_CST)
5856 {
5857 tree tem = op0;
5858 op0 = op1;
5859 op1 = tem;
5860 tem = t1;
5861 t1 = t2;
5862 t2 = tem;
5863 }
5864 }
5865
5866 if (TREE_CODE (op1) != INTEGER_CST)
5867 break;
5868
5869 /* If either OP1 or C are negative, this optimization is not safe for
5870 some of the division and remainder types while for others we need
5871 to change the code. */
5872 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5873 {
5874 if (code == CEIL_DIV_EXPR)
5875 code = FLOOR_DIV_EXPR;
5876 else if (code == FLOOR_DIV_EXPR)
5877 code = CEIL_DIV_EXPR;
5878 else if (code != MULT_EXPR
5879 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5880 break;
5881 }
5882
5883 /* If it's a multiply or a division/modulus operation of a multiple
5884 of our constant, do the operation and verify it doesn't overflow. */
5885 if (code == MULT_EXPR
5886 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5887 {
5888 op1 = const_binop (code, fold_convert (ctype, op1),
5889 fold_convert (ctype, c));
5890 /* We allow the constant to overflow with wrapping semantics. */
5891 if (op1 == 0
5892 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5893 break;
5894 }
5895 else
5896 break;
5897
5898 /* If we have an unsigned type, we cannot widen the operation since it
5899 will change the result if the original computation overflowed. */
5900 if (TYPE_UNSIGNED (ctype) && ctype != type)
5901 break;
5902
5903 /* If we were able to eliminate our operation from the first side,
5904 apply our operation to the second side and reform the PLUS. */
5905 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5906 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5907
5908 /* The last case is if we are a multiply. In that case, we can
5909 apply the distributive law to commute the multiply and addition
5910 if the multiplication of the constants doesn't overflow
5911 and overflow is defined. With undefined overflow
5912 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5913 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5914 return fold_build2 (tcode, ctype,
5915 fold_build2 (code, ctype,
5916 fold_convert (ctype, op0),
5917 fold_convert (ctype, c)),
5918 op1);
5919
5920 break;
5921
5922 case MULT_EXPR:
5923 /* We have a special case here if we are doing something like
5924 (C * 8) % 4 since we know that's zero. */
5925 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5926 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5927 /* If the multiplication can overflow we cannot optimize this. */
5928 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5929 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5930 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5931 {
5932 *strict_overflow_p = true;
5933 return omit_one_operand (type, integer_zero_node, op0);
5934 }
5935
5936 /* ... fall through ... */
5937
5938 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5939 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5940 /* If we can extract our operation from the LHS, do so and return a
5941 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5942 do something only if the second operand is a constant. */
5943 if (same_p
5944 && (t1 = extract_muldiv (op0, c, code, wide_type,
5945 strict_overflow_p)) != 0)
5946 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5947 fold_convert (ctype, op1));
5948 else if (tcode == MULT_EXPR && code == MULT_EXPR
5949 && (t1 = extract_muldiv (op1, c, code, wide_type,
5950 strict_overflow_p)) != 0)
5951 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5952 fold_convert (ctype, t1));
5953 else if (TREE_CODE (op1) != INTEGER_CST)
5954 return 0;
5955
5956 /* If these are the same operation types, we can associate them
5957 assuming no overflow. */
5958 if (tcode == code)
5959 {
5960 bool overflow_p = false;
5961 bool overflow_mul_p;
5962 signop sign = TYPE_SIGN (ctype);
5963 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5964 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5965 if (overflow_mul_p
5966 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5967 overflow_p = true;
5968 if (!overflow_p)
5969 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5970 wide_int_to_tree (ctype, mul));
5971 }
5972
5973 /* If these operations "cancel" each other, we have the main
5974 optimizations of this pass, which occur when either constant is a
5975 multiple of the other, in which case we replace this with either an
5976 operation of CODE or TCODE.
5977
5978 If we have an unsigned type, we cannot do this since it will change
5979 the result if the original computation overflowed. */
5980 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5981 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5982 || (tcode == MULT_EXPR
5983 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5984 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5985 && code != MULT_EXPR)))
5986 {
5987 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5988 {
5989 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5990 *strict_overflow_p = true;
5991 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5992 fold_convert (ctype,
5993 const_binop (TRUNC_DIV_EXPR,
5994 op1, c)));
5995 }
5996 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
5997 {
5998 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5999 *strict_overflow_p = true;
6000 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6001 fold_convert (ctype,
6002 const_binop (TRUNC_DIV_EXPR,
6003 c, op1)));
6004 }
6005 }
6006 break;
6007
6008 default:
6009 break;
6010 }
6011
6012 return 0;
6013 }
6014 \f
6015 /* Return a node which has the indicated constant VALUE (either 0 or
6016 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6017 and is of the indicated TYPE. */
6018
6019 tree
6020 constant_boolean_node (bool value, tree type)
6021 {
6022 if (type == integer_type_node)
6023 return value ? integer_one_node : integer_zero_node;
6024 else if (type == boolean_type_node)
6025 return value ? boolean_true_node : boolean_false_node;
6026 else if (TREE_CODE (type) == VECTOR_TYPE)
6027 return build_vector_from_val (type,
6028 build_int_cst (TREE_TYPE (type),
6029 value ? -1 : 0));
6030 else
6031 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6032 }
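
/* For example, constant_boolean_node (true, boolean_type_node) yields
   boolean_true_node, while for a vector type (say a hypothetical
   4 x int type) it yields the tree for { -1, -1, -1, -1 }, the
   all-ones value that vector comparisons produce per element.  */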
6033
6034
6035 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6036 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6037 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6038 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6039 COND is the first argument to CODE; otherwise (as in the example
6040 given here), it is the second argument. TYPE is the type of the
6041 original expression. Return NULL_TREE if no simplification is
6042 possible. */
6043
6044 static tree
6045 fold_binary_op_with_conditional_arg (location_t loc,
6046 enum tree_code code,
6047 tree type, tree op0, tree op1,
6048 tree cond, tree arg, int cond_first_p)
6049 {
6050 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6051 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6052 tree test, true_value, false_value;
6053 tree lhs = NULL_TREE;
6054 tree rhs = NULL_TREE;
6055 enum tree_code cond_code = COND_EXPR;
6056
6057 if (TREE_CODE (cond) == COND_EXPR
6058 || TREE_CODE (cond) == VEC_COND_EXPR)
6059 {
6060 test = TREE_OPERAND (cond, 0);
6061 true_value = TREE_OPERAND (cond, 1);
6062 false_value = TREE_OPERAND (cond, 2);
6063 /* If this operand throws an exception, then it does not make
6064 sense to try to perform a logical or arithmetic operation
6065 involving it. */
6066 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6067 lhs = true_value;
6068 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6069 rhs = false_value;
6070 }
6071 else
6072 {
6073 tree testtype = TREE_TYPE (cond);
6074 test = cond;
6075 true_value = constant_boolean_node (true, testtype);
6076 false_value = constant_boolean_node (false, testtype);
6077 }
6078
6079 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6080 cond_code = VEC_COND_EXPR;
6081
6082 /* This transformation is only worthwhile if we don't have to wrap ARG
6083 in a SAVE_EXPR and the operation can be simplified without recursing
6084 on at least one of the branches once it's pushed inside the COND_EXPR. */
6085 if (!TREE_CONSTANT (arg)
6086 && (TREE_SIDE_EFFECTS (arg)
6087 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6088 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6089 return NULL_TREE;
6090
6091 arg = fold_convert_loc (loc, arg_type, arg);
6092 if (lhs == 0)
6093 {
6094 true_value = fold_convert_loc (loc, cond_type, true_value);
6095 if (cond_first_p)
6096 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6097 else
6098 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6099 }
6100 if (rhs == 0)
6101 {
6102 false_value = fold_convert_loc (loc, cond_type, false_value);
6103 if (cond_first_p)
6104 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6105 else
6106 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6107 }
6108
6109 /* Check that we have simplified at least one of the branches. */
6110 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6111 return NULL_TREE;
6112
6113 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6114 }
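
/* For illustration, with b a condition and x a variable:

     5 + (b ? x : 3)   becomes   b ? (5 + x) : 8

   the false arm folds to the constant 8, so the "simplified at least
   one branch" check above is satisfied.  */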
6115
6116 \f
6117 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6118
6119 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6120 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6121 ADDEND is the same as X.
6122
6123 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6124 and finite. The problematic cases are when X is zero, and its mode
6125 has signed zeros. In the case of rounding towards -infinity,
6126 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6127 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6128
6129 bool
6130 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6131 {
6132 if (!real_zerop (addend))
6133 return false;
6134
6135 /* Don't allow the fold with -fsignaling-nans. */
6136 if (HONOR_SNANS (TYPE_MODE (type)))
6137 return false;
6138
6139 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6140 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6141 return true;
6142
6143 /* In a vector or complex, we would need to check the sign of all zeros. */
6144 if (TREE_CODE (addend) != REAL_CST)
6145 return false;
6146
6147 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6148 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6149 negate = !negate;
6150
6151 /* The mode has signed zeros, and we have to honor their sign.
6152 In this situation, there is only one case we can return true for.
6153 X - 0 is the same as X unless rounding towards -infinity is
6154 supported. */
6155 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6156 }
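
/* For example, assuming IEEE double with signed zeros honored and
   default round-to-nearest:

     x + 0.0    cannot be folded to x, since (-0.0) + 0.0 is +0.0;
     x - 0.0    can be folded to x, since (-0.0) - 0.0 is still -0.0;
     x + (-0.0) is treated like x - 0.0 and also folds to x.

   With -fno-signed-zeros all three fold unconditionally.  */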
6157
6158 /* Subroutine of fold() that checks comparisons of built-in math
6159 functions against real constants.
6160
6161 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6162 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6163 is the type of the result and ARG0 and ARG1 are the operands of the
6164 comparison. ARG1 must be a TREE_REAL_CST.
6165
6166 The function returns the constant folded tree if a simplification
6167 can be made, and NULL_TREE otherwise. */
6168
6169 static tree
6170 fold_mathfn_compare (location_t loc,
6171 enum built_in_function fcode, enum tree_code code,
6172 tree type, tree arg0, tree arg1)
6173 {
6174 REAL_VALUE_TYPE c;
6175
6176 if (BUILTIN_SQRT_P (fcode))
6177 {
6178 tree arg = CALL_EXPR_ARG (arg0, 0);
6179 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6180
6181 c = TREE_REAL_CST (arg1);
6182 if (REAL_VALUE_NEGATIVE (c))
6183 {
6184 /* sqrt(x) < y is always false, if y is negative. */
6185 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6186 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6187
6188 /* sqrt(x) > y is always true, if y is negative and we
6189 don't care about NaNs, i.e. negative values of x. */
6190 if (code == NE_EXPR || !HONOR_NANS (mode))
6191 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6192
6193 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6194 return fold_build2_loc (loc, GE_EXPR, type, arg,
6195 build_real (TREE_TYPE (arg), dconst0));
6196 }
6197 else if (code == GT_EXPR || code == GE_EXPR)
6198 {
6199 REAL_VALUE_TYPE c2;
6200
6201 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6202 real_convert (&c2, mode, &c2);
6203
6204 if (REAL_VALUE_ISINF (c2))
6205 {
6206 /* sqrt(x) > y is x == +Inf, when y is very large. */
6207 if (HONOR_INFINITIES (mode))
6208 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6209 build_real (TREE_TYPE (arg), c2));
6210
6211 /* sqrt(x) > y is always false, when y is very large
6212 and we don't care about infinities. */
6213 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6214 }
6215
6216 /* sqrt(x) > c is the same as x > c*c. */
6217 return fold_build2_loc (loc, code, type, arg,
6218 build_real (TREE_TYPE (arg), c2));
6219 }
6220 else if (code == LT_EXPR || code == LE_EXPR)
6221 {
6222 REAL_VALUE_TYPE c2;
6223
6224 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6225 real_convert (&c2, mode, &c2);
6226
6227 if (REAL_VALUE_ISINF (c2))
6228 {
6229 /* sqrt(x) < y is always true, when y is a very large
6230 value and we don't care about NaNs or Infinities. */
6231 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6232 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6233
6234 /* sqrt(x) < y is x != +Inf when y is very large and we
6235 don't care about NaNs. */
6236 if (! HONOR_NANS (mode))
6237 return fold_build2_loc (loc, NE_EXPR, type, arg,
6238 build_real (TREE_TYPE (arg), c2));
6239
6240 /* sqrt(x) < y is x >= 0 when y is very large and we
6241 don't care about Infinities. */
6242 if (! HONOR_INFINITIES (mode))
6243 return fold_build2_loc (loc, GE_EXPR, type, arg,
6244 build_real (TREE_TYPE (arg), dconst0));
6245
6246 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6247 arg = save_expr (arg);
6248 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6249 fold_build2_loc (loc, GE_EXPR, type, arg,
6250 build_real (TREE_TYPE (arg),
6251 dconst0)),
6252 fold_build2_loc (loc, NE_EXPR, type, arg,
6253 build_real (TREE_TYPE (arg),
6254 c2)));
6255 }
6256
6257 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6258 if (! HONOR_NANS (mode))
6259 return fold_build2_loc (loc, code, type, arg,
6260 build_real (TREE_TYPE (arg), c2));
6261
6262 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6263 arg = save_expr (arg);
6264 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6265 fold_build2_loc (loc, GE_EXPR, type, arg,
6266 build_real (TREE_TYPE (arg),
6267 dconst0)),
6268 fold_build2_loc (loc, code, type, arg,
6269 build_real (TREE_TYPE (arg),
6270 c2)));
6271 }
6272 }
6273
6274 return NULL_TREE;
6275 }
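
/* For example, assuming c * c neither overflows nor becomes infinite:

     sqrt (x) > 2.0    becomes   x > 4.0
     sqrt (x) < 2.0    becomes   x < 4.0                (NaNs ignored)
                       or        x >= 0.0 && x < 4.0    (NaNs honored)
     sqrt (x) < -1.0   becomes   0, i.e. always false.  */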
6276
6277 /* Subroutine of fold() that optimizes comparisons against Infinities,
6278 either +Inf or -Inf.
6279
6280 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6281 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6282 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6283
6284 The function returns the constant folded tree if a simplification
6285 can be made, and NULL_TREE otherwise. */
6286
6287 static tree
6288 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6289 tree arg0, tree arg1)
6290 {
6291 enum machine_mode mode;
6292 REAL_VALUE_TYPE max;
6293 tree temp;
6294 bool neg;
6295
6296 mode = TYPE_MODE (TREE_TYPE (arg0));
6297
6298 /* For negative infinity swap the sense of the comparison. */
6299 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6300 if (neg)
6301 code = swap_tree_comparison (code);
6302
6303 switch (code)
6304 {
6305 case GT_EXPR:
6306 /* x > +Inf is always false, if we ignore sNaNs. */
6307 if (HONOR_SNANS (mode))
6308 return NULL_TREE;
6309 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6310
6311 case LE_EXPR:
6312 /* x <= +Inf is always true, if we don't care about NaNs. */
6313 if (! HONOR_NANS (mode))
6314 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6315
6316 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6317 arg0 = save_expr (arg0);
6318 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6319
6320 case EQ_EXPR:
6321 case GE_EXPR:
6322 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6323 real_maxval (&max, neg, mode);
6324 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6325 arg0, build_real (TREE_TYPE (arg0), max));
6326
6327 case LT_EXPR:
6328 /* x < +Inf is always equal to x <= DBL_MAX. */
6329 real_maxval (&max, neg, mode);
6330 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6331 arg0, build_real (TREE_TYPE (arg0), max));
6332
6333 case NE_EXPR:
6334 /* x != +Inf is always equal to !(x > DBL_MAX). */
6335 real_maxval (&max, neg, mode);
6336 if (! HONOR_NANS (mode))
6337 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6338 arg0, build_real (TREE_TYPE (arg0), max));
6339
6340 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6341 arg0, build_real (TREE_TYPE (arg0), max));
6342 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6343
6344 default:
6345 break;
6346 }
6347
6348 return NULL_TREE;
6349 }
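
/* For example, for a double x, with DBL_MAX the largest finite double:

     x >  +Inf   becomes   0, i.e. always false (absent signaling NaNs)
     x <= +Inf   becomes   x == x when NaNs are honored (true iff x is
                 not a NaN), or simply 1 otherwise
     x <  +Inf   becomes   x <= DBL_MAX
     x != +Inf   becomes   !(x > DBL_MAX) when NaNs are honored

   A comparison against -Inf first has its sense swapped and is then
   bounded by -DBL_MAX instead.  */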
6350
6351 /* Subroutine of fold() that optimizes comparisons of a division by
6352 a nonzero integer constant against an integer constant, i.e.
6353 X/C1 op C2.
6354
6355 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6356 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6357 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6358
6359 The function returns the constant folded tree if a simplification
6360 can be made, and NULL_TREE otherwise. */
6361
6362 static tree
6363 fold_div_compare (location_t loc,
6364 enum tree_code code, tree type, tree arg0, tree arg1)
6365 {
6366 tree prod, tmp, hi, lo;
6367 tree arg00 = TREE_OPERAND (arg0, 0);
6368 tree arg01 = TREE_OPERAND (arg0, 1);
6369 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6370 bool neg_overflow = false;
6371 bool overflow;
6372
6373 /* We have to do this the hard way to detect unsigned overflow.
6374 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6375 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6376 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6377 neg_overflow = false;
6378
6379 if (sign == UNSIGNED)
6380 {
6381 tmp = int_const_binop (MINUS_EXPR, arg01,
6382 build_int_cst (TREE_TYPE (arg01), 1));
6383 lo = prod;
6384
6385 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6386 val = wi::add (prod, tmp, sign, &overflow);
6387 hi = force_fit_type (TREE_TYPE (arg00), val,
6388 -1, overflow | TREE_OVERFLOW (prod));
6389 }
6390 else if (tree_int_cst_sgn (arg01) >= 0)
6391 {
6392 tmp = int_const_binop (MINUS_EXPR, arg01,
6393 build_int_cst (TREE_TYPE (arg01), 1));
6394 switch (tree_int_cst_sgn (arg1))
6395 {
6396 case -1:
6397 neg_overflow = true;
6398 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6399 hi = prod;
6400 break;
6401
6402 case 0:
6403 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6404 hi = tmp;
6405 break;
6406
6407 case 1:
6408 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6409 lo = prod;
6410 break;
6411
6412 default:
6413 gcc_unreachable ();
6414 }
6415 }
6416 else
6417 {
6418 /* A negative divisor reverses the relational operators. */
6419 code = swap_tree_comparison (code);
6420
6421 tmp = int_const_binop (PLUS_EXPR, arg01,
6422 build_int_cst (TREE_TYPE (arg01), 1));
6423 switch (tree_int_cst_sgn (arg1))
6424 {
6425 case -1:
6426 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6427 lo = prod;
6428 break;
6429
6430 case 0:
6431 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6432 lo = tmp;
6433 break;
6434
6435 case 1:
6436 neg_overflow = true;
6437 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6438 hi = prod;
6439 break;
6440
6441 default:
6442 gcc_unreachable ();
6443 }
6444 }
6445
6446 switch (code)
6447 {
6448 case EQ_EXPR:
6449 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6450 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6451 if (TREE_OVERFLOW (hi))
6452 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6453 if (TREE_OVERFLOW (lo))
6454 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6455 return build_range_check (loc, type, arg00, 1, lo, hi);
6456
6457 case NE_EXPR:
6458 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6459 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6460 if (TREE_OVERFLOW (hi))
6461 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6462 if (TREE_OVERFLOW (lo))
6463 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6464 return build_range_check (loc, type, arg00, 0, lo, hi);
6465
6466 case LT_EXPR:
6467 if (TREE_OVERFLOW (lo))
6468 {
6469 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6470 return omit_one_operand_loc (loc, type, tmp, arg00);
6471 }
6472 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6473
6474 case LE_EXPR:
6475 if (TREE_OVERFLOW (hi))
6476 {
6477 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6478 return omit_one_operand_loc (loc, type, tmp, arg00);
6479 }
6480 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6481
6482 case GT_EXPR:
6483 if (TREE_OVERFLOW (hi))
6484 {
6485 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6486 return omit_one_operand_loc (loc, type, tmp, arg00);
6487 }
6488 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6489
6490 case GE_EXPR:
6491 if (TREE_OVERFLOW (lo))
6492 {
6493 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6494 return omit_one_operand_loc (loc, type, tmp, arg00);
6495 }
6496 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6497
6498 default:
6499 break;
6500 }
6501
6502 return NULL_TREE;
6503 }
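
/* For example, for unsigned x:

     x / 4 == 3

   holds exactly for x in [12, 15], so it becomes the range check
   built by build_range_check above, conceptually

     x - 12 <= 3

   in unsigned arithmetic; here prod (C1 * C2) is 12 and hi is
   prod + (C1 - 1) = 15.  */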
6504
6505
6506 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6507 equality/inequality test, then return a simplified form of the test
6508 using a sign test. Otherwise return NULL_TREE. TYPE is the desired
6509 result type. */
6510
6511 static tree
6512 fold_single_bit_test_into_sign_test (location_t loc,
6513 enum tree_code code, tree arg0, tree arg1,
6514 tree result_type)
6515 {
6516 /* If this is testing a single bit, we can optimize the test. */
6517 if ((code == NE_EXPR || code == EQ_EXPR)
6518 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6519 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6520 {
6521 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6522 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6523 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6524
6525 if (arg00 != NULL_TREE
6526 /* This is only a win if casting to a signed type is cheap,
6527 i.e. when arg00's type is not a partial mode. */
6528 && TYPE_PRECISION (TREE_TYPE (arg00))
6529 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6530 {
6531 tree stype = signed_type_for (TREE_TYPE (arg00));
6532 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6533 result_type,
6534 fold_convert_loc (loc, stype, arg00),
6535 build_int_cst (stype, 0));
6536 }
6537 }
6538
6539 return NULL_TREE;
6540 }
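
/* For example, for a 32-bit int x:

     (x & 0x80000000) != 0   becomes   x < 0
     (x & 0x80000000) == 0   becomes   x >= 0

   since 0x80000000 is exactly the sign bit of x, which is what
   sign_bit_p verifies above.  */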
6541
6542 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6543 equality/inequality test, then return a simplified form of
6544 the test using shifts and logical operations. Otherwise return
6545 NULL_TREE. TYPE is the desired result type. */
6546
6547 tree
6548 fold_single_bit_test (location_t loc, enum tree_code code,
6549 tree arg0, tree arg1, tree result_type)
6550 {
6551 /* If this is testing a single bit, we can optimize the test. */
6552 if ((code == NE_EXPR || code == EQ_EXPR)
6553 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6554 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6555 {
6556 tree inner = TREE_OPERAND (arg0, 0);
6557 tree type = TREE_TYPE (arg0);
6558 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6559 enum machine_mode operand_mode = TYPE_MODE (type);
6560 int ops_unsigned;
6561 tree signed_type, unsigned_type, intermediate_type;
6562 tree tem, one;
6563
6564 /* First, see if we can fold the single bit test into a sign-bit
6565 test. */
6566 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6567 result_type);
6568 if (tem)
6569 return tem;
6570
6571 /* Otherwise we have (A & C) != 0 where C is a single bit,
6572 convert that into ((A >> C2) & 1), where C2 = log2(C).
6573 Similarly for (A & C) == 0. */
6574
6575 /* If INNER is a right shift of a constant and it plus BITNUM does
6576 not overflow, adjust BITNUM and INNER. */
6577 if (TREE_CODE (inner) == RSHIFT_EXPR
6578 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6579 && bitnum < TYPE_PRECISION (type)
6580 && wi::ltu_p (TREE_OPERAND (inner, 1),
6581 TYPE_PRECISION (type) - bitnum))
6582 {
6583 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6584 inner = TREE_OPERAND (inner, 0);
6585 }
6586
6587 /* If we are going to be able to omit the AND below, we must do our
6588 operations as unsigned. If we must use the AND, we have a choice.
6589 Normally unsigned is faster, but for some machines signed is. */
6590 #ifdef LOAD_EXTEND_OP
6591 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6592 && !flag_syntax_only) ? 0 : 1;
6593 #else
6594 ops_unsigned = 1;
6595 #endif
6596
6597 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6598 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6599 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6600 inner = fold_convert_loc (loc, intermediate_type, inner);
6601
6602 if (bitnum != 0)
6603 inner = build2 (RSHIFT_EXPR, intermediate_type,
6604 inner, size_int (bitnum));
6605
6606 one = build_int_cst (intermediate_type, 1);
6607
6608 if (code == EQ_EXPR)
6609 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6610
6611 /* Put the AND last so it can combine with more things. */
6612 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6613
6614 /* Make sure to return the proper type. */
6615 inner = fold_convert_loc (loc, result_type, inner);
6616
6617 return inner;
6618 }
6619 return NULL_TREE;
6620 }
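
/* For example, testing bit 3 of an unsigned x:

     (x & 8) != 0   becomes   (x >> 3) & 1
     (x & 8) == 0   becomes   ((x >> 3) ^ 1) & 1

   and when x is itself a right shift, say y >> 2, the shift counts
   combine, giving (y >> 5) & 1 for the first form.  */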
6621
6622 /* Check whether we are allowed to reorder operands arg0 and arg1,
6623 such that the evaluation of arg1 occurs before arg0. */
6624
6625 static bool
6626 reorder_operands_p (const_tree arg0, const_tree arg1)
6627 {
6628 if (! flag_evaluation_order)
6629 return true;
6630 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6631 return true;
6632 return ! TREE_SIDE_EFFECTS (arg0)
6633 && ! TREE_SIDE_EFFECTS (arg1);
6634 }
6635
6636 /* Test whether it is preferable to swap two operands, ARG0 and
6637 ARG1, for example because ARG0 is an integer constant and ARG1
6638 isn't. If REORDER is true, only recommend swapping if we can
6639 evaluate the operands in reverse order. */
6640
6641 bool
6642 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6643 {
6644 STRIP_SIGN_NOPS (arg0);
6645 STRIP_SIGN_NOPS (arg1);
6646
6647 if (TREE_CODE (arg1) == INTEGER_CST)
6648 return 0;
6649 if (TREE_CODE (arg0) == INTEGER_CST)
6650 return 1;
6651
6652 if (TREE_CODE (arg1) == REAL_CST)
6653 return 0;
6654 if (TREE_CODE (arg0) == REAL_CST)
6655 return 1;
6656
6657 if (TREE_CODE (arg1) == FIXED_CST)
6658 return 0;
6659 if (TREE_CODE (arg0) == FIXED_CST)
6660 return 1;
6661
6662 if (TREE_CODE (arg1) == COMPLEX_CST)
6663 return 0;
6664 if (TREE_CODE (arg0) == COMPLEX_CST)
6665 return 1;
6666
6667 if (TREE_CONSTANT (arg1))
6668 return 0;
6669 if (TREE_CONSTANT (arg0))
6670 return 1;
6671
6672 if (optimize_function_for_size_p (cfun))
6673 return 0;
6674
6675 if (reorder && flag_evaluation_order
6676 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6677 return 0;
6678
6679 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6680 for commutative and comparison operators. Ensuring a canonical
6681 form allows the optimizers to find additional redundancies without
6682 having to explicitly check for both orderings. */
6683 if (TREE_CODE (arg0) == SSA_NAME
6684 && TREE_CODE (arg1) == SSA_NAME
6685 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6686 return 1;
6687
6688 /* Put SSA_NAMEs last. */
6689 if (TREE_CODE (arg1) == SSA_NAME)
6690 return 0;
6691 if (TREE_CODE (arg0) == SSA_NAME)
6692 return 1;
6693
6694 /* Put variables last. */
6695 if (DECL_P (arg1))
6696 return 0;
6697 if (DECL_P (arg0))
6698 return 1;
6699
6700 return 0;
6701 }
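
/* A typical use, sketched for a commutative CODE the way fold_binary
   canonicalizes its operands:

     if (tree_swap_operands_p (arg0, arg1, true))
       return fold_build2_loc (loc, code, type, op1, op0);

   so that, e.g., 5 + x is rebuilt as x + 5, putting constants last.  */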
6702
6703 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6704 ARG0 is extended to a wider type. */
6705
6706 static tree
6707 fold_widened_comparison (location_t loc, enum tree_code code,
6708 tree type, tree arg0, tree arg1)
6709 {
6710 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6711 tree arg1_unw;
6712 tree shorter_type, outer_type;
6713 tree min, max;
6714 bool above, below;
6715
6716 if (arg0_unw == arg0)
6717 return NULL_TREE;
6718 shorter_type = TREE_TYPE (arg0_unw);
6719
6720 #ifdef HAVE_canonicalize_funcptr_for_compare
6721 /* Disable this optimization if we're casting a function pointer
6722 type on targets that require function pointer canonicalization. */
6723 if (HAVE_canonicalize_funcptr_for_compare
6724 && TREE_CODE (shorter_type) == POINTER_TYPE
6725 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6726 return NULL_TREE;
6727 #endif
6728
6729 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6730 return NULL_TREE;
6731
6732 arg1_unw = get_unwidened (arg1, NULL_TREE);
6733
6734 /* If possible, express the comparison in the shorter mode. */
6735 if ((code == EQ_EXPR || code == NE_EXPR
6736 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6737 && (TREE_TYPE (arg1_unw) == shorter_type
6738 || ((TYPE_PRECISION (shorter_type)
6739 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6740 && (TYPE_UNSIGNED (shorter_type)
6741 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6742 || (TREE_CODE (arg1_unw) == INTEGER_CST
6743 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6744 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6745 && int_fits_type_p (arg1_unw, shorter_type))))
6746 return fold_build2_loc (loc, code, type, arg0_unw,
6747 fold_convert_loc (loc, shorter_type, arg1_unw));
6748
6749 if (TREE_CODE (arg1_unw) != INTEGER_CST
6750 || TREE_CODE (shorter_type) != INTEGER_TYPE
6751 || !int_fits_type_p (arg1_unw, shorter_type))
6752 return NULL_TREE;
6753
6754 /* If we are comparing with an integer that does not fit into the range
6755 of the shorter type, the result is known. */
6756 outer_type = TREE_TYPE (arg1_unw);
6757 min = lower_bound_in_type (outer_type, shorter_type);
6758 max = upper_bound_in_type (outer_type, shorter_type);
6759
6760 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6761 max, arg1_unw));
6762 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6763 arg1_unw, min));
6764
6765 switch (code)
6766 {
6767 case EQ_EXPR:
6768 if (above || below)
6769 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6770 break;
6771
6772 case NE_EXPR:
6773 if (above || below)
6774 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6775 break;
6776
6777 case LT_EXPR:
6778 case LE_EXPR:
6779 if (above)
6780 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6781 else if (below)
6782 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6783
6784 case GT_EXPR:
6785 case GE_EXPR:
6786 if (above)
6787 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6788 else if (below)
6789 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6790
6791 default:
6792 break;
6793 }
6794
6795 return NULL_TREE;
6796 }
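
/* For example, with c of type unsigned char:

     (int) c == 1000   becomes   0, i.e. always false,

   because 1000 lies above the range of unsigned char, while

     (int) c == 100    becomes   c == (unsigned char) 100

   so the comparison is carried out in the narrower type.  */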
6797
6798 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6799 ARG0 just the signedness is changed. */
6800
6801 static tree
6802 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6803 tree arg0, tree arg1)
6804 {
6805 tree arg0_inner;
6806 tree inner_type, outer_type;
6807
6808 if (!CONVERT_EXPR_P (arg0))
6809 return NULL_TREE;
6810
6811 outer_type = TREE_TYPE (arg0);
6812 arg0_inner = TREE_OPERAND (arg0, 0);
6813 inner_type = TREE_TYPE (arg0_inner);
6814
6815 #ifdef HAVE_canonicalize_funcptr_for_compare
6816 /* Disable this optimization if we're casting a function pointer
6817 type on targets that require function pointer canonicalization. */
6818 if (HAVE_canonicalize_funcptr_for_compare
6819 && TREE_CODE (inner_type) == POINTER_TYPE
6820 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6821 return NULL_TREE;
6822 #endif
6823
6824 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6825 return NULL_TREE;
6826
6827 if (TREE_CODE (arg1) != INTEGER_CST
6828 && !(CONVERT_EXPR_P (arg1)
6829 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6830 return NULL_TREE;
6831
6832 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6833 && code != NE_EXPR
6834 && code != EQ_EXPR)
6835 return NULL_TREE;
6836
6837 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6838 return NULL_TREE;
6839
6840 if (TREE_CODE (arg1) == INTEGER_CST)
6841 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6842 TREE_OVERFLOW (arg1));
6843 else
6844 arg1 = fold_convert_loc (loc, inner_type, arg1);
6845
6846 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6847 }
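
/* For example, with i a signed int:

     (unsigned int) i == 5U   becomes   i == 5

   since the conversion changes only the signedness, not the
   precision, and equality does not depend on how the bits are
   interpreted.  */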
6848
6849 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6850 the step of the array. Reconstructs s and delta in the case of s *
6851 delta being an integer constant (and thus already folded). ADDR is
6852 the address. OP1 is the multiplicative expression. If the
6853 function succeeds, the new address expression is returned.
6854 Otherwise NULL_TREE is returned. LOC is the location of the
6855 resulting expression. */
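
/* For example, with int a[10], whose step is sizeof (int):

     &a[2] p+ sizeof (int) * d   becomes   &a[2 + d]

   since the multiplier matches the array's step; for one dimension of
   a multi-dimensional array the fold is additionally required not to
   step outside that dimension's bounds.  */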
6856
6857 static tree
6858 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6859 {
6860 tree s, delta, step;
6861 tree ref = TREE_OPERAND (addr, 0), pref;
6862 tree ret, pos;
6863 tree itype;
6864 bool mdim = false;
6865
6866 /* Strip the nops that might be added when converting op1 to sizetype. */
6867 STRIP_NOPS (op1);
6868
6869 /* Canonicalize op1 into a possibly non-constant delta
6870 and an INTEGER_CST s. */
6871 if (TREE_CODE (op1) == MULT_EXPR)
6872 {
6873 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6874
6875 STRIP_NOPS (arg0);
6876 STRIP_NOPS (arg1);
6877
6878 if (TREE_CODE (arg0) == INTEGER_CST)
6879 {
6880 s = arg0;
6881 delta = arg1;
6882 }
6883 else if (TREE_CODE (arg1) == INTEGER_CST)
6884 {
6885 s = arg1;
6886 delta = arg0;
6887 }
6888 else
6889 return NULL_TREE;
6890 }
6891 else if (TREE_CODE (op1) == INTEGER_CST)
6892 {
6893 delta = op1;
6894 s = NULL_TREE;
6895 }
6896 else
6897 {
6898 /* Treat op1 as delta * 1. */
6899 delta = op1;
6900 s = integer_one_node;
6901 }
6902
6903 /* Handle &x.array the same as we would handle &x.array[0]. */
6904 if (TREE_CODE (ref) == COMPONENT_REF
6905 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6906 {
6907 tree domain;
6908
6909 /* Remember if this was a multi-dimensional array. */
6910 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6911 mdim = true;
6912
6913 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6914 if (! domain)
6915 goto cont;
6916 itype = TREE_TYPE (domain);
6917
6918 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6919 if (TREE_CODE (step) != INTEGER_CST)
6920 goto cont;
6921
6922 if (s)
6923 {
6924 if (! tree_int_cst_equal (step, s))
6925 goto cont;
6926 }
6927 else
6928 {
6929 /* Check whether delta is a multiple of step. */
6930 tree tmp = div_if_zero_remainder (op1, step);
6931 if (! tmp)
6932 goto cont;
6933 delta = tmp;
6934 }
6935
6936 /* Only fold here if we can verify we do not overflow one
6937 dimension of a multi-dimensional array. */
6938 if (mdim)
6939 {
6940 tree tmp;
6941
6942 if (!TYPE_MIN_VALUE (domain)
6943 || !TYPE_MAX_VALUE (domain)
6944 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6945 goto cont;
6946
6947 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6948 fold_convert_loc (loc, itype,
6949 TYPE_MIN_VALUE (domain)),
6950 fold_convert_loc (loc, itype, delta));
6951 if (TREE_CODE (tmp) != INTEGER_CST
6952 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6953 goto cont;
6954 }
6955
6956 /* We found a suitable component reference. */
6957
6958 pref = TREE_OPERAND (addr, 0);
6959 ret = copy_node (pref);
6960 SET_EXPR_LOCATION (ret, loc);
6961
6962 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6963 fold_build2_loc
6964 (loc, PLUS_EXPR, itype,
6965 fold_convert_loc (loc, itype,
6966 TYPE_MIN_VALUE
6967 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6968 fold_convert_loc (loc, itype, delta)),
6969 NULL_TREE, NULL_TREE);
6970 return build_fold_addr_expr_loc (loc, ret);
6971 }
6972
6973 cont:
6974
6975 for (;; ref = TREE_OPERAND (ref, 0))
6976 {
6977 if (TREE_CODE (ref) == ARRAY_REF)
6978 {
6979 tree domain;
6980
6981 /* Remember if this was a multi-dimensional array. */
6982 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6983 mdim = true;
6984
6985 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6986 if (! domain)
6987 continue;
6988 itype = TREE_TYPE (domain);
6989
6990 step = array_ref_element_size (ref);
6991 if (TREE_CODE (step) != INTEGER_CST)
6992 continue;
6993
6994 if (s)
6995 {
6996 if (! tree_int_cst_equal (step, s))
6997 continue;
6998 }
6999 else
7000 {
7001 /* Check whether delta is a multiple of step. */
7002 tree tmp = div_if_zero_remainder (op1, step);
7003 if (! tmp)
7004 continue;
7005 delta = tmp;
7006 }
7007
7008 /* Only fold here if we can verify we do not overflow one
7009 dimension of a multi-dimensional array. */
7010 if (mdim)
7011 {
7012 tree tmp;
7013
7014 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7015 || !TYPE_MAX_VALUE (domain)
7016 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7017 continue;
7018
7019 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7020 fold_convert_loc (loc, itype,
7021 TREE_OPERAND (ref, 1)),
7022 fold_convert_loc (loc, itype, delta));
7023 if (!tmp
7024 || TREE_CODE (tmp) != INTEGER_CST
7025 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7026 continue;
7027 }
7028
7029 break;
7030 }
7031 else
7032 mdim = false;
7033
7034 if (!handled_component_p (ref))
7035 return NULL_TREE;
7036 }
7037
7038 /* We found a suitable array reference. So copy everything up to it,
7039 and replace the index. */
7040
7041 pref = TREE_OPERAND (addr, 0);
7042 ret = copy_node (pref);
7043 SET_EXPR_LOCATION (ret, loc);
7044 pos = ret;
7045
7046 while (pref != ref)
7047 {
7048 pref = TREE_OPERAND (pref, 0);
7049 TREE_OPERAND (pos, 0) = copy_node (pref);
7050 pos = TREE_OPERAND (pos, 0);
7051 }
7052
7053 TREE_OPERAND (pos, 1)
7054 = fold_build2_loc (loc, PLUS_EXPR, itype,
7055 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7056 fold_convert_loc (loc, itype, delta));
7057 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7058 }
7059
7060
7061 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7062 means A >= Y && A != MAX, but in this case we know that
7063 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
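/* Concretely: in "a < n && a + 1 > y" the bound a < n guarantees
   that a + 1 cannot wrap around, so the second test may be
   weakened to a >= y (illustrative).  */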
7064
7065 static tree
7066 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7067 {
7068 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7069
7070 if (TREE_CODE (bound) == LT_EXPR)
7071 a = TREE_OPERAND (bound, 0);
7072 else if (TREE_CODE (bound) == GT_EXPR)
7073 a = TREE_OPERAND (bound, 1);
7074 else
7075 return NULL_TREE;
7076
7077 typea = TREE_TYPE (a);
7078 if (!INTEGRAL_TYPE_P (typea)
7079 && !POINTER_TYPE_P (typea))
7080 return NULL_TREE;
7081
7082 if (TREE_CODE (ineq) == LT_EXPR)
7083 {
7084 a1 = TREE_OPERAND (ineq, 1);
7085 y = TREE_OPERAND (ineq, 0);
7086 }
7087 else if (TREE_CODE (ineq) == GT_EXPR)
7088 {
7089 a1 = TREE_OPERAND (ineq, 0);
7090 y = TREE_OPERAND (ineq, 1);
7091 }
7092 else
7093 return NULL_TREE;
7094
7095 if (TREE_TYPE (a1) != typea)
7096 return NULL_TREE;
7097
7098 if (POINTER_TYPE_P (typea))
7099 {
7100 /* Convert the pointers to a signed integer type before taking the difference. */
7101 tree ta = fold_convert_loc (loc, ssizetype, a);
7102 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7103 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7104 }
7105 else
7106 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7107
7108 if (!diff || !integer_onep (diff))
7109 return NULL_TREE;
7110
7111 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7112 }
7113
7114 /* Fold a sum or difference of at least one multiplication.
7115 Returns the folded tree or NULL if no simplification could be made. */
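/* For instance, x * 3 + x folds to (3 + 1) * x = x * 4, and
   x * 8 + y * 4 folds to (x * 2 + y) * 4 via the common
   power-of-two factor (illustrative examples).  */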
7116
7117 static tree
7118 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7119 tree arg0, tree arg1)
7120 {
7121 tree arg00, arg01, arg10, arg11;
7122 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7123
7124 /* (A * C) +- (B * C) -> (A+-B) * C.
7125 (A * C) +- A -> A * (C+-1).
7126 We are most concerned about the case where C is a constant,
7127 but other combinations show up during loop reduction. Since
7128 it is not difficult, try all four possibilities. */
7129
7130 if (TREE_CODE (arg0) == MULT_EXPR)
7131 {
7132 arg00 = TREE_OPERAND (arg0, 0);
7133 arg01 = TREE_OPERAND (arg0, 1);
7134 }
7135 else if (TREE_CODE (arg0) == INTEGER_CST)
7136 {
7137 arg00 = build_one_cst (type);
7138 arg01 = arg0;
7139 }
7140 else
7141 {
7142 /* We cannot generate constant 1 for fract. */
7143 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7144 return NULL_TREE;
7145 arg00 = arg0;
7146 arg01 = build_one_cst (type);
7147 }
7148 if (TREE_CODE (arg1) == MULT_EXPR)
7149 {
7150 arg10 = TREE_OPERAND (arg1, 0);
7151 arg11 = TREE_OPERAND (arg1, 1);
7152 }
7153 else if (TREE_CODE (arg1) == INTEGER_CST)
7154 {
7155 arg10 = build_one_cst (type);
7156 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7157 the purpose of this canonicalization. */
7158 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7159 && negate_expr_p (arg1)
7160 && code == PLUS_EXPR)
7161 {
7162 arg11 = negate_expr (arg1);
7163 code = MINUS_EXPR;
7164 }
7165 else
7166 arg11 = arg1;
7167 }
7168 else
7169 {
7170 /* We cannot generate constant 1 for fract. */
7171 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7172 return NULL_TREE;
7173 arg10 = arg1;
7174 arg11 = build_one_cst (type);
7175 }
7176 same = NULL_TREE;
7177
7178 if (operand_equal_p (arg01, arg11, 0))
7179 same = arg01, alt0 = arg00, alt1 = arg10;
7180 else if (operand_equal_p (arg00, arg10, 0))
7181 same = arg00, alt0 = arg01, alt1 = arg11;
7182 else if (operand_equal_p (arg00, arg11, 0))
7183 same = arg00, alt0 = arg01, alt1 = arg10;
7184 else if (operand_equal_p (arg01, arg10, 0))
7185 same = arg01, alt0 = arg00, alt1 = arg11;
7186
7187 /* No identical multiplicands; see if we can find a common
7188 power-of-two factor in non-power-of-two multiplies. This
7189 can help in multi-dimensional array access. */
7190 else if (tree_fits_shwi_p (arg01)
7191 && tree_fits_shwi_p (arg11))
7192 {
7193 HOST_WIDE_INT int01, int11, tmp;
7194 bool swap = false;
7195 tree maybe_same;
7196 int01 = tree_to_shwi (arg01);
7197 int11 = tree_to_shwi (arg11);
7198
7199 /* Move min of absolute values to int11. */
7200 if (absu_hwi (int01) < absu_hwi (int11))
7201 {
7202 tmp = int01, int01 = int11, int11 = tmp;
7203 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7204 maybe_same = arg01;
7205 swap = true;
7206 }
7207 else
7208 maybe_same = arg11;
7209
7210 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7211 /* The remainder should not be a constant, otherwise we
7212 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7213 increase the number of multiplications necessary. */
7214 && TREE_CODE (arg10) != INTEGER_CST)
7215 {
7216 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7217 build_int_cst (TREE_TYPE (arg00),
7218 int01 / int11));
7219 alt1 = arg10;
7220 same = maybe_same;
7221 if (swap)
7222 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7223 }
7224 }
7225
7226 if (same)
7227 return fold_build2_loc (loc, MULT_EXPR, type,
7228 fold_build2_loc (loc, code, type,
7229 fold_convert_loc (loc, type, alt0),
7230 fold_convert_loc (loc, type, alt1)),
7231 fold_convert_loc (loc, type, same));
7232
7233 return NULL_TREE;
7234 }
7235
7236 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7237 specified by EXPR into the buffer PTR of length LEN bytes.
7238 Return the number of bytes placed in the buffer, or zero
7239 upon failure. */
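/* Sketch of the intended behavior: encoding the 32-bit constant
   0x11223344 stores the bytes 44 33 22 11 on a little-endian
   target and 11 22 33 44 on a big-endian one, returning 4 in
   both cases.  */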
7240
7241 static int
7242 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7243 {
7244 tree type = TREE_TYPE (expr);
7245 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7246 int byte, offset, word, words;
7247 unsigned char value;
7248
7249 if (total_bytes > len)
7250 return 0;
7251 words = total_bytes / UNITS_PER_WORD;
7252
7253 for (byte = 0; byte < total_bytes; byte++)
7254 {
7255 int bitpos = byte * BITS_PER_UNIT;
7256 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7257 number of bytes. */
7258 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7259
7260 if (total_bytes > UNITS_PER_WORD)
7261 {
7262 word = byte / UNITS_PER_WORD;
7263 if (WORDS_BIG_ENDIAN)
7264 word = (words - 1) - word;
7265 offset = word * UNITS_PER_WORD;
7266 if (BYTES_BIG_ENDIAN)
7267 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7268 else
7269 offset += byte % UNITS_PER_WORD;
7270 }
7271 else
7272 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7273 ptr[offset] = value;
7274 }
7275 return total_bytes;
7276 }
7277
7278
7279 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7280 specified by EXPR into the buffer PTR of length LEN bytes.
7281 Return the number of bytes placed in the buffer, or zero
7282 upon failure. */
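/* The encoding works by viewing the fixed-point payload as an
   integer of the same bit width and reusing native_encode_int;
   e.g. a 16-bit _Fract constant is emitted as its underlying
   16-bit integer pattern (illustrative).  */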
7283
7284 static int
7285 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7286 {
7287 tree type = TREE_TYPE (expr);
7288 enum machine_mode mode = TYPE_MODE (type);
7289 int total_bytes = GET_MODE_SIZE (mode);
7290 FIXED_VALUE_TYPE value;
7291 tree i_value, i_type;
7292
7293 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7294 return 0;
7295
7296 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7297
7298 if (NULL_TREE == i_type
7299 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7300 return 0;
7301
7302 value = TREE_FIXED_CST (expr);
7303 i_value = double_int_to_tree (i_type, value.data);
7304
7305 return native_encode_int (i_value, ptr, len);
7306 }
7307
7308
7309 /* Subroutine of native_encode_expr. Encode the REAL_CST
7310 specified by EXPR into the buffer PTR of length LEN bytes.
7311 Return the number of bytes placed in the buffer, or zero
7312 upon failure. */
7313
7314 static int
7315 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7316 {
7317 tree type = TREE_TYPE (expr);
7318 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7319 int byte, offset, word, words, bitpos;
7320 unsigned char value;
7321
7322 /* There are always 32 bits in each long, no matter the size of
7323 the host's long. We handle floating point representations with
7324 up to 192 bits. */
7325 long tmp[6];
7326
7327 if (total_bytes > len)
7328 return 0;
7329 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7330
7331 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7332
7333 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7334 bitpos += BITS_PER_UNIT)
7335 {
7336 byte = (bitpos / BITS_PER_UNIT) & 3;
7337 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7338
7339 if (UNITS_PER_WORD < 4)
7340 {
7341 word = byte / UNITS_PER_WORD;
7342 if (WORDS_BIG_ENDIAN)
7343 word = (words - 1) - word;
7344 offset = word * UNITS_PER_WORD;
7345 if (BYTES_BIG_ENDIAN)
7346 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7347 else
7348 offset += byte % UNITS_PER_WORD;
7349 }
7350 else
7351 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7352 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7353 }
7354 return total_bytes;
7355 }
7356
7357 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7358 specified by EXPR into the buffer PTR of length LEN bytes.
7359 Return the number of bytes placed in the buffer, or zero
7360 upon failure. */
7361
7362 static int
7363 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7364 {
7365 int rsize, isize;
7366 tree part;
7367
7368 part = TREE_REALPART (expr);
7369 rsize = native_encode_expr (part, ptr, len);
7370 if (rsize == 0)
7371 return 0;
7372 part = TREE_IMAGPART (expr);
7373 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7374 if (isize != rsize)
7375 return 0;
7376 return rsize + isize;
7377 }
7378
7379
7380 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7381 specified by EXPR into the buffer PTR of length LEN bytes.
7382 Return the number of bytes placed in the buffer, or zero
7383 upon failure. */
7384
7385 static int
7386 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7387 {
7388 unsigned i, count;
7389 int size, offset;
7390 tree itype, elem;
7391
7392 offset = 0;
7393 count = VECTOR_CST_NELTS (expr);
7394 itype = TREE_TYPE (TREE_TYPE (expr));
7395 size = GET_MODE_SIZE (TYPE_MODE (itype));
7396 for (i = 0; i < count; i++)
7397 {
7398 elem = VECTOR_CST_ELT (expr, i);
7399 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7400 return 0;
7401 offset += size;
7402 }
7403 return offset;
7404 }
7405
7406
7407 /* Subroutine of native_encode_expr. Encode the STRING_CST
7408 specified by EXPR into the buffer PTR of length LEN bytes.
7409 Return the number of bytes placed in the buffer, or zero
7410 upon failure. */
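/* E.g. a STRING_CST "hi" stored in a char[4] is encoded as the
   bytes 'h' 'i' 0 0: everything beyond TREE_STRING_LENGTH is
   zero-filled (illustrative).  */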
7411
7412 static int
7413 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7414 {
7415 tree type = TREE_TYPE (expr);
7416 HOST_WIDE_INT total_bytes;
7417
7418 if (TREE_CODE (type) != ARRAY_TYPE
7419 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7420 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7421 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7422 return 0;
7423 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7424 if (total_bytes > len)
7425 return 0;
7426 if (TREE_STRING_LENGTH (expr) < total_bytes)
7427 {
7428 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7429 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7430 total_bytes - TREE_STRING_LENGTH (expr));
7431 }
7432 else
7433 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7434 return total_bytes;
7435 }
7436
7437
7438 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7439 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7440 buffer PTR of length LEN bytes. Return the number of bytes
7441 placed in the buffer, or zero upon failure. */
7442
7443 int
7444 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7445 {
7446 switch (TREE_CODE (expr))
7447 {
7448 case INTEGER_CST:
7449 return native_encode_int (expr, ptr, len);
7450
7451 case REAL_CST:
7452 return native_encode_real (expr, ptr, len);
7453
7454 case FIXED_CST:
7455 return native_encode_fixed (expr, ptr, len);
7456
7457 case COMPLEX_CST:
7458 return native_encode_complex (expr, ptr, len);
7459
7460 case VECTOR_CST:
7461 return native_encode_vector (expr, ptr, len);
7462
7463 case STRING_CST:
7464 return native_encode_string (expr, ptr, len);
7465
7466 default:
7467 return 0;
7468 }
7469 }
7470
7471
7472 /* Subroutine of native_interpret_expr. Interpret the contents of
7473 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7474 If the buffer cannot be interpreted, return NULL_TREE. */
7475
7476 static tree
7477 native_interpret_int (tree type, const unsigned char *ptr, int len)
7478 {
7479 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7480
7481 if (total_bytes > len
7482 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7483 return NULL_TREE;
7484
7485 wide_int result = wi::from_buffer (ptr, total_bytes);
7486
7487 return wide_int_to_tree (type, result);
7488 }
7489
7490
7491 /* Subroutine of native_interpret_expr. Interpret the contents of
7492 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7493 If the buffer cannot be interpreted, return NULL_TREE. */
7494
7495 static tree
7496 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7497 {
7498 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7499 double_int result;
7500 FIXED_VALUE_TYPE fixed_value;
7501
7502 if (total_bytes > len
7503 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7504 return NULL_TREE;
7505
7506 result = double_int::from_buffer (ptr, total_bytes);
7507 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7508
7509 return build_fixed (type, fixed_value);
7510 }
7511
7512
7513 /* Subroutine of native_interpret_expr. Interpret the contents of
7514 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7515 If the buffer cannot be interpreted, return NULL_TREE. */
7516
7517 static tree
7518 native_interpret_real (tree type, const unsigned char *ptr, int len)
7519 {
7520 enum machine_mode mode = TYPE_MODE (type);
7521 int total_bytes = GET_MODE_SIZE (mode);
7522 int byte, offset, word, words, bitpos;
7523 unsigned char value;
7524 /* There are always 32 bits in each long, no matter the size of
7525 the host's long. We handle floating point representations with
7526 up to 192 bits. */
7527 REAL_VALUE_TYPE r;
7528 long tmp[6];
7529
7531 if (total_bytes > len || total_bytes > 24)
7532 return NULL_TREE;
7533 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7534
7535 memset (tmp, 0, sizeof (tmp));
7536 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7537 bitpos += BITS_PER_UNIT)
7538 {
7539 byte = (bitpos / BITS_PER_UNIT) & 3;
7540 if (UNITS_PER_WORD < 4)
7541 {
7542 word = byte / UNITS_PER_WORD;
7543 if (WORDS_BIG_ENDIAN)
7544 word = (words - 1) - word;
7545 offset = word * UNITS_PER_WORD;
7546 if (BYTES_BIG_ENDIAN)
7547 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7548 else
7549 offset += byte % UNITS_PER_WORD;
7550 }
7551 else
7552 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7553 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7554
7555 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7556 }
7557
7558 real_from_target (&r, tmp, mode);
7559 return build_real (type, r);
7560 }
7561
7562
7563 /* Subroutine of native_interpret_expr. Interpret the contents of
7564 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7565 If the buffer cannot be interpreted, return NULL_TREE. */
7566
7567 static tree
7568 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7569 {
7570 tree etype, rpart, ipart;
7571 int size;
7572
7573 etype = TREE_TYPE (type);
7574 size = GET_MODE_SIZE (TYPE_MODE (etype));
7575 if (size * 2 > len)
7576 return NULL_TREE;
7577 rpart = native_interpret_expr (etype, ptr, size);
7578 if (!rpart)
7579 return NULL_TREE;
7580 ipart = native_interpret_expr (etype, ptr+size, size);
7581 if (!ipart)
7582 return NULL_TREE;
7583 return build_complex (type, rpart, ipart);
7584 }
7585
7586
7587 /* Subroutine of native_interpret_expr. Interpret the contents of
7588 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7589 If the buffer cannot be interpreted, return NULL_TREE. */
7590
7591 static tree
7592 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7593 {
7594 tree etype, elem;
7595 int i, size, count;
7596 tree *elements;
7597
7598 etype = TREE_TYPE (type);
7599 size = GET_MODE_SIZE (TYPE_MODE (etype));
7600 count = TYPE_VECTOR_SUBPARTS (type);
7601 if (size * count > len)
7602 return NULL_TREE;
7603
7604 elements = XALLOCAVEC (tree, count);
7605 for (i = count - 1; i >= 0; i--)
7606 {
7607 elem = native_interpret_expr (etype, ptr+(i*size), size);
7608 if (!elem)
7609 return NULL_TREE;
7610 elements[i] = elem;
7611 }
7612 return build_vector (type, elements);
7613 }
7614
7615
7616 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7617 the buffer PTR of length LEN as a constant of type TYPE. For
7618 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7619 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7620 return NULL_TREE. */
7621
7622 tree
7623 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7624 {
7625 switch (TREE_CODE (type))
7626 {
7627 case INTEGER_TYPE:
7628 case ENUMERAL_TYPE:
7629 case BOOLEAN_TYPE:
7630 case POINTER_TYPE:
7631 case REFERENCE_TYPE:
7632 return native_interpret_int (type, ptr, len);
7633
7634 case REAL_TYPE:
7635 return native_interpret_real (type, ptr, len);
7636
7637 case FIXED_POINT_TYPE:
7638 return native_interpret_fixed (type, ptr, len);
7639
7640 case COMPLEX_TYPE:
7641 return native_interpret_complex (type, ptr, len);
7642
7643 case VECTOR_TYPE:
7644 return native_interpret_vector (type, ptr, len);
7645
7646 default:
7647 return NULL_TREE;
7648 }
7649 }
7650
7651 /* Returns true if we can interpret the contents of a native encoding
7652 as TYPE. */
7653
7654 static bool
7655 can_native_interpret_type_p (tree type)
7656 {
7657 switch (TREE_CODE (type))
7658 {
7659 case INTEGER_TYPE:
7660 case ENUMERAL_TYPE:
7661 case BOOLEAN_TYPE:
7662 case POINTER_TYPE:
7663 case REFERENCE_TYPE:
7664 case FIXED_POINT_TYPE:
7665 case REAL_TYPE:
7666 case COMPLEX_TYPE:
7667 case VECTOR_TYPE:
7668 return true;
7669 default:
7670 return false;
7671 }
7672 }
7673
7674 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7675 TYPE at compile-time. If we're unable to perform the conversion
7676 return NULL_TREE. */
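/* E.g. VIEW_CONVERT_EXPR<int>(1.0f) is folded by encoding the
   REAL_CST into its four target bytes and reading them back as
   an integer, yielding 0x3f800000 on an IEEE single-precision
   target (illustrative).  */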
7677
7678 static tree
7679 fold_view_convert_expr (tree type, tree expr)
7680 {
7681 /* We support up to 512-bit values (for V8DFmode). */
7682 unsigned char buffer[64];
7683 int len;
7684
7685 /* Check that the host and target are sane. */
7686 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7687 return NULL_TREE;
7688
7689 len = native_encode_expr (expr, buffer, sizeof (buffer));
7690 if (len == 0)
7691 return NULL_TREE;
7692
7693 return native_interpret_expr (type, buffer, len);
7694 }
7695
7696 /* Build an expression for the address of T. Folds away INDIRECT_REF
7697 to avoid confusing the gimplify process. */
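/* E.g. taking the address of *p yields p itself (converted to
   PTRTYPE if necessary), and the address of MEM_REF[p, 0] folds
   back to p, as the cases below show (illustrative).  */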
7698
7699 tree
7700 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7701 {
7702 /* The size of the object is not relevant when talking about its address. */
7703 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7704 t = TREE_OPERAND (t, 0);
7705
7706 if (TREE_CODE (t) == INDIRECT_REF)
7707 {
7708 t = TREE_OPERAND (t, 0);
7709
7710 if (TREE_TYPE (t) != ptrtype)
7711 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7712 }
7713 else if (TREE_CODE (t) == MEM_REF
7714 && integer_zerop (TREE_OPERAND (t, 1)))
7715 return TREE_OPERAND (t, 0);
7716 else if (TREE_CODE (t) == MEM_REF
7717 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7718 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7719 TREE_OPERAND (t, 0),
7720 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7721 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7722 {
7723 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7724
7725 if (TREE_TYPE (t) != ptrtype)
7726 t = fold_convert_loc (loc, ptrtype, t);
7727 }
7728 else
7729 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7730
7731 return t;
7732 }
7733
7734 /* Build an expression for the address of T. */
7735
7736 tree
7737 build_fold_addr_expr_loc (location_t loc, tree t)
7738 {
7739 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7740
7741 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7742 }
7743
7744 static bool vec_cst_ctor_to_array (tree, tree *);
7745
7746 /* Fold a unary expression of code CODE and type TYPE with operand
7747 OP0. Return the folded expression if folding is successful.
7748 Otherwise, return NULL_TREE. */
7749
7750 tree
7751 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7752 {
7753 tree tem;
7754 tree arg0;
7755 enum tree_code_class kind = TREE_CODE_CLASS (code);
7756
7757 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7758 && TREE_CODE_LENGTH (code) == 1);
7759
7760 arg0 = op0;
7761 if (arg0)
7762 {
7763 if (CONVERT_EXPR_CODE_P (code)
7764 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7765 {
7766 /* Don't use STRIP_NOPS, because signedness of argument type
7767 matters. */
7768 STRIP_SIGN_NOPS (arg0);
7769 }
7770 else
7771 {
7772 /* Strip any conversions that don't change the mode. This
7773 is safe for every expression, except for a comparison
7774 expression because its signedness is derived from its
7775 operands.
7776
7777 Note that this is done as an internal manipulation within
7778 the constant folder, in order to find the simplest
7779 representation of the arguments so that their form can be
7780 studied. In any case, the appropriate type conversions
7781 should be put back in the tree that comes out of the
7782 constant folder. */
7783 STRIP_NOPS (arg0);
7784 }
7785 }
7786
7787 if (TREE_CODE_CLASS (code) == tcc_unary)
7788 {
7789 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7790 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7791 fold_build1_loc (loc, code, type,
7792 fold_convert_loc (loc, TREE_TYPE (op0),
7793 TREE_OPERAND (arg0, 1))));
7794 else if (TREE_CODE (arg0) == COND_EXPR)
7795 {
7796 tree arg01 = TREE_OPERAND (arg0, 1);
7797 tree arg02 = TREE_OPERAND (arg0, 2);
7798 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7799 arg01 = fold_build1_loc (loc, code, type,
7800 fold_convert_loc (loc,
7801 TREE_TYPE (op0), arg01));
7802 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7803 arg02 = fold_build1_loc (loc, code, type,
7804 fold_convert_loc (loc,
7805 TREE_TYPE (op0), arg02));
7806 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7807 arg01, arg02);
7808
7809 /* If this was a conversion, and all we did was to move it
7810 inside the COND_EXPR, bring it back out. But leave it if
7811 it is a conversion from integer to integer and the
7812 result precision is no wider than a word since such a
7813 conversion is cheap and may be optimized away by combine,
7814 while it couldn't if it were outside the COND_EXPR. Then return
7815 so we don't get into an infinite recursion loop taking the
7816 conversion out and then back in. */
7817
7818 if ((CONVERT_EXPR_CODE_P (code)
7819 || code == NON_LVALUE_EXPR)
7820 && TREE_CODE (tem) == COND_EXPR
7821 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7822 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7823 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7824 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7825 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7826 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7827 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7828 && (INTEGRAL_TYPE_P
7829 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7830 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7831 || flag_syntax_only))
7832 tem = build1_loc (loc, code, type,
7833 build3 (COND_EXPR,
7834 TREE_TYPE (TREE_OPERAND
7835 (TREE_OPERAND (tem, 1), 0)),
7836 TREE_OPERAND (tem, 0),
7837 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7838 TREE_OPERAND (TREE_OPERAND (tem, 2),
7839 0)));
7840 return tem;
7841 }
7842 }
7843
7844 switch (code)
7845 {
7846 case PAREN_EXPR:
7847 /* Re-association barriers around constants and other re-association
7848 barriers can be removed. */
7849 if (CONSTANT_CLASS_P (op0)
7850 || TREE_CODE (op0) == PAREN_EXPR)
7851 return fold_convert_loc (loc, type, op0);
7852 return NULL_TREE;
7853
7854 case NON_LVALUE_EXPR:
7855 if (!maybe_lvalue_p (op0))
7856 return fold_convert_loc (loc, type, op0);
7857 return NULL_TREE;
7858
7859 CASE_CONVERT:
7860 case FLOAT_EXPR:
7861 case FIX_TRUNC_EXPR:
7862 if (TREE_TYPE (op0) == type)
7863 return op0;
7864
7865 if (COMPARISON_CLASS_P (op0))
7866 {
7867 /* If we have (type) (a CMP b) and type is an integral type, return
7868 new expression involving the new type. Canonicalize
7869 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7870 non-integral type.
7871 Do not fold the result, as that would not simplify further;
7872 folding again could also lead to infinite recursion. */
7873 if (TREE_CODE (type) == BOOLEAN_TYPE)
7874 return build2_loc (loc, TREE_CODE (op0), type,
7875 TREE_OPERAND (op0, 0),
7876 TREE_OPERAND (op0, 1));
7877 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7878 && TREE_CODE (type) != VECTOR_TYPE)
7879 return build3_loc (loc, COND_EXPR, type, op0,
7880 constant_boolean_node (true, type),
7881 constant_boolean_node (false, type));
7882 }
7883
7884 /* Handle cases of two conversions in a row. */
7885 if (CONVERT_EXPR_P (op0))
7886 {
7887 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7888 tree inter_type = TREE_TYPE (op0);
7889 int inside_int = INTEGRAL_TYPE_P (inside_type);
7890 int inside_ptr = POINTER_TYPE_P (inside_type);
7891 int inside_float = FLOAT_TYPE_P (inside_type);
7892 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7893 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7894 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7895 int inter_int = INTEGRAL_TYPE_P (inter_type);
7896 int inter_ptr = POINTER_TYPE_P (inter_type);
7897 int inter_float = FLOAT_TYPE_P (inter_type);
7898 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7899 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7900 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7901 int final_int = INTEGRAL_TYPE_P (type);
7902 int final_ptr = POINTER_TYPE_P (type);
7903 int final_float = FLOAT_TYPE_P (type);
7904 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7905 unsigned int final_prec = TYPE_PRECISION (type);
7906 int final_unsignedp = TYPE_UNSIGNED (type);
7907
7908 /* In addition to the cases of two conversions in a row
7909 handled below, if we are converting something to its own
7910 type via an object of identical or wider precision, neither
7911 conversion is needed. */
7912 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7913 && (((inter_int || inter_ptr) && final_int)
7914 || (inter_float && final_float))
7915 && inter_prec >= final_prec)
7916 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7917
7918 /* Likewise, if the intermediate and initial types are either both
7919 float or both integer, we don't need the middle conversion if the
7920 former is wider than the latter and doesn't change the signedness
7921 (for integers). Avoid this if the final type is a pointer since
7922 then we sometimes need the middle conversion. Likewise if the
7923 final type has a precision not equal to the size of its mode. */
7924 if (((inter_int && inside_int)
7925 || (inter_float && inside_float)
7926 || (inter_vec && inside_vec))
7927 && inter_prec >= inside_prec
7928 && (inter_float || inter_vec
7929 || inter_unsignedp == inside_unsignedp)
7930 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7931 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7932 && ! final_ptr
7933 && (! final_vec || inter_prec == inside_prec))
7934 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7935
7936 /* If we have a sign-extension of a zero-extended value, we can
7937 replace that by a single zero-extension. Likewise if the
7938 final conversion does not change precision we can drop the
7939 intermediate conversion. */
7940 if (inside_int && inter_int && final_int
7941 && ((inside_prec < inter_prec && inter_prec < final_prec
7942 && inside_unsignedp && !inter_unsignedp)
7943 || final_prec == inter_prec))
7944 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7945
7946 /* Two conversions in a row are not needed unless:
7947 - some conversion is floating-point (overstrict for now), or
7948 - some conversion is a vector (overstrict for now), or
7949 - the intermediate type is narrower than both initial and
7950 final, or
7951 - the intermediate type and innermost type differ in signedness,
7952 and the outermost type is wider than the intermediate, or
7953 - the initial type is a pointer type and the precisions of the
7954 intermediate and final types differ, or
7955 - the final type is a pointer type and the precisions of the
7956 initial and intermediate types differ. */
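/* E.g. for "short s", (char) (int) s is equivalent to (char) s,
   so the intermediate widening can be dropped (an illustrative
   instance of the rules above).  */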
7957 if (! inside_float && ! inter_float && ! final_float
7958 && ! inside_vec && ! inter_vec && ! final_vec
7959 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7960 && ! (inside_int && inter_int
7961 && inter_unsignedp != inside_unsignedp
7962 && inter_prec < final_prec)
7963 && ((inter_unsignedp && inter_prec > inside_prec)
7964 == (final_unsignedp && final_prec > inter_prec))
7965 && ! (inside_ptr && inter_prec != final_prec)
7966 && ! (final_ptr && inside_prec != inter_prec)
7967 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7968 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7969 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7970 }
7971
7972 /* Handle (T *)&A.B.C for A being of type T and B and C
7973 living at offset zero. This occurs frequently in
7974 C++ upcasting and then accessing the base. */
7975 if (TREE_CODE (op0) == ADDR_EXPR
7976 && POINTER_TYPE_P (type)
7977 && handled_component_p (TREE_OPERAND (op0, 0)))
7978 {
7979 HOST_WIDE_INT bitsize, bitpos;
7980 tree offset;
7981 enum machine_mode mode;
7982 int unsignedp, volatilep;
7983 tree base = TREE_OPERAND (op0, 0);
7984 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7985 &mode, &unsignedp, &volatilep, false);
7986 /* If the reference was to a (constant) zero offset, we can use
7987 the address of the base if it has the same base type
7988 as the result type and the pointer type is unqualified. */
7989 if (! offset && bitpos == 0
7990 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7991 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7992 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7993 return fold_convert_loc (loc, type,
7994 build_fold_addr_expr_loc (loc, base));
7995 }
7996
7997 if (TREE_CODE (op0) == MODIFY_EXPR
7998 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7999 /* Detect assigning a bitfield. */
8000 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8001 && DECL_BIT_FIELD
8002 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8003 {
8004 /* Don't leave an assignment inside a conversion
8005 unless assigning a bitfield. */
8006 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8007 /* First do the assignment, then return converted constant. */
8008 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8009 TREE_NO_WARNING (tem) = 1;
8010 TREE_USED (tem) = 1;
8011 return tem;
8012 }
8013
8014 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8015 constants (if x has signed type, the sign bit cannot be set
8016 in c). This folds extension into the BIT_AND_EXPR.
8017 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8018 very likely don't have maximal range for their precision and this
8019 transformation effectively doesn't preserve non-maximal ranges. */
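/* E.g. assuming the AND is carried out in the narrow unsigned
   type, (unsigned int) (x & 0x40) for "unsigned char x" becomes
   (unsigned int) x & 0x40U, folding the widening into the mask
   (illustrative).  */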
8020 if (TREE_CODE (type) == INTEGER_TYPE
8021 && TREE_CODE (op0) == BIT_AND_EXPR
8022 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8023 {
8024 tree and_expr = op0;
8025 tree and0 = TREE_OPERAND (and_expr, 0);
8026 tree and1 = TREE_OPERAND (and_expr, 1);
8027 int change = 0;
8028
8029 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8030 || (TYPE_PRECISION (type)
8031 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8032 change = 1;
8033 else if (TYPE_PRECISION (TREE_TYPE (and1))
8034 <= HOST_BITS_PER_WIDE_INT
8035 && tree_fits_uhwi_p (and1))
8036 {
8037 unsigned HOST_WIDE_INT cst;
8038
8039 cst = tree_to_uhwi (and1);
8040 cst &= HOST_WIDE_INT_M1U
8041 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8042 change = (cst == 0);
8043 #ifdef LOAD_EXTEND_OP
8044 if (change
8045 && !flag_syntax_only
8046 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8047 == ZERO_EXTEND))
8048 {
8049 tree uns = unsigned_type_for (TREE_TYPE (and0));
8050 and0 = fold_convert_loc (loc, uns, and0);
8051 and1 = fold_convert_loc (loc, uns, and1);
8052 }
8053 #endif
8054 }
8055 if (change)
8056 {
8057 tem = force_fit_type (type, wi::to_widest (and1), 0,
8058 TREE_OVERFLOW (and1));
8059 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8060 fold_convert_loc (loc, type, and0), tem);
8061 }
8062 }
8063
8064 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8065 when one of the new casts will fold away. Conservatively we assume
8066 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8067 if (POINTER_TYPE_P (type)
8068 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8069 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8070 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8071 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8072 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8073 {
8074 tree arg00 = TREE_OPERAND (arg0, 0);
8075 tree arg01 = TREE_OPERAND (arg0, 1);
8076
8077 return fold_build_pointer_plus_loc
8078 (loc, fold_convert_loc (loc, type, arg00), arg01);
8079 }
8080
8081 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8082 of the same precision, and X is an integer type not narrower than
8083 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8084 if (INTEGRAL_TYPE_P (type)
8085 && TREE_CODE (op0) == BIT_NOT_EXPR
8086 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8087 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8088 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8089 {
8090 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8091 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8092 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8093 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8094 fold_convert_loc (loc, type, tem));
8095 }
8096
8097 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8098 type of X and Y (integer types only). */
8099 if (INTEGRAL_TYPE_P (type)
8100 && TREE_CODE (op0) == MULT_EXPR
8101 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8102 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8103 {
8104 /* Be careful not to introduce new overflows. */
8105 tree mult_type;
8106 if (TYPE_OVERFLOW_WRAPS (type))
8107 mult_type = type;
8108 else
8109 mult_type = unsigned_type_for (type);
8110
8111 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8112 {
8113 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8114 fold_convert_loc (loc, mult_type,
8115 TREE_OPERAND (op0, 0)),
8116 fold_convert_loc (loc, mult_type,
8117 TREE_OPERAND (op0, 1)));
8118 return fold_convert_loc (loc, type, tem);
8119 }
8120 }
8121
8122 tem = fold_convert_const (code, type, arg0);
8123 return tem ? tem : NULL_TREE;
8124
8125 case ADDR_SPACE_CONVERT_EXPR:
8126 if (integer_zerop (arg0))
8127 return fold_convert_const (code, type, arg0);
8128 return NULL_TREE;
8129
8130 case FIXED_CONVERT_EXPR:
8131 tem = fold_convert_const (code, type, arg0);
8132 return tem ? tem : NULL_TREE;
8133
8134 case VIEW_CONVERT_EXPR:
8135 if (TREE_TYPE (op0) == type)
8136 return op0;
8137 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8138 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8139 type, TREE_OPERAND (op0, 0));
8140 if (TREE_CODE (op0) == MEM_REF)
8141 return fold_build2_loc (loc, MEM_REF, type,
8142 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8143
8144 /* For integral conversions with the same precision or pointer
8145 conversions use a NOP_EXPR instead. */
8146 if ((INTEGRAL_TYPE_P (type)
8147 || POINTER_TYPE_P (type))
8148 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8149 || POINTER_TYPE_P (TREE_TYPE (op0)))
8150 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8151 return fold_convert_loc (loc, type, op0);
8152
8153 /* Strip inner integral conversions that do not change the precision. */
8154 if (CONVERT_EXPR_P (op0)
8155 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8156 || POINTER_TYPE_P (TREE_TYPE (op0)))
8157 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8158 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8159 && (TYPE_PRECISION (TREE_TYPE (op0))
8160 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8161 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8162 type, TREE_OPERAND (op0, 0));
8163
8164 return fold_view_convert_expr (type, op0);
8165
8166 case NEGATE_EXPR:
8167 tem = fold_negate_expr (loc, arg0);
8168 if (tem)
8169 return fold_convert_loc (loc, type, tem);
8170 return NULL_TREE;
8171
8172 case ABS_EXPR:
8173 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8174 return fold_abs_const (arg0, type);
8175 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8176 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8177 /* Convert fabs((double)float) into (double)fabsf(float). */
8178 else if (TREE_CODE (arg0) == NOP_EXPR
8179 && TREE_CODE (type) == REAL_TYPE)
8180 {
8181 tree targ0 = strip_float_extensions (arg0);
8182 if (targ0 != arg0)
8183 return fold_convert_loc (loc, type,
8184 fold_build1_loc (loc, ABS_EXPR,
8185 TREE_TYPE (targ0),
8186 targ0));
8187 }
8188 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8189 else if (TREE_CODE (arg0) == ABS_EXPR)
8190 return arg0;
8191 else if (tree_expr_nonnegative_p (arg0))
8192 return arg0;
8193
8194 /* Strip sign ops from argument. */
8195 if (TREE_CODE (type) == REAL_TYPE)
8196 {
8197 tem = fold_strip_sign_ops (arg0);
8198 if (tem)
8199 return fold_build1_loc (loc, ABS_EXPR, type,
8200 fold_convert_loc (loc, type, tem));
8201 }
8202 return NULL_TREE;
8203
8204 case CONJ_EXPR:
8205 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8206 return fold_convert_loc (loc, type, arg0);
8207 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8208 {
8209 tree itype = TREE_TYPE (type);
8210 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8211 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8212 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8213 negate_expr (ipart));
8214 }
8215 if (TREE_CODE (arg0) == COMPLEX_CST)
8216 {
8217 tree itype = TREE_TYPE (type);
8218 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8219 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8220 return build_complex (type, rpart, negate_expr (ipart));
8221 }
8222 if (TREE_CODE (arg0) == CONJ_EXPR)
8223 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8224 return NULL_TREE;
8225
8226 case BIT_NOT_EXPR:
8227 if (TREE_CODE (arg0) == INTEGER_CST)
8228 return fold_not_const (arg0, type);
8229 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8230 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8231 /* Convert ~ (-A) to A - 1. */
8232 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8233 return fold_build2_loc (loc, MINUS_EXPR, type,
8234 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8235 build_int_cst (type, 1));
8236 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8237 else if (INTEGRAL_TYPE_P (type)
8238 && ((TREE_CODE (arg0) == MINUS_EXPR
8239 && integer_onep (TREE_OPERAND (arg0, 1)))
8240 || (TREE_CODE (arg0) == PLUS_EXPR
8241 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8242 return fold_build1_loc (loc, NEGATE_EXPR, type,
8243 fold_convert_loc (loc, type,
8244 TREE_OPERAND (arg0, 0)));
8245 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8246 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8247 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8248 fold_convert_loc (loc, type,
8249 TREE_OPERAND (arg0, 0)))))
8250 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8251 fold_convert_loc (loc, type,
8252 TREE_OPERAND (arg0, 1)));
8253 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8254 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8255 fold_convert_loc (loc, type,
8256 TREE_OPERAND (arg0, 1)))))
8257 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8258 fold_convert_loc (loc, type,
8259 TREE_OPERAND (arg0, 0)), tem);
8260 /* Perform BIT_NOT_EXPR on each element individually. */
8261 else if (TREE_CODE (arg0) == VECTOR_CST)
8262 {
8263 tree *elements;
8264 tree elem;
8265 unsigned count = VECTOR_CST_NELTS (arg0), i;
8266
8267 elements = XALLOCAVEC (tree, count);
8268 for (i = 0; i < count; i++)
8269 {
8270 elem = VECTOR_CST_ELT (arg0, i);
8271 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8272 if (elem == NULL_TREE)
8273 break;
8274 elements[i] = elem;
8275 }
8276 if (i == count)
8277 return build_vector (type, elements);
8278 }
8279 else if (COMPARISON_CLASS_P (arg0)
8280 && (VECTOR_TYPE_P (type)
8281 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8282 {
8283 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8284 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8285 HONOR_NANS (TYPE_MODE (op_type)));
8286 if (subcode != ERROR_MARK)
8287 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8288 TREE_OPERAND (arg0, 1));
8289 }
8290
8291 
8293
8294 case TRUTH_NOT_EXPR:
8295 /* Note that the operand of this must be an int
8296 and its values must be 0 or 1.
8297 ("true" is a fixed value perhaps depending on the language,
8298 but we don't handle values other than 1 correctly yet.) */
8299 tem = fold_truth_not_expr (loc, arg0);
8300 if (!tem)
8301 return NULL_TREE;
8302 return fold_convert_loc (loc, type, tem);
8303
8304 case REALPART_EXPR:
8305 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8306 return fold_convert_loc (loc, type, arg0);
8307 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8308 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8309 TREE_OPERAND (arg0, 1));
8310 if (TREE_CODE (arg0) == COMPLEX_CST)
8311 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8312 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8313 {
8314 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8315 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8316 fold_build1_loc (loc, REALPART_EXPR, itype,
8317 TREE_OPERAND (arg0, 0)),
8318 fold_build1_loc (loc, REALPART_EXPR, itype,
8319 TREE_OPERAND (arg0, 1)));
8320 return fold_convert_loc (loc, type, tem);
8321 }
8322 if (TREE_CODE (arg0) == CONJ_EXPR)
8323 {
8324 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8325 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8326 TREE_OPERAND (arg0, 0));
8327 return fold_convert_loc (loc, type, tem);
8328 }
8329 if (TREE_CODE (arg0) == CALL_EXPR)
8330 {
8331 tree fn = get_callee_fndecl (arg0);
8332 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8333 switch (DECL_FUNCTION_CODE (fn))
8334 {
8335 CASE_FLT_FN (BUILT_IN_CEXPI):
8336 fn = mathfn_built_in (type, BUILT_IN_COS);
8337 if (fn)
8338 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8339 break;
8340
8341 default:
8342 break;
8343 }
8344 }
8345 return NULL_TREE;
8346
8347 case IMAGPART_EXPR:
8348 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8349 return build_zero_cst (type);
8350 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8351 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8352 TREE_OPERAND (arg0, 0));
8353 if (TREE_CODE (arg0) == COMPLEX_CST)
8354 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8355 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8356 {
8357 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8358 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8359 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8360 TREE_OPERAND (arg0, 0)),
8361 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8362 TREE_OPERAND (arg0, 1)));
8363 return fold_convert_loc (loc, type, tem);
8364 }
8365 if (TREE_CODE (arg0) == CONJ_EXPR)
8366 {
8367 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8368 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8369 return fold_convert_loc (loc, type, negate_expr (tem));
8370 }
8371 if (TREE_CODE (arg0) == CALL_EXPR)
8372 {
8373 tree fn = get_callee_fndecl (arg0);
8374 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8375 switch (DECL_FUNCTION_CODE (fn))
8376 {
8377 CASE_FLT_FN (BUILT_IN_CEXPI):
8378 fn = mathfn_built_in (type, BUILT_IN_SIN);
8379 if (fn)
8380 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8381 break;
8382
8383 default:
8384 break;
8385 }
8386 }
8387 return NULL_TREE;
8388
8389 case INDIRECT_REF:
8390 /* Fold *&X to X if X is an lvalue. */
8391 if (TREE_CODE (op0) == ADDR_EXPR)
8392 {
8393 tree op00 = TREE_OPERAND (op0, 0);
8394 if ((TREE_CODE (op00) == VAR_DECL
8395 || TREE_CODE (op00) == PARM_DECL
8396 || TREE_CODE (op00) == RESULT_DECL)
8397 && !TREE_READONLY (op00))
8398 return op00;
8399 }
8400 return NULL_TREE;
8401
8402 case VEC_UNPACK_LO_EXPR:
8403 case VEC_UNPACK_HI_EXPR:
8404 case VEC_UNPACK_FLOAT_LO_EXPR:
8405 case VEC_UNPACK_FLOAT_HI_EXPR:
8406 {
8407 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8408 tree *elts;
8409 enum tree_code subcode;
8410
8411 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8412 if (TREE_CODE (arg0) != VECTOR_CST)
8413 return NULL_TREE;
8414
8415 elts = XALLOCAVEC (tree, nelts * 2);
8416 if (!vec_cst_ctor_to_array (arg0, elts))
8417 return NULL_TREE;
8418
8419 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8420 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8421 elts += nelts;
8422
8423 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8424 subcode = NOP_EXPR;
8425 else
8426 subcode = FLOAT_EXPR;
8427
8428 for (i = 0; i < nelts; i++)
8429 {
8430 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8431 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8432 return NULL_TREE;
8433 }
8434
8435 return build_vector (type, elts);
8436 }
8437
8438 case REDUC_MIN_EXPR:
8439 case REDUC_MAX_EXPR:
8440 case REDUC_PLUS_EXPR:
8441 {
8442 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8443 tree *elts;
8444 enum tree_code subcode;
8445
8446 if (TREE_CODE (op0) != VECTOR_CST)
8447 return NULL_TREE;
8448
8449 elts = XALLOCAVEC (tree, nelts);
8450 if (!vec_cst_ctor_to_array (op0, elts))
8451 return NULL_TREE;
8452
8453 switch (code)
8454 {
8455 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8456 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8457 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8458 default: gcc_unreachable ();
8459 }
8460
8461 for (i = 1; i < nelts; i++)
8462 {
8463 elts[0] = const_binop (subcode, elts[0], elts[i]);
8464 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8465 return NULL_TREE;
8466 elts[i] = build_zero_cst (TREE_TYPE (type));
8467 }
8468
8469 return build_vector (type, elts);
8470 }
8471
8472 default:
8473 return NULL_TREE;
8474 } /* switch (code) */
8475 }
8476
8477
8478 /* If the operation was a conversion do _not_ mark a resulting constant
8479 with TREE_OVERFLOW if the original constant was not. These conversions
8480 have implementation-defined behavior, and retaining the TREE_OVERFLOW
8481 flag here would confuse later passes such as VRP. */
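/* E.g. folding (int) 4294967295u yields the INTEGER_CST -1; if the
   conversion routine flagged TREE_OVERFLOW, the flag is reset to the
   operand's, since such a wrap-around is implementation-defined
   rather than a true overflow (illustrative sketch).  */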
8482 tree
8483 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8484 tree type, tree op0)
8485 {
8486 tree res = fold_unary_loc (loc, code, type, op0);
8487 if (res
8488 && TREE_CODE (res) == INTEGER_CST
8489 && TREE_CODE (op0) == INTEGER_CST
8490 && CONVERT_EXPR_CODE_P (code))
8491 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8492
8493 return res;
8494 }
8495
8496 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8497 operands OP0 and OP1. LOC is the location of the resulting expression.
8498 ARG0 and ARG1 are OP0 and OP1 with conversion nops stripped.
8499 Return the folded expression if folding is successful. Otherwise,
8500 return NULL_TREE. */
8501 static tree
8502 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8503 tree arg0, tree arg1, tree op0, tree op1)
8504 {
8505 tree tem;
8506
8507 /* We only do these simplifications if we are optimizing. */
8508 if (!optimize)
8509 return NULL_TREE;
8510
8511 /* Check for things like (A || B) && (A || C). We can convert this
8512 to A || (B && C). Note that either operator can be any of the four
8513 truth and/or operations and the transformation will still be
8514 valid. Also note that we only care about order for the
8515 ANDIF and ORIF operators. If B contains side effects, this
8516 might change the truth-value of A. */
8517 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8518 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8519 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8520 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8521 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8522 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8523 {
8524 tree a00 = TREE_OPERAND (arg0, 0);
8525 tree a01 = TREE_OPERAND (arg0, 1);
8526 tree a10 = TREE_OPERAND (arg1, 0);
8527 tree a11 = TREE_OPERAND (arg1, 1);
8528 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8529 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8530 && (code == TRUTH_AND_EXPR
8531 || code == TRUTH_OR_EXPR));
8532
8533 if (operand_equal_p (a00, a10, 0))
8534 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8535 fold_build2_loc (loc, code, type, a01, a11));
8536 else if (commutative && operand_equal_p (a00, a11, 0))
8537 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8538 fold_build2_loc (loc, code, type, a01, a10));
8539 else if (commutative && operand_equal_p (a01, a10, 0))
8540 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8541 fold_build2_loc (loc, code, type, a00, a11));
8542
8543 /* This case is tricky because we must either have commutative
8544 operators or else A10 must not have side-effects. */
8545
8546 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8547 && operand_equal_p (a01, a11, 0))
8548 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8549 fold_build2_loc (loc, code, type, a00, a10),
8550 a01);
8551 }
8552
8553 /* See if we can build a range comparison. */
8554 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8555 return tem;
8556
8557 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8558 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8559 {
8560 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8561 if (tem)
8562 return fold_build2_loc (loc, code, type, tem, arg1);
8563 }
8564
8565 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8566 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8567 {
8568 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8569 if (tem)
8570 return fold_build2_loc (loc, code, type, arg0, tem);
8571 }
8572
8573 /* Check for the possibility of merging component references. If our
8574 lhs is another similar operation, try to merge its rhs with our
8575 rhs. Then try to merge our lhs and rhs. */
8576 if (TREE_CODE (arg0) == code
8577 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8578 TREE_OPERAND (arg0, 1), arg1)))
8579 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8580
8581 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8582 return tem;
8583
8584 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8585 && (code == TRUTH_AND_EXPR
8586 || code == TRUTH_ANDIF_EXPR
8587 || code == TRUTH_OR_EXPR
8588 || code == TRUTH_ORIF_EXPR))
8589 {
8590 enum tree_code ncode, icode;
8591
8592 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8593 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8594 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8595
8596 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8597 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8598 We don't want to pack more than two leaves into a non-IF AND/OR
8599 expression.
8600 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8601 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8602 If the inner right-hand side of the left-hand operand has side-effects
8603 or isn't simple, then we can't add to it, as otherwise we might
8604 destroy the if-sequence. */
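/* For illustration (hypothetical operands, with b and c simple and free
   of side effects and traps): "(a && b) && c" becomes "a && (b AND c)",
   where the inner AND is the non-short-circuit TRUTH_AND_EXPR, keeping
   at most two leaves under the non-IF operator.  */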
8605 if (TREE_CODE (arg0) == icode
8606 && simple_operand_p_2 (arg1)
8607 /* Needed for sequence points to handle trappings, and
8608 side-effects. */
8609 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8610 {
8611 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8612 arg1);
8613 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8614 tem);
8615 }
8616 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8617 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8618 else if (TREE_CODE (arg1) == icode
8619 && simple_operand_p_2 (arg0)
8620 /* Needed for sequence points to handle trappings, and
8621 side-effects. */
8622 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8623 {
8624 tem = fold_build2_loc (loc, ncode, type,
8625 arg0, TREE_OPERAND (arg1, 0));
8626 return fold_build2_loc (loc, icode, type, tem,
8627 TREE_OPERAND (arg1, 1));
8628 }
8629 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8630 into (A OR B).
8631 For sequence point consistency, we need to check for trapping
8632 and side-effects. */
8633 else if (code == icode && simple_operand_p_2 (arg0)
8634 && simple_operand_p_2 (arg1))
8635 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8636 }
8637
8638 return NULL_TREE;
8639 }
8640
8641 /* Fold a binary expression of code CODE and type TYPE with operands
8642 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8643 Return the folded expression if folding is successful. Otherwise,
8644 return NULL_TREE. */
8645
8646 static tree
8647 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8648 {
8649 enum tree_code compl_code;
8650
8651 if (code == MIN_EXPR)
8652 compl_code = MAX_EXPR;
8653 else if (code == MAX_EXPR)
8654 compl_code = MIN_EXPR;
8655 else
8656 gcc_unreachable ();
8657
8658 /* MIN (MAX (a, b), b) == b. */
8659 if (TREE_CODE (op0) == compl_code
8660 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8661 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8662
8663 /* MIN (MAX (b, a), b) == b. */
8664 if (TREE_CODE (op0) == compl_code
8665 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8666 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8667 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8668
8669 /* MIN (a, MAX (a, b)) == a. */
8670 if (TREE_CODE (op1) == compl_code
8671 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8672 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8673 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8674
8675 /* MIN (a, MAX (b, a)) == a. */
8676 if (TREE_CODE (op1) == compl_code
8677 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8678 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8679 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8680
8681 return NULL_TREE;
8682 }
8683
8684 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8685 by changing CODE to reduce the magnitude of constants involved in
8686 ARG0 of the comparison.
8687 Returns a canonicalized comparison tree if a simplification was
8688 possible, otherwise returns NULL_TREE.
8689 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8690 valid if signed overflow is undefined. */
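/* For illustration (hypothetical signed operand x): "5 <= x" becomes
   "4 < x" (and is then swapped to the more canonical "x > 4"), and
   under undefined signed overflow "x + 5 <= y" becomes "x + 4 < y";
   each step shrinks the constant's magnitude by one.  */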
8691
8692 static tree
8693 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8694 tree arg0, tree arg1,
8695 bool *strict_overflow_p)
8696 {
8697 enum tree_code code0 = TREE_CODE (arg0);
8698 tree t, cst0 = NULL_TREE;
8699 int sgn0;
8700 bool swap = false;
8701
8702 /* Match A +- CST code arg1 and CST code arg1. We can change the
8703 first form only if overflow is undefined. */
8704 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8705 /* In principle pointers also have undefined overflow behavior,
8706 but that causes problems elsewhere. */
8707 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8708 && (code0 == MINUS_EXPR
8709 || code0 == PLUS_EXPR)
8710 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8711 || code0 == INTEGER_CST))
8712 return NULL_TREE;
8713
8714 /* Identify the constant in arg0 and its sign. */
8715 if (code0 == INTEGER_CST)
8716 cst0 = arg0;
8717 else
8718 cst0 = TREE_OPERAND (arg0, 1);
8719 sgn0 = tree_int_cst_sgn (cst0);
8720
8721 /* Overflowed constants and zero will cause problems. */
8722 if (integer_zerop (cst0)
8723 || TREE_OVERFLOW (cst0))
8724 return NULL_TREE;
8725
8726 /* See if we can reduce the magnitude of the constant in
8727 arg0 by changing the comparison code. */
8728 if (code0 == INTEGER_CST)
8729 {
8730 /* CST <= arg1 -> CST-1 < arg1. */
8731 if (code == LE_EXPR && sgn0 == 1)
8732 code = LT_EXPR;
8733 /* -CST < arg1 -> -CST-1 <= arg1. */
8734 else if (code == LT_EXPR && sgn0 == -1)
8735 code = LE_EXPR;
8736 /* CST > arg1 -> CST-1 >= arg1. */
8737 else if (code == GT_EXPR && sgn0 == 1)
8738 code = GE_EXPR;
8739 /* -CST >= arg1 -> -CST-1 > arg1. */
8740 else if (code == GE_EXPR && sgn0 == -1)
8741 code = GT_EXPR;
8742 else
8743 return NULL_TREE;
8744 /* arg1 code' CST' might be more canonical. */
8745 swap = true;
8746 }
8747 else
8748 {
8749 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8750 if (code == LT_EXPR
8751 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8752 code = LE_EXPR;
8753 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8754 else if (code == GT_EXPR
8755 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8756 code = GE_EXPR;
8757 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8758 else if (code == LE_EXPR
8759 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8760 code = LT_EXPR;
8761 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8762 else if (code == GE_EXPR
8763 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8764 code = GT_EXPR;
8765 else
8766 return NULL_TREE;
8767 *strict_overflow_p = true;
8768 }
8769
8770 /* Now build the constant reduced in magnitude. But not if that
8771 would produce one outside of its type's range. */
8772 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8773 && ((sgn0 == 1
8774 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8775 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8776 || (sgn0 == -1
8777 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8778 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8779 /* We cannot swap the comparison here as that would cause us to
8780 endlessly recurse. */
8781 return NULL_TREE;
8782
8783 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8784 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8785 if (code0 != INTEGER_CST)
8786 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8787 t = fold_convert (TREE_TYPE (arg1), t);
8788
8789 /* If swapping might yield a more canonical form, do so. */
8790 if (swap)
8791 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8792 else
8793 return fold_build2_loc (loc, code, type, t, arg1);
8794 }
8795
8796 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8797 overflow further. Try to decrease the magnitude of constants involved
8798 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8799 and put sole constants at the second argument position.
8800 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8801
8802 static tree
8803 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8804 tree arg0, tree arg1)
8805 {
8806 tree t;
8807 bool strict_overflow_p;
8808 const char * const warnmsg = G_("assuming signed overflow does not occur "
8809 "when reducing constant in comparison");
8810
8811 /* Try canonicalization by simplifying arg0. */
8812 strict_overflow_p = false;
8813 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8814 &strict_overflow_p);
8815 if (t)
8816 {
8817 if (strict_overflow_p)
8818 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8819 return t;
8820 }
8821
8822 /* Try canonicalization by simplifying arg1 using the swapped
8823 comparison. */
8824 code = swap_tree_comparison (code);
8825 strict_overflow_p = false;
8826 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8827 &strict_overflow_p);
8828 if (t && strict_overflow_p)
8829 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8830 return t;
8831 }
8832
8833 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8834 space. This is used to avoid issuing overflow warnings for
8835 expressions like &p->x, which cannot wrap. */
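/* For illustration (hypothetical object): with "char buf[8]" and BASE
   the address of buf, OFFSET 4 and BITPOS 0 stay within the object, so
   false is returned; a byte total larger than 8 conservatively returns
   true.  */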
8836
8837 static bool
8838 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8839 {
8840 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8841 return true;
8842
8843 if (bitpos < 0)
8844 return true;
8845
8846 wide_int wi_offset;
8847 int precision = TYPE_PRECISION (TREE_TYPE (base));
8848 if (offset == NULL_TREE)
8849 wi_offset = wi::zero (precision);
8850 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8851 return true;
8852 else
8853 wi_offset = offset;
8854
8855 bool overflow;
8856 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8857 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8858 if (overflow)
8859 return true;
8860
8861 if (!wi::fits_uhwi_p (total))
8862 return true;
8863
8864 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8865 if (size <= 0)
8866 return true;
8867
8868 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8869 array. */
8870 if (TREE_CODE (base) == ADDR_EXPR)
8871 {
8872 HOST_WIDE_INT base_size;
8873
8874 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8875 if (base_size > 0 && size < base_size)
8876 size = base_size;
8877 }
8878
8879 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8880 }
8881
8882 /* Return the HOST_WIDE_INT least significant bits of T, an INTEGER_CST
8883 of sizetype kind. This makes sure to properly sign-extend the
8884 constant. */
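/* For illustration (hypothetical 64-bit host): a 32-bit constant whose
   value is 0x80000000 is returned as the sign-extended -2147483648
   rather than as a positive number.  */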
8885
8886 static HOST_WIDE_INT
8887 size_low_cst (const_tree t)
8888 {
8889 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8890 int prec = TYPE_PRECISION (TREE_TYPE (t));
8891 if (prec < HOST_BITS_PER_WIDE_INT)
8892 return sext_hwi (w, prec);
8893 return w;
8894 }
8895
8896 /* Subroutine of fold_binary. This routine performs all of the
8897 transformations that are common to the equality/inequality
8898 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8899 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than fold_binary
8900 should call fold_binary rather than this routine. Fold a comparison with
8901 tree code CODE and type TYPE with operands OP0 and OP1. Return
8902 the folded comparison or NULL_TREE. */
8903
8904 static tree
8905 fold_comparison (location_t loc, enum tree_code code, tree type,
8906 tree op0, tree op1)
8907 {
8908 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8909 tree arg0, arg1, tem;
8910
8911 arg0 = op0;
8912 arg1 = op1;
8913
8914 STRIP_SIGN_NOPS (arg0);
8915 STRIP_SIGN_NOPS (arg1);
8916
8917 tem = fold_relational_const (code, type, arg0, arg1);
8918 if (tem != NULL_TREE)
8919 return tem;
8920
8921 /* If one arg is a real or integer constant, put it last. */
8922 if (tree_swap_operands_p (arg0, arg1, true))
8923 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8924
8925 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
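/* For illustration (hypothetical signed operand x): with undefined
   overflow "x + 5 < 10" becomes "x < 5", and the equality
   "x - 3 == 7" safely becomes "x == 10".  */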
8926 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8927 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8928 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8929 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8930 && TREE_CODE (arg1) == INTEGER_CST
8931 && !TREE_OVERFLOW (arg1))
8932 {
8933 const enum tree_code
8934 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8935 tree const1 = TREE_OPERAND (arg0, 1);
8936 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8937 tree variable = TREE_OPERAND (arg0, 0);
8938 tree new_const = int_const_binop (reverse_op, const2, const1);
8939
8940 /* If the constant operation overflowed this can be
8941 simplified as a comparison against INT_MAX/INT_MIN. */
8942 if (TREE_OVERFLOW (new_const))
8943 {
8944 int const1_sgn = tree_int_cst_sgn (const1);
8945 enum tree_code code2 = code;
8946
8947 /* Get the sign of the constant on the lhs if the
8948 operation were VARIABLE + CONST1. */
8949 if (TREE_CODE (arg0) == MINUS_EXPR)
8950 const1_sgn = -const1_sgn;
8951
8952 /* The sign of the constant determines if we overflowed
8953 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8954 Canonicalize to the INT_MIN overflow by swapping the comparison
8955 if necessary. */
8956 if (const1_sgn == -1)
8957 code2 = swap_tree_comparison (code);
8958
8959 /* We now can look at the canonicalized case
8960 VARIABLE + 1 CODE2 INT_MIN
8961 and decide on the result. */
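/* For illustration (hypothetical signed int x): folding the constants
   of "x + 10 < INT_MIN + 5" overflows; the canonicalized
   "x + 1 < INT_MIN" can never be true, so the whole comparison folds
   to false.  */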
8962 switch (code2)
8963 {
8964 case EQ_EXPR:
8965 case LT_EXPR:
8966 case LE_EXPR:
8967 return
8968 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8969
8970 case NE_EXPR:
8971 case GE_EXPR:
8972 case GT_EXPR:
8973 return
8974 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8975
8976 default:
8977 gcc_unreachable ();
8978 }
8979 }
8980 else
8981 {
8982 if (!equality_code)
8983 fold_overflow_warning ("assuming signed overflow does not occur "
8984 "when changing X +- C1 cmp C2 to "
8985 "X cmp C2 -+ C1",
8986 WARN_STRICT_OVERFLOW_COMPARISON);
8987 return fold_build2_loc (loc, code, type, variable, new_const);
8988 }
8989 }
8990
8991 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8992 if (TREE_CODE (arg0) == MINUS_EXPR
8993 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8994 && integer_zerop (arg1))
8995 {
8996 if (!equality_code)
8997 fold_overflow_warning ("assuming signed overflow does not occur "
8998 "when changing X - Y cmp 0 to X cmp Y",
8999 WARN_STRICT_OVERFLOW_COMPARISON);
9000 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
9001 TREE_OPERAND (arg0, 1));
9002 }
9003
9004 /* For comparisons of pointers we can decompose it to a compile time
9005 comparison of the base objects and the offsets into the object.
9006 This requires at least one operand being an ADDR_EXPR or a
9007 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
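/* For illustration (hypothetical array a): "&a[3] == &a[5]" decomposes
   into the common base a with byte positions 3 and 5 times the element
   size, so the comparison folds to false at compile time.  */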
9008 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9009 && (TREE_CODE (arg0) == ADDR_EXPR
9010 || TREE_CODE (arg1) == ADDR_EXPR
9011 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9012 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9013 {
9014 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9015 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9016 enum machine_mode mode;
9017 int volatilep, unsignedp;
9018 bool indirect_base0 = false, indirect_base1 = false;
9019
9020 /* Get base and offset for the access. Strip ADDR_EXPR for
9021 get_inner_reference, but put it back by stripping INDIRECT_REF
9022 off the base object if possible. indirect_baseN will be true
9023 if baseN is not an address but refers to the object itself. */
9024 base0 = arg0;
9025 if (TREE_CODE (arg0) == ADDR_EXPR)
9026 {
9027 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9028 &bitsize, &bitpos0, &offset0, &mode,
9029 &unsignedp, &volatilep, false);
9030 if (TREE_CODE (base0) == INDIRECT_REF)
9031 base0 = TREE_OPERAND (base0, 0);
9032 else
9033 indirect_base0 = true;
9034 }
9035 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9036 {
9037 base0 = TREE_OPERAND (arg0, 0);
9038 STRIP_SIGN_NOPS (base0);
9039 if (TREE_CODE (base0) == ADDR_EXPR)
9040 {
9041 base0 = TREE_OPERAND (base0, 0);
9042 indirect_base0 = true;
9043 }
9044 offset0 = TREE_OPERAND (arg0, 1);
9045 if (tree_fits_shwi_p (offset0))
9046 {
9047 HOST_WIDE_INT off = size_low_cst (offset0);
9048 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9049 * BITS_PER_UNIT)
9050 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9051 {
9052 bitpos0 = off * BITS_PER_UNIT;
9053 offset0 = NULL_TREE;
9054 }
9055 }
9056 }
9057
9058 base1 = arg1;
9059 if (TREE_CODE (arg1) == ADDR_EXPR)
9060 {
9061 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9062 &bitsize, &bitpos1, &offset1, &mode,
9063 &unsignedp, &volatilep, false);
9064 if (TREE_CODE (base1) == INDIRECT_REF)
9065 base1 = TREE_OPERAND (base1, 0);
9066 else
9067 indirect_base1 = true;
9068 }
9069 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9070 {
9071 base1 = TREE_OPERAND (arg1, 0);
9072 STRIP_SIGN_NOPS (base1);
9073 if (TREE_CODE (base1) == ADDR_EXPR)
9074 {
9075 base1 = TREE_OPERAND (base1, 0);
9076 indirect_base1 = true;
9077 }
9078 offset1 = TREE_OPERAND (arg1, 1);
9079 if (tree_fits_shwi_p (offset1))
9080 {
9081 HOST_WIDE_INT off = size_low_cst (offset1);
9082 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9083 * BITS_PER_UNIT)
9084 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9085 {
9086 bitpos1 = off * BITS_PER_UNIT;
9087 offset1 = NULL_TREE;
9088 }
9089 }
9090 }
9091
9092 /* A local variable can never be pointed to by
9093 the default SSA name of an incoming parameter. */
9094 if ((TREE_CODE (arg0) == ADDR_EXPR
9095 && indirect_base0
9096 && TREE_CODE (base0) == VAR_DECL
9097 && auto_var_in_fn_p (base0, current_function_decl)
9098 && !indirect_base1
9099 && TREE_CODE (base1) == SSA_NAME
9100 && SSA_NAME_IS_DEFAULT_DEF (base1)
9101 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9102 || (TREE_CODE (arg1) == ADDR_EXPR
9103 && indirect_base1
9104 && TREE_CODE (base1) == VAR_DECL
9105 && auto_var_in_fn_p (base1, current_function_decl)
9106 && !indirect_base0
9107 && TREE_CODE (base0) == SSA_NAME
9108 && SSA_NAME_IS_DEFAULT_DEF (base0)
9109 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9110 {
9111 if (code == NE_EXPR)
9112 return constant_boolean_node (1, type);
9113 else if (code == EQ_EXPR)
9114 return constant_boolean_node (0, type);
9115 }
9116 /* If we have equivalent bases we might be able to simplify. */
9117 else if (indirect_base0 == indirect_base1
9118 && operand_equal_p (base0, base1, 0))
9119 {
9120 /* We can fold this expression to a constant if the non-constant
9121 offset parts are equal. */
9122 if ((offset0 == offset1
9123 || (offset0 && offset1
9124 && operand_equal_p (offset0, offset1, 0)))
9125 && (code == EQ_EXPR
9126 || code == NE_EXPR
9127 || (indirect_base0 && DECL_P (base0))
9128 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9129
9130 {
9131 if (!equality_code
9132 && bitpos0 != bitpos1
9133 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9134 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9135 fold_overflow_warning (("assuming pointer wraparound does not "
9136 "occur when comparing P +- C1 with "
9137 "P +- C2"),
9138 WARN_STRICT_OVERFLOW_CONDITIONAL);
9139
9140 switch (code)
9141 {
9142 case EQ_EXPR:
9143 return constant_boolean_node (bitpos0 == bitpos1, type);
9144 case NE_EXPR:
9145 return constant_boolean_node (bitpos0 != bitpos1, type);
9146 case LT_EXPR:
9147 return constant_boolean_node (bitpos0 < bitpos1, type);
9148 case LE_EXPR:
9149 return constant_boolean_node (bitpos0 <= bitpos1, type);
9150 case GE_EXPR:
9151 return constant_boolean_node (bitpos0 >= bitpos1, type);
9152 case GT_EXPR:
9153 return constant_boolean_node (bitpos0 > bitpos1, type);
9154 default:;
9155 }
9156 }
9157 /* We can simplify the comparison to a comparison of the variable
9158 offset parts if the constant offset parts are equal.
9159 Be careful to use signed sizetype here because otherwise we
9160 mess with array offsets in the wrong way. This is possible
9161 because pointer arithmetic is restricted to remain within an
9162 object and overflow on pointer differences is undefined as of
9163 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9164 else if (bitpos0 == bitpos1
9165 && (equality_code
9166 || (indirect_base0 && DECL_P (base0))
9167 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9168 {
9169 /* By converting to signed sizetype we cover middle-end pointer
9170 arithmetic which operates on unsigned pointer types of size
9171 type size and ARRAY_REF offsets which are properly sign or
9172 zero extended from their type in case it is narrower than
9173 sizetype. */
9174 if (offset0 == NULL_TREE)
9175 offset0 = build_int_cst (ssizetype, 0);
9176 else
9177 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9178 if (offset1 == NULL_TREE)
9179 offset1 = build_int_cst (ssizetype, 0);
9180 else
9181 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9182
9183 if (!equality_code
9184 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9185 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9186 fold_overflow_warning (("assuming pointer wraparound does not "
9187 "occur when comparing P +- C1 with "
9188 "P +- C2"),
9189 WARN_STRICT_OVERFLOW_COMPARISON);
9190
9191 return fold_build2_loc (loc, code, type, offset0, offset1);
9192 }
9193 }
9194 /* For non-equal bases we can simplify if they are addresses
9195 of local binding decls or constants. */
9196 else if (indirect_base0 && indirect_base1
9197 /* We know that !operand_equal_p (base0, base1, 0)
9198 because the if condition was false. But make
9199 sure two decls are not the same. */
9200 && base0 != base1
9201 && TREE_CODE (arg0) == ADDR_EXPR
9202 && TREE_CODE (arg1) == ADDR_EXPR
9203 && (((TREE_CODE (base0) == VAR_DECL
9204 || TREE_CODE (base0) == PARM_DECL)
9205 && (targetm.binds_local_p (base0)
9206 || CONSTANT_CLASS_P (base1)))
9207 || CONSTANT_CLASS_P (base0))
9208 && (((TREE_CODE (base1) == VAR_DECL
9209 || TREE_CODE (base1) == PARM_DECL)
9210 && (targetm.binds_local_p (base1)
9211 || CONSTANT_CLASS_P (base0)))
9212 || CONSTANT_CLASS_P (base1)))
9213 {
9214 if (code == EQ_EXPR)
9215 return omit_two_operands_loc (loc, type, boolean_false_node,
9216 arg0, arg1);
9217 else if (code == NE_EXPR)
9218 return omit_two_operands_loc (loc, type, boolean_true_node,
9219 arg0, arg1);
9220 }
9221 /* For equal offsets we can simplify to a comparison of the
9222 base addresses. */
9223 else if (bitpos0 == bitpos1
9224 && (indirect_base0
9225 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9226 && (indirect_base1
9227 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9228 && ((offset0 == offset1)
9229 || (offset0 && offset1
9230 && operand_equal_p (offset0, offset1, 0))))
9231 {
9232 if (indirect_base0)
9233 base0 = build_fold_addr_expr_loc (loc, base0);
9234 if (indirect_base1)
9235 base1 = build_fold_addr_expr_loc (loc, base1);
9236 return fold_build2_loc (loc, code, type, base0, base1);
9237 }
9238 }
9239
9240 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9241 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9242 the resulting offset is smaller in absolute value than the
9243 original one and has the same sign. */
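/* For illustration (hypothetical signed operands): "x + 7 < y + 10"
   becomes "x < y + 3"; the combined constant 3 is smaller in absolute
   value than 10 and has the same sign, so no new overflow is
   introduced.  */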
9244 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9245 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9246 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9247 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9248 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9249 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9250 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9251 {
9252 tree const1 = TREE_OPERAND (arg0, 1);
9253 tree const2 = TREE_OPERAND (arg1, 1);
9254 tree variable1 = TREE_OPERAND (arg0, 0);
9255 tree variable2 = TREE_OPERAND (arg1, 0);
9256 tree cst;
9257 const char * const warnmsg = G_("assuming signed overflow does not "
9258 "occur when combining constants around "
9259 "a comparison");
9260
9261 /* Put the constant on the side where it doesn't overflow and is
9262 of lower absolute value and of the same sign as before. */
9263 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9264 ? MINUS_EXPR : PLUS_EXPR,
9265 const2, const1);
9266 if (!TREE_OVERFLOW (cst)
9267 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9268 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9269 {
9270 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9271 return fold_build2_loc (loc, code, type,
9272 variable1,
9273 fold_build2_loc (loc, TREE_CODE (arg1),
9274 TREE_TYPE (arg1),
9275 variable2, cst));
9276 }
9277
9278 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9279 ? MINUS_EXPR : PLUS_EXPR,
9280 const1, const2);
9281 if (!TREE_OVERFLOW (cst)
9282 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9283 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9284 {
9285 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9286 return fold_build2_loc (loc, code, type,
9287 fold_build2_loc (loc, TREE_CODE (arg0),
9288 TREE_TYPE (arg0),
9289 variable1, cst),
9290 variable2);
9291 }
9292 }
9293
9294 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9295 signed arithmetic case. That form is created by the compiler
9296 often enough for folding it to be of value. One example is in
9297 computing loop trip counts after Operator Strength Reduction. */
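/* For illustration (hypothetical signed operand x): "x * 4 > 0" folds
   to "x > 0", while "x * -4 > 0" swaps the sense and folds to
   "x < 0".  */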
9298 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9299 && TREE_CODE (arg0) == MULT_EXPR
9300 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9301 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9302 && integer_zerop (arg1))
9303 {
9304 tree const1 = TREE_OPERAND (arg0, 1);
9305 tree const2 = arg1; /* zero */
9306 tree variable1 = TREE_OPERAND (arg0, 0);
9307 enum tree_code cmp_code = code;
9308
9309 /* Handle unfolded multiplication by zero. */
9310 if (integer_zerop (const1))
9311 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9312
9313 fold_overflow_warning (("assuming signed overflow does not occur when "
9314 "eliminating multiplication in comparison "
9315 "with zero"),
9316 WARN_STRICT_OVERFLOW_COMPARISON);
9317
9318 /* If const1 is negative we swap the sense of the comparison. */
9319 if (tree_int_cst_sgn (const1) < 0)
9320 cmp_code = swap_tree_comparison (cmp_code);
9321
9322 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9323 }
9324
9325 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9326 if (tem)
9327 return tem;
9328
9329 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9330 {
9331 tree targ0 = strip_float_extensions (arg0);
9332 tree targ1 = strip_float_extensions (arg1);
9333 tree newtype = TREE_TYPE (targ0);
9334
9335 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9336 newtype = TREE_TYPE (targ1);
9337
9338 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9339 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9340 return fold_build2_loc (loc, code, type,
9341 fold_convert_loc (loc, newtype, targ0),
9342 fold_convert_loc (loc, newtype, targ1));
9343
9344 /* (-a) CMP (-b) -> b CMP a */
9345 if (TREE_CODE (arg0) == NEGATE_EXPR
9346 && TREE_CODE (arg1) == NEGATE_EXPR)
9347 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9348 TREE_OPERAND (arg0, 0));
9349
9350 if (TREE_CODE (arg1) == REAL_CST)
9351 {
9352 REAL_VALUE_TYPE cst;
9353 cst = TREE_REAL_CST (arg1);
9354
9355 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9356 if (TREE_CODE (arg0) == NEGATE_EXPR)
9357 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9358 TREE_OPERAND (arg0, 0),
9359 build_real (TREE_TYPE (arg1),
9360 real_value_negate (&cst)));
9361
9362 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9363 /* a CMP (-0) -> a CMP 0 */
9364 if (REAL_VALUE_MINUS_ZERO (cst))
9365 return fold_build2_loc (loc, code, type, arg0,
9366 build_real (TREE_TYPE (arg1), dconst0));
9367
9368 /* x != NaN is always true, other ops are always false. */
9369 if (REAL_VALUE_ISNAN (cst)
9370 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9371 {
9372 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9373 return omit_one_operand_loc (loc, type, tem, arg0);
9374 }
9375
9376 /* Fold comparisons against infinity. */
9377 if (REAL_VALUE_ISINF (cst)
9378 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9379 {
9380 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9381 if (tem != NULL_TREE)
9382 return tem;
9383 }
9384 }
9385
9386 /* If this is a comparison of a real constant with a PLUS_EXPR
9387 or a MINUS_EXPR of a real constant, we can convert it into a
9388 comparison with a revised real constant as long as no overflow
9389 occurs when unsafe_math_optimizations are enabled. */
9390 if (flag_unsafe_math_optimizations
9391 && TREE_CODE (arg1) == REAL_CST
9392 && (TREE_CODE (arg0) == PLUS_EXPR
9393 || TREE_CODE (arg0) == MINUS_EXPR)
9394 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9395 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9396 ? MINUS_EXPR : PLUS_EXPR,
9397 arg1, TREE_OPERAND (arg0, 1)))
9398 && !TREE_OVERFLOW (tem))
9399 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9400
9401 /* Likewise, we can simplify a comparison of a real constant with
9402 a MINUS_EXPR whose first operand is also a real constant, i.e.
9403 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9404 floating-point types only if -fassociative-math is set. */
9405 if (flag_associative_math
9406 && TREE_CODE (arg1) == REAL_CST
9407 && TREE_CODE (arg0) == MINUS_EXPR
9408 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9409 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9410 arg1))
9411 && !TREE_OVERFLOW (tem))
9412 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9413 TREE_OPERAND (arg0, 1), tem);
9414
9415 /* Fold comparisons against built-in math functions. */
9416 if (TREE_CODE (arg1) == REAL_CST
9417 && flag_unsafe_math_optimizations
9418 && ! flag_errno_math)
9419 {
9420 enum built_in_function fcode = builtin_mathfn_code (arg0);
9421
9422 if (fcode != END_BUILTINS)
9423 {
9424 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9425 if (tem != NULL_TREE)
9426 return tem;
9427 }
9428 }
9429 }
9430
9431 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9432 && CONVERT_EXPR_P (arg0))
9433 {
9434 /* If we are widening one operand of an integer comparison,
9435 see if the other operand is similarly being widened. Perhaps we
9436 can do the comparison in the narrower type. */
9437 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9438 if (tem)
9439 return tem;
9440
9441 /* Or if we are changing signedness. */
9442 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9443 if (tem)
9444 return tem;
9445 }
9446
9447 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9448 constant, we can simplify it. */
9449 if (TREE_CODE (arg1) == INTEGER_CST
9450 && (TREE_CODE (arg0) == MIN_EXPR
9451 || TREE_CODE (arg0) == MAX_EXPR)
9452 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9453 {
9454 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9455 if (tem)
9456 return tem;
9457 }
9458
9459 /* Simplify comparison of something with itself. (For IEEE
9460 floating-point, we can only do some of these simplifications.) */
9461 if (operand_equal_p (arg0, arg1, 0))
9462 {
9463 switch (code)
9464 {
9465 case EQ_EXPR:
9466 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9467 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9468 return constant_boolean_node (1, type);
9469 break;
9470
9471 case GE_EXPR:
9472 case LE_EXPR:
9473 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9474 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9475 return constant_boolean_node (1, type);
9476 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9477
9478 case NE_EXPR:
9479 /* For NE, we can only do this simplification if the operands are
9480 integer or we don't honor IEEE floating point NaNs. */
9481 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9482 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9483 break;
9484 /* ... fall through ... */
9485 case GT_EXPR:
9486 case LT_EXPR:
9487 return constant_boolean_node (0, type);
9488 default:
9489 gcc_unreachable ();
9490 }
9491 }
9492
9493 /* If we are comparing an expression that just has comparisons
9494 of two integer values, arithmetic expressions of those comparisons,
9495 and constants, we can simplify it. There are only three cases
9496 to check: the two values can either be equal, the first can be
9497 greater, or the second can be greater. Fold the expression for
9498 those three values. Since each value must be 0 or 1, we have
9499 eight possibilities, each of which corresponds to the constant 0
9500 or 1 or one of the six possible comparisons.
9501
9502 This handles common cases like (a > b) == 0 but also handles
9503 expressions like ((x > y) - (y > x)) > 0, which supposedly
9504 occur in macroized code. */
9505
9506 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9507 {
9508 tree cval1 = 0, cval2 = 0;
9509 int save_p = 0;
9510
9511 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9512 /* Don't handle degenerate cases here; they should already
9513 have been handled anyway. */
9514 && cval1 != 0 && cval2 != 0
9515 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9516 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9517 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9518 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9519 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9520 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9521 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9522 {
9523 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9524 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9525
9526 /* We can't just pass T to eval_subst in case cval1 or cval2
9527 was the same as ARG1. */
9528
9529 tree high_result
9530 = fold_build2_loc (loc, code, type,
9531 eval_subst (loc, arg0, cval1, maxval,
9532 cval2, minval),
9533 arg1);
9534 tree equal_result
9535 = fold_build2_loc (loc, code, type,
9536 eval_subst (loc, arg0, cval1, maxval,
9537 cval2, maxval),
9538 arg1);
9539 tree low_result
9540 = fold_build2_loc (loc, code, type,
9541 eval_subst (loc, arg0, cval1, minval,
9542 cval2, maxval),
9543 arg1);
9544
9545 /* All three of these results should be 0 or 1. Confirm they are.
9546 Then use those values to select the proper code to use. */
9547
9548 if (TREE_CODE (high_result) == INTEGER_CST
9549 && TREE_CODE (equal_result) == INTEGER_CST
9550 && TREE_CODE (low_result) == INTEGER_CST)
9551 {
9552 /* Make a 3-bit mask with the high-order bit being the
9553 value for `>', the next for `=', and the low for `<'. */
9554 switch ((integer_onep (high_result) * 4)
9555 + (integer_onep (equal_result) * 2)
9556 + integer_onep (low_result))
9557 {
9558 case 0:
9559 /* Always false. */
9560 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9561 case 1:
9562 code = LT_EXPR;
9563 break;
9564 case 2:
9565 code = EQ_EXPR;
9566 break;
9567 case 3:
9568 code = LE_EXPR;
9569 break;
9570 case 4:
9571 code = GT_EXPR;
9572 break;
9573 case 5:
9574 code = NE_EXPR;
9575 break;
9576 case 6:
9577 code = GE_EXPR;
9578 break;
9579 case 7:
9580 /* Always true. */
9581 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9582 }
9583
9584 if (save_p)
9585 {
9586 tem = save_expr (build2 (code, type, cval1, cval2));
9587 SET_EXPR_LOCATION (tem, loc);
9588 return tem;
9589 }
9590 return fold_build2_loc (loc, code, type, cval1, cval2);
9591 }
9592 }
9593 }
9594
9595 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9596 into a single range test. */
9597 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9598 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9599 && TREE_CODE (arg1) == INTEGER_CST
9600 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9601 && !integer_zerop (TREE_OPERAND (arg0, 1))
9602 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9603 && !TREE_OVERFLOW (arg1))
9604 {
9605 tem = fold_div_compare (loc, code, type, arg0, arg1);
9606 if (tem != NULL_TREE)
9607 return tem;
9608 }
9609
9610 /* Fold ~X op ~Y as Y op X. */
9611 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9612 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9613 {
9614 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9615 return fold_build2_loc (loc, code, type,
9616 fold_convert_loc (loc, cmp_type,
9617 TREE_OPERAND (arg1, 0)),
9618 TREE_OPERAND (arg0, 0));
9619 }
9620
9621 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9622 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9623 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9624 {
9625 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9626 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9627 TREE_OPERAND (arg0, 0),
9628 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9629 fold_convert_loc (loc, cmp_type, arg1)));
9630 }
9631
9632 return NULL_TREE;
9633 }
9634
9635
9636 /* Subroutine of fold_binary. Optimize complex multiplications of the
9637 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9638 argument EXPR represents the expression "z" of type TYPE. */
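/* In other words, for z = a + b*i the product z * conj(z) is
   (a + b*i) * (a - b*i) = a*a + b*b, with a zero imaginary part.  */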
9639
9640 static tree
9641 fold_mult_zconjz (location_t loc, tree type, tree expr)
9642 {
9643 tree itype = TREE_TYPE (type);
9644 tree rpart, ipart, tem;
9645
9646 if (TREE_CODE (expr) == COMPLEX_EXPR)
9647 {
9648 rpart = TREE_OPERAND (expr, 0);
9649 ipart = TREE_OPERAND (expr, 1);
9650 }
9651 else if (TREE_CODE (expr) == COMPLEX_CST)
9652 {
9653 rpart = TREE_REALPART (expr);
9654 ipart = TREE_IMAGPART (expr);
9655 }
9656 else
9657 {
9658 expr = save_expr (expr);
9659 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9660 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9661 }
9662
9663 rpart = save_expr (rpart);
9664 ipart = save_expr (ipart);
9665 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9666 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9667 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9668 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9669 build_zero_cst (itype));
9670 }
9671
9672
9673 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9674 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9675 guarantees that P and N have the same least significant log2(M) bits.
9676 N is not otherwise constrained. In particular, N is not normalized to
9677 0 <= N < M as is common. In general, the precise value of P is unknown.
9678 M is chosen as large as possible such that constant N can be determined.
9679
9680 Returns M and sets *RESIDUE to N.
9681
9682 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9683 account. This is not always possible due to PR 35705.
9684 */
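/* For illustration (hypothetical operands): if EXPR is "&buf + 5" and
   buf is known to be 16-byte aligned, the result is M = 16 with
   *RESIDUE = 5, i.e. the pointer value is congruent to 5 modulo 16.  */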
9685
9686 static unsigned HOST_WIDE_INT
9687 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9688 bool allow_func_align)
9689 {
9690 enum tree_code code;
9691
9692 *residue = 0;
9693
9694 code = TREE_CODE (expr);
9695 if (code == ADDR_EXPR)
9696 {
9697 unsigned int bitalign;
9698 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9699 *residue /= BITS_PER_UNIT;
9700 return bitalign / BITS_PER_UNIT;
9701 }
9702 else if (code == POINTER_PLUS_EXPR)
9703 {
9704 tree op0, op1;
9705 unsigned HOST_WIDE_INT modulus;
9706 enum tree_code inner_code;
9707
9708 op0 = TREE_OPERAND (expr, 0);
9709 STRIP_NOPS (op0);
9710 modulus = get_pointer_modulus_and_residue (op0, residue,
9711 allow_func_align);
9712
9713 op1 = TREE_OPERAND (expr, 1);
9714 STRIP_NOPS (op1);
9715 inner_code = TREE_CODE (op1);
9716 if (inner_code == INTEGER_CST)
9717 {
9718 *residue += TREE_INT_CST_LOW (op1);
9719 return modulus;
9720 }
9721 else if (inner_code == MULT_EXPR)
9722 {
9723 op1 = TREE_OPERAND (op1, 1);
9724 if (TREE_CODE (op1) == INTEGER_CST)
9725 {
9726 unsigned HOST_WIDE_INT align;
9727
9728 /* Compute the greatest power-of-2 divisor of op1. */
9729 align = TREE_INT_CST_LOW (op1);
9730 align &= -align;
9731
9732 /* If align is non-zero and less than modulus, replace
9733 modulus with align. If align is 0, then either op1 is 0
9734 or the greatest power-of-2 divisor of op1 doesn't fit in an
9735 unsigned HOST_WIDE_INT. In either case, no additional
9736 constraint is imposed. */
9737 if (align)
9738 modulus = MIN (modulus, align);
9739
9740 return modulus;
9741 }
9742 }
9743 }
9744
9745 /* If we get here, we were unable to determine anything useful about the
9746 expression. */
9747 return 1;
9748 }
9749
9750 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9751 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9752
9753 static bool
9754 vec_cst_ctor_to_array (tree arg, tree *elts)
9755 {
9756 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9757
9758 if (TREE_CODE (arg) == VECTOR_CST)
9759 {
9760 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9761 elts[i] = VECTOR_CST_ELT (arg, i);
9762 }
9763 else if (TREE_CODE (arg) == CONSTRUCTOR)
9764 {
9765 constructor_elt *elt;
9766
9767 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9768 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9769 return false;
9770 else
9771 elts[i] = elt->value;
9772 }
9773 else
9774 return false;
9775 for (; i < nelts; i++)
9776 elts[i]
9777 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9778 return true;
9779 }
9780
9781 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9782 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9783 NULL_TREE otherwise. */
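/* For illustration (hypothetical four-element vectors): with SEL =
   {0, 4, 1, 5} the result interleaves the low halves of ARG0 and ARG1,
   yielding {a0, b0, a1, b1}.  */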
9784
9785 static tree
9786 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9787 {
9788 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9789 tree *elts;
9790 bool need_ctor = false;
9791
9792 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9793 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9794 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9795 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9796 return NULL_TREE;
9797
9798 elts = XALLOCAVEC (tree, nelts * 3);
9799 if (!vec_cst_ctor_to_array (arg0, elts)
9800 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9801 return NULL_TREE;
9802
9803 for (i = 0; i < nelts; i++)
9804 {
9805 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9806 need_ctor = true;
9807 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9808 }
9809
9810 if (need_ctor)
9811 {
9812 vec<constructor_elt, va_gc> *v;
9813 vec_alloc (v, nelts);
9814 for (i = 0; i < nelts; i++)
9815 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9816 return build_constructor (type, v);
9817 }
9818 else
9819 return build_vector (type, &elts[2 * nelts]);
9820 }
9821
9822 /* Try to fold a pointer difference of type TYPE between two address
9823 expressions of array references AREF0 and AREF1 using location LOC.
9824 Return a simplified expression for the difference or NULL_TREE. */
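/* For illustration (hypothetical array a): "&a[i] - &a[j]" simplifies
   to "(i - j) * sizeof (a[0])" plus whatever difference the bases
   contribute, which is zero here.  */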
9825
9826 static tree
9827 fold_addr_of_array_ref_difference (location_t loc, tree type,
9828 tree aref0, tree aref1)
9829 {
9830 tree base0 = TREE_OPERAND (aref0, 0);
9831 tree base1 = TREE_OPERAND (aref1, 0);
9832 tree base_offset = build_int_cst (type, 0);
9833
9834 /* If the bases are array references as well, recurse. If the bases
9835 are pointer indirections, compute the difference of the pointers.
9836 If the bases are equal, we are set. */
9837 if ((TREE_CODE (base0) == ARRAY_REF
9838 && TREE_CODE (base1) == ARRAY_REF
9839 && (base_offset
9840 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9841 || (INDIRECT_REF_P (base0)
9842 && INDIRECT_REF_P (base1)
9843 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9844 TREE_OPERAND (base0, 0),
9845 TREE_OPERAND (base1, 0))))
9846 || operand_equal_p (base0, base1, 0))
9847 {
9848 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9849 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9850 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9851 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9852 return fold_build2_loc (loc, PLUS_EXPR, type,
9853 base_offset,
9854 fold_build2_loc (loc, MULT_EXPR, type,
9855 diff, esz));
9856 }
9857 return NULL_TREE;
9858 }
9859
9860 /* If the real or vector real constant CST of type TYPE has an exact
9861 inverse, return it, else return NULL. */
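/* E.g. 2.0 has the exact inverse 0.5, whereas 1.0/3.0 is not exactly
   representable in binary floating point, so for 3.0 NULL_TREE is
   returned.  */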
9862
9863 static tree
9864 exact_inverse (tree type, tree cst)
9865 {
9866 REAL_VALUE_TYPE r;
9867 tree unit_type, *elts;
9868 enum machine_mode mode;
9869 unsigned vec_nelts, i;
9870
9871 switch (TREE_CODE (cst))
9872 {
9873 case REAL_CST:
9874 r = TREE_REAL_CST (cst);
9875
9876 if (exact_real_inverse (TYPE_MODE (type), &r))
9877 return build_real (type, r);
9878
9879 return NULL_TREE;
9880
9881 case VECTOR_CST:
9882 vec_nelts = VECTOR_CST_NELTS (cst);
9883 elts = XALLOCAVEC (tree, vec_nelts);
9884 unit_type = TREE_TYPE (type);
9885 mode = TYPE_MODE (unit_type);
9886
9887 for (i = 0; i < vec_nelts; i++)
9888 {
9889 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9890 if (!exact_real_inverse (mode, &r))
9891 return NULL_TREE;
9892 elts[i] = build_real (unit_type, r);
9893 }
9894
9895 return build_vector (type, elts);
9896
9897 default:
9898 return NULL_TREE;
9899 }
9900 }
9901
9902 /* Mask out the tz least significant bits of X of type TYPE where
9903 tz is the number of trailing zeroes in Y. */
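/* For illustration (hypothetical values): with Y = 0b1000 (three
   trailing zeroes) and X = 0b10111, the result is 0b10000; the three
   least significant bits of X are cleared.  */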
9904 static wide_int
9905 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9906 {
9907 int tz = wi::ctz (y);
9908 if (tz > 0)
9909 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9910 return x;
9911 }
9912
9913 /* Return true when T is an address and is known to be nonzero.
9914 For floating point we further ensure that T is not denormal.
9915 Similar logic is present in nonzero_address in rtlanal.h.
9916
9917 If the return value is based on the assumption that signed overflow
9918 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9919 change *STRICT_OVERFLOW_P. */
9920
9921 static bool
9922 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9923 {
9924 tree type = TREE_TYPE (t);
9925 enum tree_code code;
9926
9927 /* Doing something useful for floating point would need more work. */
9928 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9929 return false;
9930
9931 code = TREE_CODE (t);
9932 switch (TREE_CODE_CLASS (code))
9933 {
9934 case tcc_unary:
9935 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9936 strict_overflow_p);
9937 case tcc_binary:
9938 case tcc_comparison:
9939 return tree_binary_nonzero_warnv_p (code, type,
9940 TREE_OPERAND (t, 0),
9941 TREE_OPERAND (t, 1),
9942 strict_overflow_p);
9943 case tcc_constant:
9944 case tcc_declaration:
9945 case tcc_reference:
9946 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9947
9948 default:
9949 break;
9950 }
9951
9952 switch (code)
9953 {
9954 case TRUTH_NOT_EXPR:
9955 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9956 strict_overflow_p);
9957
9958 case TRUTH_AND_EXPR:
9959 case TRUTH_OR_EXPR:
9960 case TRUTH_XOR_EXPR:
9961 return tree_binary_nonzero_warnv_p (code, type,
9962 TREE_OPERAND (t, 0),
9963 TREE_OPERAND (t, 1),
9964 strict_overflow_p);
9965
9966 case COND_EXPR:
9967 case CONSTRUCTOR:
9968 case OBJ_TYPE_REF:
9969 case ASSERT_EXPR:
9970 case ADDR_EXPR:
9971 case WITH_SIZE_EXPR:
9972 case SSA_NAME:
9973 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9974
9975 case COMPOUND_EXPR:
9976 case MODIFY_EXPR:
9977 case BIND_EXPR:
9978 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9979 strict_overflow_p);
9980
9981 case SAVE_EXPR:
9982 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9983 strict_overflow_p);
9984
9985 case CALL_EXPR:
9986 {
9987 tree fndecl = get_callee_fndecl (t);
9988 if (!fndecl) return false;
9989 if (flag_delete_null_pointer_checks && !flag_check_new
9990 && DECL_IS_OPERATOR_NEW (fndecl)
9991 && !TREE_NOTHROW (fndecl))
9992 return true;
9993 if (flag_delete_null_pointer_checks
9994 && lookup_attribute ("returns_nonnull",
9995 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9996 return true;
9997 return alloca_call_p (t);
9998 }
9999
10000 default:
10001 break;
10002 }
10003 return false;
10004 }
10005
10006 /* Return true when T is an address and is known to be nonzero.
10007 Handle warnings about undefined signed overflow. */
10008
10009 static bool
10010 tree_expr_nonzero_p (tree t)
10011 {
10012 bool ret, strict_overflow_p;
10013
10014 strict_overflow_p = false;
10015 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10016 if (strict_overflow_p)
10017 fold_overflow_warning (("assuming signed overflow does not occur when "
10018 "determining that expression is always "
10019 "non-zero"),
10020 WARN_STRICT_OVERFLOW_MISC);
10021 return ret;
10022 }
10023
10024 /* Fold a binary expression of code CODE and type TYPE with operands
10025 OP0 and OP1. LOC is the location of the resulting expression.
10026 Return the folded expression if folding is successful. Otherwise,
10027 return NULL_TREE. */
10028
10029 tree
10030 fold_binary_loc (location_t loc,
10031 enum tree_code code, tree type, tree op0, tree op1)
10032 {
10033 enum tree_code_class kind = TREE_CODE_CLASS (code);
10034 tree arg0, arg1, tem;
10035 tree t1 = NULL_TREE;
10036 bool strict_overflow_p;
10037 unsigned int prec;
10038
10039 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10040 && TREE_CODE_LENGTH (code) == 2
10041 && op0 != NULL_TREE
10042 && op1 != NULL_TREE);
10043
10044 arg0 = op0;
10045 arg1 = op1;
10046
10047 /* Strip any conversions that don't change the mode. This is
10048 safe for every expression, except for a comparison expression
10049 because its signedness is derived from its operands. So, in
10050 the latter case, only strip conversions that don't change the
10051 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10052 preserved.
10053
10054 Note that this is done as an internal manipulation within the
10055 constant folder, in order to find the simplest representation
10056 of the arguments so that their form can be studied. In any
10057 case, the appropriate type conversions should be put back in
10058 the tree that will get out of the constant folder. */
10059
10060 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10061 {
10062 STRIP_SIGN_NOPS (arg0);
10063 STRIP_SIGN_NOPS (arg1);
10064 }
10065 else
10066 {
10067 STRIP_NOPS (arg0);
10068 STRIP_NOPS (arg1);
10069 }
10070
10071 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10072 constant but we can't do arithmetic on them. */
10073 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10074 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10075 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10076 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10077 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10078 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10079 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10080 {
10081 if (kind == tcc_binary)
10082 {
10083 /* Make sure type and arg0 have the same saturating flag. */
10084 gcc_assert (TYPE_SATURATING (type)
10085 == TYPE_SATURATING (TREE_TYPE (arg0)));
10086 tem = const_binop (code, arg0, arg1);
10087 }
10088 else if (kind == tcc_comparison)
10089 tem = fold_relational_const (code, type, arg0, arg1);
10090 else
10091 tem = NULL_TREE;
10092
10093 if (tem != NULL_TREE)
10094 {
10095 if (TREE_TYPE (tem) != type)
10096 tem = fold_convert_loc (loc, type, tem);
10097 return tem;
10098 }
10099 }
10100
10101 /* If this is a commutative operation, and ARG0 is a constant, move it
10102 to ARG1 to reduce the number of tests below. */
10103 if (commutative_tree_code (code)
10104 && tree_swap_operands_p (arg0, arg1, true))
10105 return fold_build2_loc (loc, code, type, op1, op0);
10106
10107 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10108
10109 First check for cases where an arithmetic operation is applied to a
10110 compound, conditional, or comparison operation. Push the arithmetic
10111 operation inside the compound or conditional to see if any folding
10112 can then be done. Convert comparison to conditional for this purpose.
10113 This also optimizes non-constant cases that used to be done in
10114 expand_expr.
10115
10116 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
10117 where one of the operands is a comparison and the other is a comparison, a
10118 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10119 code below would make the expression more complex. Change it to a
10120 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10121 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
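/* For illustration (hypothetical operands): "(a < b) & (c < d)" becomes
   the TRUTH_AND_EXPR of the two comparisons, and "(a < b) != (c < d)"
   becomes their TRUTH_XOR_EXPR.  */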
10122
10123 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10124 || code == EQ_EXPR || code == NE_EXPR)
10125 && TREE_CODE (type) != VECTOR_TYPE
10126 && ((truth_value_p (TREE_CODE (arg0))
10127 && (truth_value_p (TREE_CODE (arg1))
10128 || (TREE_CODE (arg1) == BIT_AND_EXPR
10129 && integer_onep (TREE_OPERAND (arg1, 1)))))
10130 || (truth_value_p (TREE_CODE (arg1))
10131 && (truth_value_p (TREE_CODE (arg0))
10132 || (TREE_CODE (arg0) == BIT_AND_EXPR
10133 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10134 {
10135 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10136 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10137 : TRUTH_XOR_EXPR,
10138 boolean_type_node,
10139 fold_convert_loc (loc, boolean_type_node, arg0),
10140 fold_convert_loc (loc, boolean_type_node, arg1));
10141
10142 if (code == EQ_EXPR)
10143 tem = invert_truthvalue_loc (loc, tem);
10144
10145 return fold_convert_loc (loc, type, tem);
10146 }
10147
10148 if (TREE_CODE_CLASS (code) == tcc_binary
10149 || TREE_CODE_CLASS (code) == tcc_comparison)
10150 {
10151 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10152 {
10153 tem = fold_build2_loc (loc, code, type,
10154 fold_convert_loc (loc, TREE_TYPE (op0),
10155 TREE_OPERAND (arg0, 1)), op1);
10156 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10157 tem);
10158 }
10159 if (TREE_CODE (arg1) == COMPOUND_EXPR
10160 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10161 {
10162 tem = fold_build2_loc (loc, code, type, op0,
10163 fold_convert_loc (loc, TREE_TYPE (op1),
10164 TREE_OPERAND (arg1, 1)));
10165 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10166 tem);
10167 }
10168
10169 if (TREE_CODE (arg0) == COND_EXPR
10170 || TREE_CODE (arg0) == VEC_COND_EXPR
10171 || COMPARISON_CLASS_P (arg0))
10172 {
10173 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10174 arg0, arg1,
10175 /*cond_first_p=*/1);
10176 if (tem != NULL_TREE)
10177 return tem;
10178 }
10179
10180 if (TREE_CODE (arg1) == COND_EXPR
10181 || TREE_CODE (arg1) == VEC_COND_EXPR
10182 || COMPARISON_CLASS_P (arg1))
10183 {
10184 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10185 arg1, arg0,
10186 /*cond_first_p=*/0);
10187 if (tem != NULL_TREE)
10188 return tem;
10189 }
10190 }
10191
10192 switch (code)
10193 {
10194 case MEM_REF:
10195 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10196 if (TREE_CODE (arg0) == ADDR_EXPR
10197 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10198 {
10199 tree iref = TREE_OPERAND (arg0, 0);
10200 return fold_build2 (MEM_REF, type,
10201 TREE_OPERAND (iref, 0),
10202 int_const_binop (PLUS_EXPR, arg1,
10203 TREE_OPERAND (iref, 1)));
10204 }
10205
10206 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10207 if (TREE_CODE (arg0) == ADDR_EXPR
10208 && handled_component_p (TREE_OPERAND (arg0, 0)))
10209 {
10210 tree base;
10211 HOST_WIDE_INT coffset;
10212 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10213 &coffset);
10214 if (!base)
10215 return NULL_TREE;
10216 return fold_build2 (MEM_REF, type,
10217 build_fold_addr_expr (base),
10218 int_const_binop (PLUS_EXPR, arg1,
10219 size_int (coffset)));
10220 }
10221
10222 return NULL_TREE;
10223
10224 case POINTER_PLUS_EXPR:
10225 /* 0 +p index -> (type)index */
10226 if (integer_zerop (arg0))
10227 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10228
10229 /* PTR +p 0 -> PTR */
10230 if (integer_zerop (arg1))
10231 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10232
10233 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10234 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10235 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10236 return fold_convert_loc (loc, type,
10237 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10238 fold_convert_loc (loc, sizetype,
10239 arg1),
10240 fold_convert_loc (loc, sizetype,
10241 arg0)));
10242
10243 /* (PTR +p B) +p A -> PTR +p (B + A) */
10244 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10245 {
10246 tree inner;
10247 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10248 tree arg00 = TREE_OPERAND (arg0, 0);
10249 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10250 arg01, fold_convert_loc (loc, sizetype, arg1));
10251 return fold_convert_loc (loc, type,
10252 fold_build_pointer_plus_loc (loc,
10253 arg00, inner));
10254 }
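	  /* Illustrative example (annotation): (p p+ 4) p+ 8 becomes
	     p p+ 12; both offsets are first converted to sizetype so the
	     inner addition is carried out in a single unsigned type.  */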
10255
10256	      /* PTR_CST +p CST -> CST' (fold the two constants into one). */
10257 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10258 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10259 fold_convert_loc (loc, type, arg1));
10260
10261	      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10262	         of the array.  The loop optimizer sometimes produces this type of
10263	         expression.  */
10264 if (TREE_CODE (arg0) == ADDR_EXPR)
10265 {
10266 tem = try_move_mult_to_index (loc, arg0,
10267 fold_convert_loc (loc,
10268 ssizetype, arg1));
10269 if (tem)
10270 return fold_convert_loc (loc, type, tem);
10271 }
10272
10273 return NULL_TREE;
10274
10275 case PLUS_EXPR:
10276 /* A + (-B) -> A - B */
10277 if (TREE_CODE (arg1) == NEGATE_EXPR
10278 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10279 return fold_build2_loc (loc, MINUS_EXPR, type,
10280 fold_convert_loc (loc, type, arg0),
10281 fold_convert_loc (loc, type,
10282 TREE_OPERAND (arg1, 0)));
10283 /* (-A) + B -> B - A */
10284 if (TREE_CODE (arg0) == NEGATE_EXPR
10285 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10286 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10287 return fold_build2_loc (loc, MINUS_EXPR, type,
10288 fold_convert_loc (loc, type, arg1),
10289 fold_convert_loc (loc, type,
10290 TREE_OPERAND (arg0, 0)));
10291
10292 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10293 {
10294 /* Convert ~A + 1 to -A. */
10295 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10296 && integer_onep (arg1))
10297 return fold_build1_loc (loc, NEGATE_EXPR, type,
10298 fold_convert_loc (loc, type,
10299 TREE_OPERAND (arg0, 0)));
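	  /* Worked check (illustrative annotation, not in the original
	     sources): in two's complement ~A == -A - 1, hence ~A + 1 == -A.
	     For an 8-bit A = 5: ~5 = 0xFA = 250, and 250 + 1 = 251, which
	     is -5 modulo 256.  */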
10300
10301 /* ~X + X is -1. */
10302 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10303 && !TYPE_OVERFLOW_TRAPS (type))
10304 {
10305 tree tem = TREE_OPERAND (arg0, 0);
10306
10307 STRIP_NOPS (tem);
10308 if (operand_equal_p (tem, arg1, 0))
10309 {
10310 t1 = build_all_ones_cst (type);
10311 return omit_one_operand_loc (loc, type, t1, arg1);
10312 }
10313 }
10314
10315 /* X + ~X is -1. */
10316 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10317 && !TYPE_OVERFLOW_TRAPS (type))
10318 {
10319 tree tem = TREE_OPERAND (arg1, 0);
10320
10321 STRIP_NOPS (tem);
10322 if (operand_equal_p (arg0, tem, 0))
10323 {
10324 t1 = build_all_ones_cst (type);
10325 return omit_one_operand_loc (loc, type, t1, arg0);
10326 }
10327 }
10328
10329 /* X + (X / CST) * -CST is X % CST. */
10330 if (TREE_CODE (arg1) == MULT_EXPR
10331 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10332 && operand_equal_p (arg0,
10333 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10334 {
10335 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10336 tree cst1 = TREE_OPERAND (arg1, 1);
10337 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10338 cst1, cst0);
10339 if (sum && integer_zerop (sum))
10340 return fold_convert_loc (loc, type,
10341 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10342 TREE_TYPE (arg0), arg0,
10343 cst0));
10344 }
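	  /* Worked example (illustrative annotation): for X = 17, CST = 5,
	     17 + (17 / 5) * -5 = 17 - 15 = 2 = 17 % 5.  The identity is
	     exact for truncating division, since X % CST is defined as
	     X - (X / CST) * CST.  */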
10345 }
10346
10347 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10348 one. Make sure the type is not saturating and has the signedness of
10349 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10350 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10351 if ((TREE_CODE (arg0) == MULT_EXPR
10352 || TREE_CODE (arg1) == MULT_EXPR)
10353 && !TYPE_SATURATING (type)
10354 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10355 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10356 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10357 {
10358 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10359 if (tem)
10360 return tem;
10361 }
10362
10363 if (! FLOAT_TYPE_P (type))
10364 {
10365 if (integer_zerop (arg1))
10366 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10367
10368 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10369 with a constant, and the two constants have no bits in common,
10370 we should treat this as a BIT_IOR_EXPR since this may produce more
10371 simplifications. */
10372 if (TREE_CODE (arg0) == BIT_AND_EXPR
10373 && TREE_CODE (arg1) == BIT_AND_EXPR
10374 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10375 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10376 && wi::bit_and (TREE_OPERAND (arg0, 1),
10377 TREE_OPERAND (arg1, 1)) == 0)
10378 {
10379 code = BIT_IOR_EXPR;
10380 goto bit_ior;
10381 }
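	  /* Illustrative example (annotation): in
	     (X & 0x0F) + (Y & 0xF0) the two masks share no bits, so no
	     bit position can receive two ones and the addition can never
	     carry; it is therefore the same as (X & 0x0F) | (Y & 0xF0).  */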
10382
10383 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10384 (plus (plus (mult) (mult)) (foo)) so that we can
10385 take advantage of the factoring cases below. */
10386 if (TYPE_OVERFLOW_WRAPS (type)
10387 && (((TREE_CODE (arg0) == PLUS_EXPR
10388 || TREE_CODE (arg0) == MINUS_EXPR)
10389 && TREE_CODE (arg1) == MULT_EXPR)
10390 || ((TREE_CODE (arg1) == PLUS_EXPR
10391 || TREE_CODE (arg1) == MINUS_EXPR)
10392 && TREE_CODE (arg0) == MULT_EXPR)))
10393 {
10394 tree parg0, parg1, parg, marg;
10395 enum tree_code pcode;
10396
10397 if (TREE_CODE (arg1) == MULT_EXPR)
10398 parg = arg0, marg = arg1;
10399 else
10400 parg = arg1, marg = arg0;
10401 pcode = TREE_CODE (parg);
10402 parg0 = TREE_OPERAND (parg, 0);
10403 parg1 = TREE_OPERAND (parg, 1);
10404 STRIP_NOPS (parg0);
10405 STRIP_NOPS (parg1);
10406
10407 if (TREE_CODE (parg0) == MULT_EXPR
10408 && TREE_CODE (parg1) != MULT_EXPR)
10409 return fold_build2_loc (loc, pcode, type,
10410 fold_build2_loc (loc, PLUS_EXPR, type,
10411 fold_convert_loc (loc, type,
10412 parg0),
10413 fold_convert_loc (loc, type,
10414 marg)),
10415 fold_convert_loc (loc, type, parg1));
10416 if (TREE_CODE (parg0) != MULT_EXPR
10417 && TREE_CODE (parg1) == MULT_EXPR)
10418 return
10419 fold_build2_loc (loc, PLUS_EXPR, type,
10420 fold_convert_loc (loc, type, parg0),
10421 fold_build2_loc (loc, pcode, type,
10422 fold_convert_loc (loc, type, marg),
10423 fold_convert_loc (loc, type,
10424 parg1)));
10425 }
10426 }
10427 else
10428 {
10429 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10430 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10431 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10432
10433 /* Likewise if the operands are reversed. */
10434 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10435 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10436
10437 /* Convert X + -C into X - C. */
10438 if (TREE_CODE (arg1) == REAL_CST
10439 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10440 {
10441 tem = fold_negate_const (arg1, type);
10442 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10443 return fold_build2_loc (loc, MINUS_EXPR, type,
10444 fold_convert_loc (loc, type, arg0),
10445 fold_convert_loc (loc, type, tem));
10446 }
10447
10448 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10449 to __complex__ ( x, y ). This is not the same for SNaNs or
10450 if signed zeros are involved. */
10451 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10452 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10453 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10454 {
10455 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10456 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10457 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10458 bool arg0rz = false, arg0iz = false;
10459 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10460 || (arg0i && (arg0iz = real_zerop (arg0i))))
10461 {
10462 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10463 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10464 if (arg0rz && arg1i && real_zerop (arg1i))
10465 {
10466 tree rp = arg1r ? arg1r
10467 : build1 (REALPART_EXPR, rtype, arg1);
10468 tree ip = arg0i ? arg0i
10469 : build1 (IMAGPART_EXPR, rtype, arg0);
10470 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10471 }
10472 else if (arg0iz && arg1r && real_zerop (arg1r))
10473 {
10474 tree rp = arg0r ? arg0r
10475 : build1 (REALPART_EXPR, rtype, arg0);
10476 tree ip = arg1i ? arg1i
10477 : build1 (IMAGPART_EXPR, rtype, arg1);
10478 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10479 }
10480 }
10481 }
10482
10483 if (flag_unsafe_math_optimizations
10484 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10485 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10486 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10487 return tem;
10488
10489 /* Convert x+x into x*2.0. */
10490 if (operand_equal_p (arg0, arg1, 0)
10491 && SCALAR_FLOAT_TYPE_P (type))
10492 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10493 build_real (type, dconst2));
10494
10495 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10496 We associate floats only if the user has specified
10497 -fassociative-math. */
10498 if (flag_associative_math
10499 && TREE_CODE (arg1) == PLUS_EXPR
10500 && TREE_CODE (arg0) != MULT_EXPR)
10501 {
10502 tree tree10 = TREE_OPERAND (arg1, 0);
10503 tree tree11 = TREE_OPERAND (arg1, 1);
10504 if (TREE_CODE (tree11) == MULT_EXPR
10505 && TREE_CODE (tree10) == MULT_EXPR)
10506 {
10507 tree tree0;
10508 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10509 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10510 }
10511 }
10512	      /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10513 We associate floats only if the user has specified
10514 -fassociative-math. */
10515 if (flag_associative_math
10516 && TREE_CODE (arg0) == PLUS_EXPR
10517 && TREE_CODE (arg1) != MULT_EXPR)
10518 {
10519 tree tree00 = TREE_OPERAND (arg0, 0);
10520 tree tree01 = TREE_OPERAND (arg0, 1);
10521 if (TREE_CODE (tree01) == MULT_EXPR
10522 && TREE_CODE (tree00) == MULT_EXPR)
10523 {
10524 tree tree0;
10525 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10526 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10527 }
10528 }
10529 }
10530
10531 bit_rotate:
10532	  /* (A << C1) + (A >> C2), where A is unsigned and C1 + C2 is the
10533	     width of A, is a rotate of A by C1 bits.  */
10534	  /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the
10535	     width of A, is a rotate of A by B bits.  */
10536 {
10537 enum tree_code code0, code1;
10538 tree rtype;
10539 code0 = TREE_CODE (arg0);
10540 code1 = TREE_CODE (arg1);
10541 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10542 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10543 && operand_equal_p (TREE_OPERAND (arg0, 0),
10544 TREE_OPERAND (arg1, 0), 0)
10545 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10546 TYPE_UNSIGNED (rtype))
10547 /* Only create rotates in complete modes. Other cases are not
10548 expanded properly. */
10549 && (element_precision (rtype)
10550 == element_precision (TYPE_MODE (rtype))))
10551 {
10552 tree tree01, tree11;
10553 enum tree_code code01, code11;
10554
10555 tree01 = TREE_OPERAND (arg0, 1);
10556 tree11 = TREE_OPERAND (arg1, 1);
10557 STRIP_NOPS (tree01);
10558 STRIP_NOPS (tree11);
10559 code01 = TREE_CODE (tree01);
10560 code11 = TREE_CODE (tree11);
10561 if (code01 == INTEGER_CST
10562 && code11 == INTEGER_CST
10563 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10564 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10565 {
10566 tem = build2_loc (loc, LROTATE_EXPR,
10567 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10568 TREE_OPERAND (arg0, 0),
10569 code0 == LSHIFT_EXPR ? tree01 : tree11);
10570 return fold_convert_loc (loc, type, tem);
10571 }
10572 else if (code11 == MINUS_EXPR)
10573 {
10574 tree tree110, tree111;
10575 tree110 = TREE_OPERAND (tree11, 0);
10576 tree111 = TREE_OPERAND (tree11, 1);
10577 STRIP_NOPS (tree110);
10578 STRIP_NOPS (tree111);
10579 if (TREE_CODE (tree110) == INTEGER_CST
10580 && 0 == compare_tree_int (tree110,
10581 element_precision
10582 (TREE_TYPE (TREE_OPERAND
10583 (arg0, 0))))
10584 && operand_equal_p (tree01, tree111, 0))
10585 return
10586 fold_convert_loc (loc, type,
10587 build2 ((code0 == LSHIFT_EXPR
10588 ? LROTATE_EXPR
10589 : RROTATE_EXPR),
10590 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10591 TREE_OPERAND (arg0, 0), tree01));
10592 }
10593 else if (code01 == MINUS_EXPR)
10594 {
10595 tree tree010, tree011;
10596 tree010 = TREE_OPERAND (tree01, 0);
10597 tree011 = TREE_OPERAND (tree01, 1);
10598 STRIP_NOPS (tree010);
10599 STRIP_NOPS (tree011);
10600 if (TREE_CODE (tree010) == INTEGER_CST
10601 && 0 == compare_tree_int (tree010,
10602 element_precision
10603 (TREE_TYPE (TREE_OPERAND
10604 (arg0, 0))))
10605 && operand_equal_p (tree11, tree011, 0))
10606 return fold_convert_loc
10607 (loc, type,
10608 build2 ((code0 != LSHIFT_EXPR
10609 ? LROTATE_EXPR
10610 : RROTATE_EXPR),
10611 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10612 TREE_OPERAND (arg0, 0), tree11));
10613 }
10614 }
10615 }
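      /* Worked example (illustrative annotation, not in the original
	 sources): for unsigned 32-bit A = 0x12345678,
	 (A << 8) + (A >> 24) = 0x34567800 + 0x00000012 = 0x34567812,
	 which is exactly A rotated left by 8 bits; the MINUS_EXPR cases
	 above recognize the same pattern written as
	 (A << B) + (A >> (32 - B)) with a variable shift count.  */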
10616
10617 associate:
10618	  /* In most languages, we can't associate operations on floats through
10619	     parentheses.  Rather than remember where the parentheses were, we
10620	     don't associate floats at all, unless the user has specified
10621	     -fassociative-math.
10622	     We also need to make sure the type is not saturating.  */
10623
10624 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10625 && !TYPE_SATURATING (type))
10626 {
10627 tree var0, con0, lit0, minus_lit0;
10628 tree var1, con1, lit1, minus_lit1;
10629 tree atype = type;
10630 bool ok = true;
10631
10632 /* Split both trees into variables, constants, and literals. Then
10633 associate each group together, the constants with literals,
10634 then the result with variables. This increases the chances of
10635 literals being recombined later and of generating relocatable
10636 expressions for the sum of a constant and literal. */
10637 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10638 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10639 code == MINUS_EXPR);
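      /* Illustrative sketch (annotation, not in the original sources):
	 folding (x + 3) + (y + 5) splits into variables x, y and literals
	 3, 5; the literals are combined first, giving (x + y) + 8, so the
	 constant part stays a single literal that later folds can pick up
	 again.  */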
10640
10641 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10642 if (code == MINUS_EXPR)
10643 code = PLUS_EXPR;
10644
10645 /* With undefined overflow prefer doing association in a type
10646 which wraps on overflow, if that is one of the operand types. */
10647 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10648 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10649 {
10650 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10651 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10652 atype = TREE_TYPE (arg0);
10653 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10654 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10655 atype = TREE_TYPE (arg1);
10656 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10657 }
10658
10659 /* With undefined overflow we can only associate constants with one
10660 variable, and constants whose association doesn't overflow. */
10661 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10662 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10663 {
10664 if (var0 && var1)
10665 {
10666 tree tmp0 = var0;
10667 tree tmp1 = var1;
10668
10669 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10670 tmp0 = TREE_OPERAND (tmp0, 0);
10671 if (CONVERT_EXPR_P (tmp0)
10672 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10673 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10674 <= TYPE_PRECISION (atype)))
10675 tmp0 = TREE_OPERAND (tmp0, 0);
10676 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10677 tmp1 = TREE_OPERAND (tmp1, 0);
10678 if (CONVERT_EXPR_P (tmp1)
10679 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10680 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10681 <= TYPE_PRECISION (atype)))
10682 tmp1 = TREE_OPERAND (tmp1, 0);
10683 /* The only case we can still associate with two variables
10684 is if they are the same, modulo negation and bit-pattern
10685 preserving conversions. */
10686 if (!operand_equal_p (tmp0, tmp1, 0))
10687 ok = false;
10688 }
10689 }
10690
10691 /* Only do something if we found more than two objects. Otherwise,
10692 nothing has changed and we risk infinite recursion. */
10693 if (ok
10694 && (2 < ((var0 != 0) + (var1 != 0)
10695 + (con0 != 0) + (con1 != 0)
10696 + (lit0 != 0) + (lit1 != 0)
10697 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10698 {
10699 bool any_overflows = false;
10700 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10701 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10702 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10703 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10704 var0 = associate_trees (loc, var0, var1, code, atype);
10705 con0 = associate_trees (loc, con0, con1, code, atype);
10706 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10707 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10708 code, atype);
10709
10710	  /* Preserve the MINUS_EXPR if the negative part of the literal is
10711	     greater than the positive part.  Otherwise, the multiplicative
10712	     folding code (i.e. extract_muldiv) may be fooled when unsigned
10713	     constants are subtracted, as in the following example:
10714	     ((X*2 + 4) - 8U)/2.  */
10715 if (minus_lit0 && lit0)
10716 {
10717 if (TREE_CODE (lit0) == INTEGER_CST
10718 && TREE_CODE (minus_lit0) == INTEGER_CST
10719 && tree_int_cst_lt (lit0, minus_lit0))
10720 {
10721 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10722 MINUS_EXPR, atype);
10723 lit0 = 0;
10724 }
10725 else
10726 {
10727 lit0 = associate_trees (loc, lit0, minus_lit0,
10728 MINUS_EXPR, atype);
10729 minus_lit0 = 0;
10730 }
10731 }
10732
10733 /* Don't introduce overflows through reassociation. */
10734 if (!any_overflows
10735 && ((lit0 && TREE_OVERFLOW (lit0))
10736 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10737 return NULL_TREE;
10738
10739 if (minus_lit0)
10740 {
10741 if (con0 == 0)
10742 return
10743 fold_convert_loc (loc, type,
10744 associate_trees (loc, var0, minus_lit0,
10745 MINUS_EXPR, atype));
10746 else
10747 {
10748 con0 = associate_trees (loc, con0, minus_lit0,
10749 MINUS_EXPR, atype);
10750 return
10751 fold_convert_loc (loc, type,
10752 associate_trees (loc, var0, con0,
10753 PLUS_EXPR, atype));
10754 }
10755 }
10756
10757 con0 = associate_trees (loc, con0, lit0, code, atype);
10758 return
10759 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10760 code, atype));
10761 }
10762 }
10763
10764 return NULL_TREE;
10765
10766 case MINUS_EXPR:
10767 /* Pointer simplifications for subtraction, simple reassociations. */
10768 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10769 {
10770 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10771 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10772 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10773 {
10774 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10775 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10776 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10777 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10778 return fold_build2_loc (loc, PLUS_EXPR, type,
10779 fold_build2_loc (loc, MINUS_EXPR, type,
10780 arg00, arg10),
10781 fold_build2_loc (loc, MINUS_EXPR, type,
10782 arg01, arg11));
10783 }
10784 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10785 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10786 {
10787 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10788 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10789 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10790 fold_convert_loc (loc, type, arg1));
10791 if (tmp)
10792 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10793 }
10794 }
10795 /* A - (-B) -> A + B */
10796 if (TREE_CODE (arg1) == NEGATE_EXPR)
10797 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10798 fold_convert_loc (loc, type,
10799 TREE_OPERAND (arg1, 0)));
10800 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10801 if (TREE_CODE (arg0) == NEGATE_EXPR
10802 && negate_expr_p (arg1)
10803 && reorder_operands_p (arg0, arg1))
10804 return fold_build2_loc (loc, MINUS_EXPR, type,
10805 fold_convert_loc (loc, type,
10806 negate_expr (arg1)),
10807 fold_convert_loc (loc, type,
10808 TREE_OPERAND (arg0, 0)));
10809 /* Convert -A - 1 to ~A. */
10810 if (TREE_CODE (type) != COMPLEX_TYPE
10811 && TREE_CODE (arg0) == NEGATE_EXPR
10812 && integer_onep (arg1)
10813 && !TYPE_OVERFLOW_TRAPS (type))
10814 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10815 fold_convert_loc (loc, type,
10816 TREE_OPERAND (arg0, 0)));
10817
10818 /* Convert -1 - A to ~A. */
10819 if (TREE_CODE (type) != COMPLEX_TYPE
10820 && integer_all_onesp (arg0))
10821 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10822
10823
10824 /* X - (X / Y) * Y is X % Y. */
10825 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10826 && TREE_CODE (arg1) == MULT_EXPR
10827 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10828 && operand_equal_p (arg0,
10829 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10830 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10831 TREE_OPERAND (arg1, 1), 0))
10832 return
10833 fold_convert_loc (loc, type,
10834 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10835 arg0, TREE_OPERAND (arg1, 1)));
10836
10837 if (! FLOAT_TYPE_P (type))
10838 {
10839 if (integer_zerop (arg0))
10840 return negate_expr (fold_convert_loc (loc, type, arg1));
10841 if (integer_zerop (arg1))
10842 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10843
10844 /* Fold A - (A & B) into ~B & A. */
10845 if (!TREE_SIDE_EFFECTS (arg0)
10846 && TREE_CODE (arg1) == BIT_AND_EXPR)
10847 {
10848 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10849 {
10850 tree arg10 = fold_convert_loc (loc, type,
10851 TREE_OPERAND (arg1, 0));
10852 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10853 fold_build1_loc (loc, BIT_NOT_EXPR,
10854 type, arg10),
10855 fold_convert_loc (loc, type, arg0));
10856 }
10857 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10858 {
10859 tree arg11 = fold_convert_loc (loc,
10860 type, TREE_OPERAND (arg1, 1));
10861 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10862 fold_build1_loc (loc, BIT_NOT_EXPR,
10863 type, arg11),
10864 fold_convert_loc (loc, type, arg0));
10865 }
10866 }
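	  /* Worked example (illustrative annotation): every bit set in
	     A & B is also set in A, so the subtraction never borrows and
	     just clears those bits, i.e. A - (A & B) == A & ~B.  For
	     A = 0b1101 and B = 0b1011: A & B = 0b1001, and
	     0b1101 - 0b1001 = 0b0100 = A & ~B.  */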
10867
10868 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10869 any power of 2 minus 1. */
10870 if (TREE_CODE (arg0) == BIT_AND_EXPR
10871 && TREE_CODE (arg1) == BIT_AND_EXPR
10872 && operand_equal_p (TREE_OPERAND (arg0, 0),
10873 TREE_OPERAND (arg1, 0), 0))
10874 {
10875 tree mask0 = TREE_OPERAND (arg0, 1);
10876 tree mask1 = TREE_OPERAND (arg1, 1);
10877 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10878
10879 if (operand_equal_p (tem, mask1, 0))
10880 {
10881 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10882 TREE_OPERAND (arg0, 0), mask1);
10883 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10884 }
10885 }
10886 }
10887
10888 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10889 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10890 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10891
10892 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10893 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10894 (-ARG1 + ARG0) reduces to -ARG1. */
10895 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10896 return negate_expr (fold_convert_loc (loc, type, arg1));
10897
10898 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10899 __complex__ ( x, -y ). This is not the same for SNaNs or if
10900 signed zeros are involved. */
10901 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10902 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10903 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10904 {
10905 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10906 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10907 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10908 bool arg0rz = false, arg0iz = false;
10909 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10910 || (arg0i && (arg0iz = real_zerop (arg0i))))
10911 {
10912 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10913 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10914 if (arg0rz && arg1i && real_zerop (arg1i))
10915 {
10916 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10917 arg1r ? arg1r
10918 : build1 (REALPART_EXPR, rtype, arg1));
10919 tree ip = arg0i ? arg0i
10920 : build1 (IMAGPART_EXPR, rtype, arg0);
10921 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10922 }
10923 else if (arg0iz && arg1r && real_zerop (arg1r))
10924 {
10925 tree rp = arg0r ? arg0r
10926 : build1 (REALPART_EXPR, rtype, arg0);
10927 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10928 arg1i ? arg1i
10929 : build1 (IMAGPART_EXPR, rtype, arg1));
10930 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10931 }
10932 }
10933 }
10934
10935 /* Fold &x - &x. This can happen from &x.foo - &x.
10936 This is unsafe for certain floats even in non-IEEE formats.
10937	     In IEEE, it is unsafe because it gives the wrong result for NaNs.
10938 Also note that operand_equal_p is always false if an operand
10939 is volatile. */
10940
10941 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10942 && operand_equal_p (arg0, arg1, 0))
10943 return build_zero_cst (type);
10944
10945 /* A - B -> A + (-B) if B is easily negatable. */
10946 if (negate_expr_p (arg1)
10947 && ((FLOAT_TYPE_P (type)
10948 /* Avoid this transformation if B is a positive REAL_CST. */
10949 && (TREE_CODE (arg1) != REAL_CST
10950 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10951 || INTEGRAL_TYPE_P (type)))
10952 return fold_build2_loc (loc, PLUS_EXPR, type,
10953 fold_convert_loc (loc, type, arg0),
10954 fold_convert_loc (loc, type,
10955 negate_expr (arg1)));
10956
10957 /* Try folding difference of addresses. */
10958 {
10959 HOST_WIDE_INT diff;
10960
10961 if ((TREE_CODE (arg0) == ADDR_EXPR
10962 || TREE_CODE (arg1) == ADDR_EXPR)
10963 && ptr_difference_const (arg0, arg1, &diff))
10964 return build_int_cst_type (type, diff);
10965 }
10966
10967 /* Fold &a[i] - &a[j] to i-j. */
10968 if (TREE_CODE (arg0) == ADDR_EXPR
10969 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10970 && TREE_CODE (arg1) == ADDR_EXPR
10971 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10972 {
10973 tree tem = fold_addr_of_array_ref_difference (loc, type,
10974 TREE_OPERAND (arg0, 0),
10975 TREE_OPERAND (arg1, 0));
10976 if (tem)
10977 return tem;
10978 }
10979
10980 if (FLOAT_TYPE_P (type)
10981 && flag_unsafe_math_optimizations
10982 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10983 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10984 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10985 return tem;
10986
10987 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10988 one. Make sure the type is not saturating and has the signedness of
10989 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10990 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10991 if ((TREE_CODE (arg0) == MULT_EXPR
10992 || TREE_CODE (arg1) == MULT_EXPR)
10993 && !TYPE_SATURATING (type)
10994 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10995 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10996 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10997 {
10998 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10999 if (tem)
11000 return tem;
11001 }
11002
11003 goto associate;
11004
11005 case MULT_EXPR:
11006 /* (-A) * (-B) -> A * B */
11007 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11008 return fold_build2_loc (loc, MULT_EXPR, type,
11009 fold_convert_loc (loc, type,
11010 TREE_OPERAND (arg0, 0)),
11011 fold_convert_loc (loc, type,
11012 negate_expr (arg1)));
11013 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11014 return fold_build2_loc (loc, MULT_EXPR, type,
11015 fold_convert_loc (loc, type,
11016 negate_expr (arg0)),
11017 fold_convert_loc (loc, type,
11018 TREE_OPERAND (arg1, 0)));
11019
11020 if (! FLOAT_TYPE_P (type))
11021 {
11022 if (integer_zerop (arg1))
11023 return omit_one_operand_loc (loc, type, arg1, arg0);
11024 if (integer_onep (arg1))
11025 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11026 /* Transform x * -1 into -x. Make sure to do the negation
11027 on the original operand with conversions not stripped
11028 because we can only strip non-sign-changing conversions. */
11029 if (integer_minus_onep (arg1))
11030 return fold_convert_loc (loc, type, negate_expr (op0));
11031 /* Transform x * -C into -x * C if x is easily negatable. */
11032 if (TREE_CODE (arg1) == INTEGER_CST
11033 && tree_int_cst_sgn (arg1) == -1
11034 && negate_expr_p (arg0)
11035 && (tem = negate_expr (arg1)) != arg1
11036 && !TREE_OVERFLOW (tem))
11037 return fold_build2_loc (loc, MULT_EXPR, type,
11038 fold_convert_loc (loc, type,
11039 negate_expr (arg0)),
11040 tem);
11041
11042 /* (a * (1 << b)) is (a << b) */
11043 if (TREE_CODE (arg1) == LSHIFT_EXPR
11044 && integer_onep (TREE_OPERAND (arg1, 0)))
11045 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11046 TREE_OPERAND (arg1, 1));
11047 if (TREE_CODE (arg0) == LSHIFT_EXPR
11048 && integer_onep (TREE_OPERAND (arg0, 0)))
11049 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11050 TREE_OPERAND (arg0, 1));
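	  /* Illustrative example (annotation): a * (1 << 3) == a * 8
	     == a << 3; multiplication by a power of two is the
	     corresponding left shift, which the two cases above fold
	     for either operand order.  */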
11051
11052 /* (A + A) * C -> A * 2 * C */
11053 if (TREE_CODE (arg0) == PLUS_EXPR
11054 && TREE_CODE (arg1) == INTEGER_CST
11055 && operand_equal_p (TREE_OPERAND (arg0, 0),
11056 TREE_OPERAND (arg0, 1), 0))
11057 return fold_build2_loc (loc, MULT_EXPR, type,
11058 omit_one_operand_loc (loc, type,
11059 TREE_OPERAND (arg0, 0),
11060 TREE_OPERAND (arg0, 1)),
11061 fold_build2_loc (loc, MULT_EXPR, type,
11062				     build_int_cst (type, 2), arg1));
11063
11064 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11065 sign-changing only. */
11066 if (TREE_CODE (arg1) == INTEGER_CST
11067 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11068 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11069 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11070
11071 strict_overflow_p = false;
11072 if (TREE_CODE (arg1) == INTEGER_CST
11073 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11074 &strict_overflow_p)))
11075 {
11076 if (strict_overflow_p)
11077 fold_overflow_warning (("assuming signed overflow does not "
11078 "occur when simplifying "
11079 "multiplication"),
11080 WARN_STRICT_OVERFLOW_MISC);
11081 return fold_convert_loc (loc, type, tem);
11082 }
11083
11084 /* Optimize z * conj(z) for integer complex numbers. */
11085 if (TREE_CODE (arg0) == CONJ_EXPR
11086 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11087 return fold_mult_zconjz (loc, type, arg1);
11088 if (TREE_CODE (arg1) == CONJ_EXPR
11089 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11090 return fold_mult_zconjz (loc, type, arg0);
11091 }
11092 else
11093 {
11094 /* Maybe fold x * 0 to 0. The expressions aren't the same
11095 when x is NaN, since x * 0 is also NaN. Nor are they the
11096 same in modes with signed zeros, since multiplying a
11097 negative value by 0 gives -0, not +0. */
11098 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11099 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11100 && real_zerop (arg1))
11101 return omit_one_operand_loc (loc, type, arg1, arg0);
11102 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11103 Likewise for complex arithmetic with signed zeros. */
11104 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11105 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11106 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11107 && real_onep (arg1))
11108 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11109
11110 /* Transform x * -1.0 into -x. */
11111 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11112 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11113 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11114 && real_minus_onep (arg1))
11115 return fold_convert_loc (loc, type, negate_expr (arg0));
11116
11117	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
11118	     the result for floating point types due to rounding, so it is
11119	     applied only if -fassociative-math was specified.  */
11120 if (flag_associative_math
11121 && TREE_CODE (arg0) == RDIV_EXPR
11122 && TREE_CODE (arg1) == REAL_CST
11123 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11124 {
11125 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11126 arg1);
11127 if (tem)
11128 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11129 TREE_OPERAND (arg0, 1));
11130 }
11131
11132 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11133 if (operand_equal_p (arg0, arg1, 0))
11134 {
11135 tree tem = fold_strip_sign_ops (arg0);
11136 if (tem != NULL_TREE)
11137 {
11138 tem = fold_convert_loc (loc, type, tem);
11139 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11140 }
11141 }
11142
11143 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11144 This is not the same for NaNs or if signed zeros are
11145 involved. */
11146 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11147 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11148 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11149 && TREE_CODE (arg1) == COMPLEX_CST
11150 && real_zerop (TREE_REALPART (arg1)))
11151 {
11152 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11153 if (real_onep (TREE_IMAGPART (arg1)))
11154 return
11155 fold_build2_loc (loc, COMPLEX_EXPR, type,
11156 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11157 rtype, arg0)),
11158 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11159 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11160 return
11161 fold_build2_loc (loc, COMPLEX_EXPR, type,
11162 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11163 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11164 rtype, arg0)));
11165 }
11166
11167 /* Optimize z * conj(z) for floating point complex numbers.
11168 Guarded by flag_unsafe_math_optimizations as non-finite
11169 imaginary components don't produce scalar results. */
11170 if (flag_unsafe_math_optimizations
11171 && TREE_CODE (arg0) == CONJ_EXPR
11172 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11173 return fold_mult_zconjz (loc, type, arg1);
11174 if (flag_unsafe_math_optimizations
11175 && TREE_CODE (arg1) == CONJ_EXPR
11176 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11177 return fold_mult_zconjz (loc, type, arg0);
11178
11179 if (flag_unsafe_math_optimizations)
11180 {
11181 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11182 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11183
11184 /* Optimizations of root(...)*root(...). */
11185 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11186 {
11187 tree rootfn, arg;
11188 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11189 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11190
11191 /* Optimize sqrt(x)*sqrt(x) as x. */
11192 if (BUILTIN_SQRT_P (fcode0)
11193 && operand_equal_p (arg00, arg10, 0)
11194 && ! HONOR_SNANS (TYPE_MODE (type)))
11195 return arg00;
11196
11197 /* Optimize root(x)*root(y) as root(x*y). */
11198 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11199 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11200 return build_call_expr_loc (loc, rootfn, 1, arg);
11201 }
11202
11203 /* Optimize expN(x)*expN(y) as expN(x+y). */
11204 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11205 {
11206 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11207 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11208 CALL_EXPR_ARG (arg0, 0),
11209 CALL_EXPR_ARG (arg1, 0));
11210 return build_call_expr_loc (loc, expfn, 1, arg);
11211 }
11212
11213 /* Optimizations of pow(...)*pow(...). */
11214 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11215 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11216 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11217 {
11218 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11219 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11220 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11221 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11222
11223 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11224 if (operand_equal_p (arg01, arg11, 0))
11225 {
11226 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11227 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11228 arg00, arg10);
11229 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11230 }
11231
11232 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11233 if (operand_equal_p (arg00, arg10, 0))
11234 {
11235 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11236 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11237 arg01, arg11);
11238 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11239 }
11240 }
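	      /* Illustrative examples (annotation, not in the original
		 sources): pow (x, 2) * pow (x, 3) folds to pow (x, 5), and
		 pow (x, y) * pow (z, y) folds to pow (x * z, y).  Both
		 identities are exact in real arithmetic but can change
		 rounding and edge-case behavior, hence the
		 flag_unsafe_math_optimizations guard.  */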
11241
11242 /* Optimize tan(x)*cos(x) as sin(x). */
11243 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11244 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11245 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11246 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11247 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11248 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11249 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11250 CALL_EXPR_ARG (arg1, 0), 0))
11251 {
11252 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11253
11254 if (sinfn != NULL_TREE)
11255 return build_call_expr_loc (loc, sinfn, 1,
11256 CALL_EXPR_ARG (arg0, 0));
11257 }
11258
11259 /* Optimize x*pow(x,c) as pow(x,c+1). */
11260 if (fcode1 == BUILT_IN_POW
11261 || fcode1 == BUILT_IN_POWF
11262 || fcode1 == BUILT_IN_POWL)
11263 {
11264 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11265 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11266 if (TREE_CODE (arg11) == REAL_CST
11267 && !TREE_OVERFLOW (arg11)
11268 && operand_equal_p (arg0, arg10, 0))
11269 {
11270 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11271 REAL_VALUE_TYPE c;
11272 tree arg;
11273
11274 c = TREE_REAL_CST (arg11);
11275 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11276 arg = build_real (type, c);
11277 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11278 }
11279 }
11280
11281 /* Optimize pow(x,c)*x as pow(x,c+1). */
11282 if (fcode0 == BUILT_IN_POW
11283 || fcode0 == BUILT_IN_POWF
11284 || fcode0 == BUILT_IN_POWL)
11285 {
11286 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11287 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11288 if (TREE_CODE (arg01) == REAL_CST
11289 && !TREE_OVERFLOW (arg01)
11290 && operand_equal_p (arg1, arg00, 0))
11291 {
11292 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11293 REAL_VALUE_TYPE c;
11294 tree arg;
11295
11296 c = TREE_REAL_CST (arg01);
11297 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11298 arg = build_real (type, c);
11299 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11300 }
11301 }
11302
11303 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11304 if (!in_gimple_form
11305 && optimize
11306 && operand_equal_p (arg0, arg1, 0))
11307 {
11308 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11309
11310 if (powfn)
11311 {
11312 tree arg = build_real (type, dconst2);
11313 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11314 }
11315 }
11316 }
11317 }
11318 goto associate;
11319
11320 case BIT_IOR_EXPR:
11321 bit_ior:
11322 if (integer_all_onesp (arg1))
11323 return omit_one_operand_loc (loc, type, arg1, arg0);
11324 if (integer_zerop (arg1))
11325 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11326 if (operand_equal_p (arg0, arg1, 0))
11327 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11328
11329 /* ~X | X is -1. */
11330 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11331 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11332 {
11333 t1 = build_zero_cst (type);
11334 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11335 return omit_one_operand_loc (loc, type, t1, arg1);
11336 }
11337
11338 /* X | ~X is -1. */
11339 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11340 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11341 {
11342 t1 = build_zero_cst (type);
11343 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11344 return omit_one_operand_loc (loc, type, t1, arg0);
11345 }
11346
11347 /* Canonicalize (X & C1) | C2. */
11348 if (TREE_CODE (arg0) == BIT_AND_EXPR
11349 && TREE_CODE (arg1) == INTEGER_CST
11350 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11351 {
11352 int width = TYPE_PRECISION (type), w;
11353 wide_int c1 = TREE_OPERAND (arg0, 1);
11354 wide_int c2 = arg1;
11355
11356 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11357 if ((c1 & c2) == c1)
11358 return omit_one_operand_loc (loc, type, arg1,
11359 TREE_OPERAND (arg0, 0));
11360
11361 wide_int msk = wi::mask (width, false,
11362 TYPE_PRECISION (TREE_TYPE (arg1)));
11363
11364 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11365 if (msk.and_not (c1 | c2) == 0)
11366 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11367 TREE_OPERAND (arg0, 0), arg1);
11368
11369 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11370 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11371 mode which allows further optimizations. */
11372 c1 &= msk;
11373 c2 &= msk;
11374 wide_int c3 = c1.and_not (c2);
11375 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11376 {
11377 wide_int mask = wi::mask (w, false,
11378 TYPE_PRECISION (type));
11379 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11380 {
11381 c3 = mask;
11382 break;
11383 }
11384 }
11385
11386 if (c3 != c1)
11387 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11388 fold_build2_loc (loc, BIT_AND_EXPR, type,
11389 TREE_OPERAND (arg0, 0),
11390 wide_int_to_tree (type,
11391 c3)),
11392 arg1);
11393 }
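	  /* Worked example (illustrative annotation): in
	     (X & 0x0F) | 0x05 the bits in 0x05 are forced to one by the OR
	     regardless of X, so C1 is minimized to 0x0F & ~0x05 = 0x0A,
	     giving the canonical form (X & 0x0A) | 0x05.  */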
11394
11395 /* (X & Y) | Y is (X, Y). */
11396 if (TREE_CODE (arg0) == BIT_AND_EXPR
11397 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11398 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11399 /* (X & Y) | X is (Y, X). */
11400 if (TREE_CODE (arg0) == BIT_AND_EXPR
11401 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11402 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11403 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11404 /* X | (X & Y) is (Y, X). */
11405 if (TREE_CODE (arg1) == BIT_AND_EXPR
11406 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11407 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11408 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11409 /* X | (Y & X) is (Y, X). */
11410 if (TREE_CODE (arg1) == BIT_AND_EXPR
11411 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11412 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11413 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11414
11415 /* (X & ~Y) | (~X & Y) is X ^ Y */
11416 if (TREE_CODE (arg0) == BIT_AND_EXPR
11417 && TREE_CODE (arg1) == BIT_AND_EXPR)
11418 {
11419 tree a0, a1, l0, l1, n0, n1;
11420
11421 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11422 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11423
11424 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11425 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11426
11427 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11428 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11429
11430 if ((operand_equal_p (n0, a0, 0)
11431 && operand_equal_p (n1, a1, 0))
11432 || (operand_equal_p (n0, a1, 0)
11433 && operand_equal_p (n1, a0, 0)))
11434 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11435 }
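	  /* Worked example (illustrative annotation): for X = 0b1100 and
	     Y = 0b1010, X & ~Y = 0b0100 and ~X & Y = 0b0010, whose OR is
	     0b0110 = X ^ Y: each side keeps exactly the bits where the
	     two operands differ.  */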
11436
11437 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11438 if (t1 != NULL_TREE)
11439 return t1;
11440
11441 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11442
11443 This results in more efficient code for machines without a NAND
11444 instruction. Combine will canonicalize to the first form
11445 which will allow use of NAND instructions provided by the
11446 backend if they exist. */
11447 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11448 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11449 {
11450 return
11451 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11452 build2 (BIT_AND_EXPR, type,
11453 fold_convert_loc (loc, type,
11454 TREE_OPERAND (arg0, 0)),
11455 fold_convert_loc (loc, type,
11456 TREE_OPERAND (arg1, 0))));
11457 }
11458
11459 /* See if this can be simplified into a rotate first. If that
11460 is unsuccessful continue in the association code. */
11461 goto bit_rotate;
11462
11463 case BIT_XOR_EXPR:
11464 if (integer_zerop (arg1))
11465 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11466 if (integer_all_onesp (arg1))
11467 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11468 if (operand_equal_p (arg0, arg1, 0))
11469 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11470
11471 /* ~X ^ X is -1. */
11472 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11473 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11474 {
11475 t1 = build_zero_cst (type);
11476 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11477 return omit_one_operand_loc (loc, type, t1, arg1);
11478 }
11479
11480 /* X ^ ~X is -1. */
11481 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11482 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11483 {
11484 t1 = build_zero_cst (type);
11485 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11486 return omit_one_operand_loc (loc, type, t1, arg0);
11487 }
11488
11489 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11490 with a constant, and the two constants have no bits in common,
11491 we should treat this as a BIT_IOR_EXPR since this may produce more
11492 simplifications. */
11493 if (TREE_CODE (arg0) == BIT_AND_EXPR
11494 && TREE_CODE (arg1) == BIT_AND_EXPR
11495 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11496 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11497 && wi::bit_and (TREE_OPERAND (arg0, 1),
11498 TREE_OPERAND (arg1, 1)) == 0)
11499 {
11500 code = BIT_IOR_EXPR;
11501 goto bit_ior;
11502 }
11503
11504	      /* (X | Y) ^ X -> Y & ~X.  */
11505 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11506 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11507 {
11508 tree t2 = TREE_OPERAND (arg0, 1);
11509 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11510 arg1);
11511 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11512 fold_convert_loc (loc, type, t2),
11513 fold_convert_loc (loc, type, t1));
11514 return t1;
11515 }
11516
11517	      /* (Y | X) ^ X -> Y & ~X.  */
11518 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11519 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11520 {
11521 tree t2 = TREE_OPERAND (arg0, 0);
11522 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11523 arg1);
11524 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11525 fold_convert_loc (loc, type, t2),
11526 fold_convert_loc (loc, type, t1));
11527 return t1;
11528 }
11529
11530	      /* X ^ (X | Y) -> Y & ~X.  */
11531 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11532 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11533 {
11534 tree t2 = TREE_OPERAND (arg1, 1);
11535 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11536 arg0);
11537 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11538 fold_convert_loc (loc, type, t2),
11539 fold_convert_loc (loc, type, t1));
11540 return t1;
11541 }
11542
11543	      /* X ^ (Y | X) -> Y & ~X.  */
11544 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11545 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11546 {
11547 tree t2 = TREE_OPERAND (arg1, 0);
11548 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11549 arg0);
11550 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11551 fold_convert_loc (loc, type, t2),
11552 fold_convert_loc (loc, type, t1));
11553 return t1;
11554 }
11555
11556 /* Convert ~X ^ ~Y to X ^ Y. */
11557 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11558 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11559 return fold_build2_loc (loc, code, type,
11560 fold_convert_loc (loc, type,
11561 TREE_OPERAND (arg0, 0)),
11562 fold_convert_loc (loc, type,
11563 TREE_OPERAND (arg1, 0)));
11564
11565 /* Convert ~X ^ C to X ^ ~C. */
11566 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11567 && TREE_CODE (arg1) == INTEGER_CST)
11568 return fold_build2_loc (loc, code, type,
11569 fold_convert_loc (loc, type,
11570 TREE_OPERAND (arg0, 0)),
11571 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11572
11573 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11574 if (TREE_CODE (arg0) == BIT_AND_EXPR
11575 && integer_onep (TREE_OPERAND (arg0, 1))
11576 && integer_onep (arg1))
11577 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11578 build_zero_cst (TREE_TYPE (arg0)));
11579
11580 /* Fold (X & Y) ^ Y as ~X & Y. */
11581 if (TREE_CODE (arg0) == BIT_AND_EXPR
11582 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11583 {
11584 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11585 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11586 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11587 fold_convert_loc (loc, type, arg1));
11588 }
11589 /* Fold (X & Y) ^ X as ~Y & X. */
11590 if (TREE_CODE (arg0) == BIT_AND_EXPR
11591 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11592 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11593 {
11594 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11595 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11596 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11597 fold_convert_loc (loc, type, arg1));
11598 }
11599 /* Fold X ^ (X & Y) as X & ~Y. */
11600 if (TREE_CODE (arg1) == BIT_AND_EXPR
11601 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11602 {
11603 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11604 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11605 fold_convert_loc (loc, type, arg0),
11606 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11607 }
11608 /* Fold X ^ (Y & X) as ~Y & X. */
11609 if (TREE_CODE (arg1) == BIT_AND_EXPR
11610 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11611 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11612 {
11613 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11614 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11615 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11616 fold_convert_loc (loc, type, arg0));
11617 }
11618
11619 /* See if this can be simplified into a rotate first. If that
11620 is unsuccessful continue in the association code. */
11621 goto bit_rotate;
11622
11623 case BIT_AND_EXPR:
11624 if (integer_all_onesp (arg1))
11625 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11626 if (integer_zerop (arg1))
11627 return omit_one_operand_loc (loc, type, arg1, arg0);
11628 if (operand_equal_p (arg0, arg1, 0))
11629 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11630
11631 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11632 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11633 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11634 || (TREE_CODE (arg0) == EQ_EXPR
11635 && integer_zerop (TREE_OPERAND (arg0, 1))))
11636 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11637 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11638
11639	      /* X & ~X, X & (X == 0), and X & !X are always zero.  */
11640 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11641 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11642 || (TREE_CODE (arg1) == EQ_EXPR
11643 && integer_zerop (TREE_OPERAND (arg1, 1))))
11644 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11645 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11646
11647 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11648 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11649 && TREE_CODE (arg1) == INTEGER_CST
11650 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11651 {
11652 tree tmp1 = fold_convert_loc (loc, type, arg1);
11653 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11654 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11655 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11656 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11657 return
11658 fold_convert_loc (loc, type,
11659 fold_build2_loc (loc, BIT_IOR_EXPR,
11660 type, tmp2, tmp3));
11661 }
11662
11663 /* (X | Y) & Y is (X, Y). */
11664 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11665 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11666 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11667 /* (X | Y) & X is (Y, X). */
11668 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11669 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11670 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11671 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11672 /* X & (X | Y) is (Y, X). */
11673 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11674 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11675 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11676 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11677 /* X & (Y | X) is (Y, X). */
11678 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11679 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11680 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11681 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11682
11683 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11684 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11685 && integer_onep (TREE_OPERAND (arg0, 1))
11686 && integer_onep (arg1))
11687 {
11688 tree tem2;
11689 tem = TREE_OPERAND (arg0, 0);
11690 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11691 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11692 tem, tem2);
11693 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11694 build_zero_cst (TREE_TYPE (tem)));
11695 }
11696 /* Fold ~X & 1 as (X & 1) == 0. */
11697 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11698 && integer_onep (arg1))
11699 {
11700 tree tem2;
11701 tem = TREE_OPERAND (arg0, 0);
11702 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11703 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11704 tem, tem2);
11705 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11706 build_zero_cst (TREE_TYPE (tem)));
11707 }
11708 /* Fold !X & 1 as X == 0. */
11709 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11710 && integer_onep (arg1))
11711 {
11712 tem = TREE_OPERAND (arg0, 0);
11713 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11714 build_zero_cst (TREE_TYPE (tem)));
11715 }
11716
11717 /* Fold (X ^ Y) & Y as ~X & Y. */
11718 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11719 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11720 {
11721 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11722 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11723 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11724 fold_convert_loc (loc, type, arg1));
11725 }
11726 /* Fold (X ^ Y) & X as ~Y & X. */
11727 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11728 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11729 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11730 {
11731 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11732 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11733 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11734 fold_convert_loc (loc, type, arg1));
11735 }
11736 /* Fold X & (X ^ Y) as X & ~Y. */
11737 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11738 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11739 {
11740 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11741 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11742 fold_convert_loc (loc, type, arg0),
11743 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11744 }
11745 /* Fold X & (Y ^ X) as ~Y & X. */
11746 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11747 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11748 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11749 {
11750 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11751 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11752 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11753 fold_convert_loc (loc, type, arg0));
11754 }
11755
11756 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11757 multiple of 1 << CST. */
11758 if (TREE_CODE (arg1) == INTEGER_CST)
11759 {
11760 wide_int cst1 = arg1;
11761 wide_int ncst1 = -cst1;
11762 if ((cst1 & ncst1) == ncst1
11763 && multiple_of_p (type, arg0,
11764 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11765 return fold_convert_loc (loc, type, arg0);
11766 }
11767
11768 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11769 bits from CST2. */
11770 if (TREE_CODE (arg1) == INTEGER_CST
11771 && TREE_CODE (arg0) == MULT_EXPR
11772 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11773 {
11774 wide_int warg1 = arg1;
11775 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11776
11777 if (masked == 0)
11778 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11779 arg0, arg1);
11780 else if (masked != warg1)
11781 {
11782 /* Avoid the transform if arg1 is a mask of some
11783 mode which allows further optimizations. */
11784 int pop = wi::popcount (warg1);
11785 if (!(pop >= BITS_PER_UNIT
11786 && exact_log2 (pop) != -1
11787 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11788 return fold_build2_loc (loc, code, type, op0,
11789 wide_int_to_tree (type, masked));
11790 }
11791 }
11792
11793 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11794 ((A & N) + B) & M -> (A + B) & M
11795 Similarly if (N & M) == 0,
11796 ((A | N) + B) & M -> (A + B) & M
11797 and for - instead of + (or unary - instead of +)
11798 and/or ^ instead of |.
11799 If B is constant and (B & M) == 0, fold into A & M. */
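 /* Editor's illustrative examples (assumed, not in the original source):
    with M == 7, ((a & 15) + b) & 7 ==> (a + b) & 7 since (15 & 7) == 7;
    ((a | 8) + b) & 7 ==> (a + b) & 7 since (8 & 7) == 0; and
    (a + 16) & 7 ==> a & 7 since (16 & 7) == 0.  */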
11800 if (TREE_CODE (arg1) == INTEGER_CST)
11801 {
11802 wide_int cst1 = arg1;
11803 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11804 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11805 && (TREE_CODE (arg0) == PLUS_EXPR
11806 || TREE_CODE (arg0) == MINUS_EXPR
11807 || TREE_CODE (arg0) == NEGATE_EXPR)
11808 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11809 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11810 {
11811 tree pmop[2];
11812 int which = 0;
11813 wide_int cst0;
11814
11815 /* Now we know that arg0 is (C + D) or (C - D) or
11816 -C and arg1 (M) is == (1LL << cst) - 1.
11817 Store C into PMOP[0] and D into PMOP[1]. */
11818 pmop[0] = TREE_OPERAND (arg0, 0);
11819 pmop[1] = NULL;
11820 if (TREE_CODE (arg0) != NEGATE_EXPR)
11821 {
11822 pmop[1] = TREE_OPERAND (arg0, 1);
11823 which = 1;
11824 }
11825
11826 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11827 which = -1;
11828
11829 for (; which >= 0; which--)
11830 switch (TREE_CODE (pmop[which]))
11831 {
11832 case BIT_AND_EXPR:
11833 case BIT_IOR_EXPR:
11834 case BIT_XOR_EXPR:
11835 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11836 != INTEGER_CST)
11837 break;
11838 cst0 = TREE_OPERAND (pmop[which], 1);
11839 cst0 &= cst1;
11840 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11841 {
11842 if (cst0 != cst1)
11843 break;
11844 }
11845 else if (cst0 != 0)
11846 break;
11847 /* If C or D is of the form (A & N) where
11848 (N & M) == M, or of the form (A | N) or
11849 (A ^ N) where (N & M) == 0, replace it with A. */
11850 pmop[which] = TREE_OPERAND (pmop[which], 0);
11851 break;
11852 case INTEGER_CST:
11853 /* If C or D is an N where (N & M) == 0, it can be
11854 omitted (assumed 0). */
11855 if ((TREE_CODE (arg0) == PLUS_EXPR
11856 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11857 && (cst1 & pmop[which]) == 0)
11858 pmop[which] = NULL;
11859 break;
11860 default:
11861 break;
11862 }
11863
11864 /* Only build anything new if we optimized one or both arguments
11865 above. */
11866 if (pmop[0] != TREE_OPERAND (arg0, 0)
11867 || (TREE_CODE (arg0) != NEGATE_EXPR
11868 && pmop[1] != TREE_OPERAND (arg0, 1)))
11869 {
11870 tree utype = TREE_TYPE (arg0);
11871 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11872 {
11873 /* Perform the operations in a type that has defined
11874 overflow behavior. */
11875 utype = unsigned_type_for (TREE_TYPE (arg0));
11876 if (pmop[0] != NULL)
11877 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11878 if (pmop[1] != NULL)
11879 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11880 }
11881
11882 if (TREE_CODE (arg0) == NEGATE_EXPR)
11883 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11884 else if (TREE_CODE (arg0) == PLUS_EXPR)
11885 {
11886 if (pmop[0] != NULL && pmop[1] != NULL)
11887 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11888 pmop[0], pmop[1]);
11889 else if (pmop[0] != NULL)
11890 tem = pmop[0];
11891 else if (pmop[1] != NULL)
11892 tem = pmop[1];
11893 else
11894 return build_int_cst (type, 0);
11895 }
11896 else if (pmop[0] == NULL)
11897 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11898 else
11899 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11900 pmop[0], pmop[1]);
11901 /* TEM is now the new binary +, - or unary - replacement. */
11902 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11903 fold_convert_loc (loc, utype, arg1));
11904 return fold_convert_loc (loc, type, tem);
11905 }
11906 }
11907 }
11908
11909 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11910 if (t1 != NULL_TREE)
11911 return t1;
11912 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11913 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11914 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11915 {
11916 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11917
11918 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11919 if (mask == -1)
11920 return
11921 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11922 }
11923
11924 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11925
11926 This results in more efficient code for machines without a NOR
11927 instruction. Combine will canonicalize to the first form
11928 which will allow use of NOR instructions provided by the
11929 backend if they exist. */
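 /* Editor's illustrative example (De Morgan, not in the original source):
    for unsigned a and b, ~a & ~b ==> ~(a | b).  */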
11930 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11931 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11932 {
11933 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11934 build2 (BIT_IOR_EXPR, type,
11935 fold_convert_loc (loc, type,
11936 TREE_OPERAND (arg0, 0)),
11937 fold_convert_loc (loc, type,
11938 TREE_OPERAND (arg1, 0))));
11939 }
11940
11941 /* If arg0 is derived from the address of an object or function, we may
11942 be able to fold this expression using the object or function's
11943 alignment. */
11944 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11945 {
11946 unsigned HOST_WIDE_INT modulus, residue;
11947 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11948
11949 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11950 integer_onep (arg1));
11951
11952 /* This works because modulus is a power of 2. If this weren't the
11953 case, we'd have to replace it by its greatest power-of-2
11954 divisor: modulus & -modulus. */
11955 if (low < modulus)
11956 return build_int_cst (type, residue & low);
11957 }
11958
11959 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11960 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11961 if the new mask might be further optimized. */
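 /* Editor's illustrative example (assumed, not in the original source):
    for a 32-bit unsigned x, (x << 2) & 0xfc ==> (x << 2) & 0xff,
    because the two low bits of x << 2 are already zero and the
    widened mask 0xff matches an 8-bit mode's mask, which may
    simplify further.  */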
11962 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11963 || TREE_CODE (arg0) == RSHIFT_EXPR)
11964 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11965 && TREE_CODE (arg1) == INTEGER_CST
11966 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11967 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11968 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11969 < TYPE_PRECISION (TREE_TYPE (arg0))))
11970 {
11971 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11972 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11973 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11974 tree shift_type = TREE_TYPE (arg0);
11975
11976 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11977 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11978 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11979 && TYPE_PRECISION (TREE_TYPE (arg0))
11980 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11981 {
11982 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11983 tree arg00 = TREE_OPERAND (arg0, 0);
11984 /* See if more bits can be proven as zero because of
11985 zero extension. */
11986 if (TREE_CODE (arg00) == NOP_EXPR
11987 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11988 {
11989 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11990 if (TYPE_PRECISION (inner_type)
11991 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11992 && TYPE_PRECISION (inner_type) < prec)
11993 {
11994 prec = TYPE_PRECISION (inner_type);
11995 /* See if we can shorten the right shift. */
11996 if (shiftc < prec)
11997 shift_type = inner_type;
11998 /* Otherwise X >> C1 is all zeros, so we'll optimize
11999 it into (X, 0) later on by making sure zerobits
12000 is all ones. */
12001 }
12002 }
12003 zerobits = ~(unsigned HOST_WIDE_INT) 0;
12004 if (shiftc < prec)
12005 {
12006 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
12007 zerobits <<= prec - shiftc;
12008 }
12009 /* For an arithmetic shift, if the sign bit could be set, zerobits
12010 can actually contain sign bits, so no transformation is
12011 possible, unless MASK masks them all away. In that
12012 case the shift needs to be converted into a logical shift. */
12013 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12014 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12015 {
12016 if ((mask & zerobits) == 0)
12017 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12018 else
12019 zerobits = 0;
12020 }
12021 }
12022
12023 /* ((X << 16) & 0xff00) is (X, 0). */
12024 if ((mask & zerobits) == mask)
12025 return omit_one_operand_loc (loc, type,
12026 build_int_cst (type, 0), arg0);
12027
12028 newmask = mask | zerobits;
12029 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12030 {
12031 /* Only do the transformation if NEWMASK is some integer
12032 mode's mask. */
12033 for (prec = BITS_PER_UNIT;
12034 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12035 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12036 break;
12037 if (prec < HOST_BITS_PER_WIDE_INT
12038 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12039 {
12040 tree newmaskt;
12041
12042 if (shift_type != TREE_TYPE (arg0))
12043 {
12044 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12045 fold_convert_loc (loc, shift_type,
12046 TREE_OPERAND (arg0, 0)),
12047 TREE_OPERAND (arg0, 1));
12048 tem = fold_convert_loc (loc, type, tem);
12049 }
12050 else
12051 tem = op0;
12052 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12053 if (!tree_int_cst_equal (newmaskt, arg1))
12054 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12055 }
12056 }
12057 }
12058
12059 goto associate;
12060
12061 case RDIV_EXPR:
12062 /* Don't touch a floating-point divide by zero unless the mode
12063 of the constant can represent infinity. */
12064 if (TREE_CODE (arg1) == REAL_CST
12065 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12066 && real_zerop (arg1))
12067 return NULL_TREE;
12068
12069 /* Optimize A / A to 1.0 if we don't care about
12070 NaNs or Infinities. Skip the transformation
12071 for non-real operands. */
12072 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12073 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12074 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12075 && operand_equal_p (arg0, arg1, 0))
12076 {
12077 tree r = build_real (TREE_TYPE (arg0), dconst1);
12078
12079 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12080 }
12081
12082 /* The complex version of the above A / A optimization. */
12083 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12084 && operand_equal_p (arg0, arg1, 0))
12085 {
12086 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12087 if (! HONOR_NANS (TYPE_MODE (elem_type))
12088 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12089 {
12090 tree r = build_real (elem_type, dconst1);
12091 /* omit_two_operands will call fold_convert for us. */
12092 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12093 }
12094 }
12095
12096 /* (-A) / (-B) -> A / B */
12097 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12098 return fold_build2_loc (loc, RDIV_EXPR, type,
12099 TREE_OPERAND (arg0, 0),
12100 negate_expr (arg1));
12101 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12102 return fold_build2_loc (loc, RDIV_EXPR, type,
12103 negate_expr (arg0),
12104 TREE_OPERAND (arg1, 0));
12105
12106 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12107 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12108 && real_onep (arg1))
12109 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12110
12111 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12112 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12113 && real_minus_onep (arg1))
12114 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12115 negate_expr (arg0)));
12116
12117 /* If ARG1 is a constant, we can convert this to a multiply by the
12118 reciprocal. This does not have the same rounding properties,
12119 so only do this if -freciprocal-math. We can actually
12120 always safely do it if ARG1 is a power of two, but it's hard to
12121 tell if it is or not in a portable manner. */
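 /* Editor's illustrative examples (assumed, not in the original source):
    with -freciprocal-math, x / 5.0 ==> x * (1.0 / 5.0); x / 4.0 ==>
    x * 0.25 is exact (4.0 is a power of two) and is handled by the
    exact_inverse path below even without the flag.  */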
12122 if (optimize
12123 && (TREE_CODE (arg1) == REAL_CST
12124 || (TREE_CODE (arg1) == COMPLEX_CST
12125 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12126 || (TREE_CODE (arg1) == VECTOR_CST
12127 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12128 {
12129 if (flag_reciprocal_math
12130 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12131 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12132 /* Find the reciprocal if optimizing and the result is exact.
12133 TODO: Complex reciprocal not implemented. */
12134 if (TREE_CODE (arg1) != COMPLEX_CST)
12135 {
12136 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12137
12138 if (inverse)
12139 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12140 }
12141 }
12142 /* Convert A/B/C to A/(B*C). */
12143 if (flag_reciprocal_math
12144 && TREE_CODE (arg0) == RDIV_EXPR)
12145 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12146 fold_build2_loc (loc, MULT_EXPR, type,
12147 TREE_OPERAND (arg0, 1), arg1));
12148
12149 /* Convert A/(B/C) to (A/B)*C. */
12150 if (flag_reciprocal_math
12151 && TREE_CODE (arg1) == RDIV_EXPR)
12152 return fold_build2_loc (loc, MULT_EXPR, type,
12153 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12154 TREE_OPERAND (arg1, 0)),
12155 TREE_OPERAND (arg1, 1));
12156
12157 /* Convert C1/(X*C2) into (C1/C2)/X. */
12158 if (flag_reciprocal_math
12159 && TREE_CODE (arg1) == MULT_EXPR
12160 && TREE_CODE (arg0) == REAL_CST
12161 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12162 {
12163 tree tem = const_binop (RDIV_EXPR, arg0,
12164 TREE_OPERAND (arg1, 1));
12165 if (tem)
12166 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12167 TREE_OPERAND (arg1, 0));
12168 }
12169
12170 if (flag_unsafe_math_optimizations)
12171 {
12172 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12173 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12174
12175 /* Optimize sin(x)/cos(x) as tan(x). */
12176 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12177 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12178 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12179 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12180 CALL_EXPR_ARG (arg1, 0), 0))
12181 {
12182 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12183
12184 if (tanfn != NULL_TREE)
12185 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12186 }
12187
12188 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12189 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12190 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12191 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12192 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12193 CALL_EXPR_ARG (arg1, 0), 0))
12194 {
12195 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12196
12197 if (tanfn != NULL_TREE)
12198 {
12199 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12200 CALL_EXPR_ARG (arg0, 0));
12201 return fold_build2_loc (loc, RDIV_EXPR, type,
12202 build_real (type, dconst1), tmp);
12203 }
12204 }
12205
12206 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12207 NaNs or Infinities. */
12208 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12209 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12210 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12211 {
12212 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12213 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12214
12215 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12216 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12217 && operand_equal_p (arg00, arg01, 0))
12218 {
12219 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12220
12221 if (cosfn != NULL_TREE)
12222 return build_call_expr_loc (loc, cosfn, 1, arg00);
12223 }
12224 }
12225
12226 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12227 NaNs or Infinities. */
12228 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12229 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12230 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12231 {
12232 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12233 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12234
12235 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12236 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12237 && operand_equal_p (arg00, arg01, 0))
12238 {
12239 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12240
12241 if (cosfn != NULL_TREE)
12242 {
12243 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12244 return fold_build2_loc (loc, RDIV_EXPR, type,
12245 build_real (type, dconst1),
12246 tmp);
12247 }
12248 }
12249 }
12250
12251 /* Optimize pow(x,c)/x as pow(x,c-1). */
12252 if (fcode0 == BUILT_IN_POW
12253 || fcode0 == BUILT_IN_POWF
12254 || fcode0 == BUILT_IN_POWL)
12255 {
12256 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12257 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12258 if (TREE_CODE (arg01) == REAL_CST
12259 && !TREE_OVERFLOW (arg01)
12260 && operand_equal_p (arg1, arg00, 0))
12261 {
12262 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12263 REAL_VALUE_TYPE c;
12264 tree arg;
12265
12266 c = TREE_REAL_CST (arg01);
12267 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12268 arg = build_real (type, c);
12269 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12270 }
12271 }
12272
12273 /* Optimize a/root(b/c) into a*root(c/b). */
12274 if (BUILTIN_ROOT_P (fcode1))
12275 {
12276 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12277
12278 if (TREE_CODE (rootarg) == RDIV_EXPR)
12279 {
12280 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12281 tree b = TREE_OPERAND (rootarg, 0);
12282 tree c = TREE_OPERAND (rootarg, 1);
12283
12284 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12285
12286 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12287 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12288 }
12289 }
12290
12291 /* Optimize x/expN(y) into x*expN(-y). */
12292 if (BUILTIN_EXPONENT_P (fcode1))
12293 {
12294 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12295 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12296 arg1 = build_call_expr_loc (loc,
12297 expfn, 1,
12298 fold_convert_loc (loc, type, arg));
12299 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12300 }
12301
12302 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12303 if (fcode1 == BUILT_IN_POW
12304 || fcode1 == BUILT_IN_POWF
12305 || fcode1 == BUILT_IN_POWL)
12306 {
12307 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12308 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12309 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12310 tree neg11 = fold_convert_loc (loc, type,
12311 negate_expr (arg11));
12312 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12313 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12314 }
12315 }
12316 return NULL_TREE;
12317
12318 case TRUNC_DIV_EXPR:
12319 /* Optimize (X & (-A)) / A where A is a power of 2,
12320 to X >> log2(A). */
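 /* Editor's illustrative example (assumed, not in the original source):
    for signed x, (x & -8) / 8 ==> x >> 3, since the AND already
    cleared the three low bits, making the truncating division an
    arithmetic shift even for negative values.  */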
12321 if (TREE_CODE (arg0) == BIT_AND_EXPR
12322 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12323 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12324 {
12325 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12326 arg1, TREE_OPERAND (arg0, 1));
12327 if (sum && integer_zerop (sum))
12328 {
12329 tree pow2 = build_int_cst (integer_type_node, wi::exact_log2 (arg1));
12330 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12331 TREE_OPERAND (arg0, 0), pow2);
12332 }
12333 }
12334
12335 /* Fall through */
12336
12337 case FLOOR_DIV_EXPR:
12338 /* Simplify A / (B << N) where A and B are positive and B is
12339 a power of 2, to A >> (N + log2(B)). */
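 /* Editor's illustrative example (assumed, not in the original source):
    for unsigned a, a / (4 << n) ==> a >> (n + 2).  */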
12340 strict_overflow_p = false;
12341 if (TREE_CODE (arg1) == LSHIFT_EXPR
12342 && (TYPE_UNSIGNED (type)
12343 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12344 {
12345 tree sval = TREE_OPERAND (arg1, 0);
12346 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12347 {
12348 tree sh_cnt = TREE_OPERAND (arg1, 1);
12349 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12350 wi::exact_log2 (sval));
12351
12352 if (strict_overflow_p)
12353 fold_overflow_warning (("assuming signed overflow does not "
12354 "occur when simplifying A / (B << N)"),
12355 WARN_STRICT_OVERFLOW_MISC);
12356
12357 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12358 sh_cnt, pow2);
12359 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12360 fold_convert_loc (loc, type, arg0), sh_cnt);
12361 }
12362 }
12363
12364 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12365 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12366 if (INTEGRAL_TYPE_P (type)
12367 && TYPE_UNSIGNED (type)
12368 && code == FLOOR_DIV_EXPR)
12369 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12370
12371 /* Fall through */
12372
12373 case ROUND_DIV_EXPR:
12374 case CEIL_DIV_EXPR:
12375 case EXACT_DIV_EXPR:
12376 if (integer_onep (arg1))
12377 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12378 if (integer_zerop (arg1))
12379 return NULL_TREE;
12380 /* X / -1 is -X. */
12381 if (!TYPE_UNSIGNED (type)
12382 && TREE_CODE (arg1) == INTEGER_CST
12383 && wi::eq_p (arg1, -1))
12384 return fold_convert_loc (loc, type, negate_expr (arg0));
12385
12386 /* Convert -A / -B to A / B when the type is signed and overflow is
12387 undefined. */
12388 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12389 && TREE_CODE (arg0) == NEGATE_EXPR
12390 && negate_expr_p (arg1))
12391 {
12392 if (INTEGRAL_TYPE_P (type))
12393 fold_overflow_warning (("assuming signed overflow does not occur "
12394 "when distributing negation across "
12395 "division"),
12396 WARN_STRICT_OVERFLOW_MISC);
12397 return fold_build2_loc (loc, code, type,
12398 fold_convert_loc (loc, type,
12399 TREE_OPERAND (arg0, 0)),
12400 fold_convert_loc (loc, type,
12401 negate_expr (arg1)));
12402 }
12403 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12404 && TREE_CODE (arg1) == NEGATE_EXPR
12405 && negate_expr_p (arg0))
12406 {
12407 if (INTEGRAL_TYPE_P (type))
12408 fold_overflow_warning (("assuming signed overflow does not occur "
12409 "when distributing negation across "
12410 "division"),
12411 WARN_STRICT_OVERFLOW_MISC);
12412 return fold_build2_loc (loc, code, type,
12413 fold_convert_loc (loc, type,
12414 negate_expr (arg0)),
12415 fold_convert_loc (loc, type,
12416 TREE_OPERAND (arg1, 0)));
12417 }
12418
12419 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12420 operation, EXACT_DIV_EXPR.
12421
12422 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12423 At one time others generated faster code; it's not clear whether they
12424 still do after the last round of changes to the DIV code in expmed.c. */
12425 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12426 && multiple_of_p (type, arg0, arg1))
12427 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12428
12429 strict_overflow_p = false;
12430 if (TREE_CODE (arg1) == INTEGER_CST
12431 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12432 &strict_overflow_p)))
12433 {
12434 if (strict_overflow_p)
12435 fold_overflow_warning (("assuming signed overflow does not occur "
12436 "when simplifying division"),
12437 WARN_STRICT_OVERFLOW_MISC);
12438 return fold_convert_loc (loc, type, tem);
12439 }
12440
12441 return NULL_TREE;
12442
12443 case CEIL_MOD_EXPR:
12444 case FLOOR_MOD_EXPR:
12445 case ROUND_MOD_EXPR:
12446 case TRUNC_MOD_EXPR:
12447 /* X % 1 is always zero, but be sure to preserve any side
12448 effects in X. */
12449 if (integer_onep (arg1))
12450 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12451
12452 /* For X % 0, return X % 0 unchanged so that we can get the
12453 proper warnings and errors. */
12454 if (integer_zerop (arg1))
12455 return NULL_TREE;
12456
12457 /* 0 % X is always zero, but be sure to preserve any side
12458 effects in X. Place this after checking for X == 0. */
12459 if (integer_zerop (arg0))
12460 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12461
12462 /* X % -1 is zero. */
12463 if (!TYPE_UNSIGNED (type)
12464 && TREE_CODE (arg1) == INTEGER_CST
12465 && wi::eq_p (arg1, -1))
12466 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12467
12468 /* X % -C is the same as X % C. */
12469 if (code == TRUNC_MOD_EXPR
12470 && TYPE_SIGN (type) == SIGNED
12471 && TREE_CODE (arg1) == INTEGER_CST
12472 && !TREE_OVERFLOW (arg1)
12473 && wi::neg_p (arg1)
12474 && !TYPE_OVERFLOW_TRAPS (type)
12475 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12476 && !sign_bit_p (arg1, arg1))
12477 return fold_build2_loc (loc, code, type,
12478 fold_convert_loc (loc, type, arg0),
12479 fold_convert_loc (loc, type,
12480 negate_expr (arg1)));
12481
12482 /* X % -Y is the same as X % Y. */
12483 if (code == TRUNC_MOD_EXPR
12484 && !TYPE_UNSIGNED (type)
12485 && TREE_CODE (arg1) == NEGATE_EXPR
12486 && !TYPE_OVERFLOW_TRAPS (type))
12487 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12488 fold_convert_loc (loc, type,
12489 TREE_OPERAND (arg1, 0)));
12490
12491 strict_overflow_p = false;
12492 if (TREE_CODE (arg1) == INTEGER_CST
12493 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12494 &strict_overflow_p)))
12495 {
12496 if (strict_overflow_p)
12497 fold_overflow_warning (("assuming signed overflow does not occur "
12498 "when simplifying modulus"),
12499 WARN_STRICT_OVERFLOW_MISC);
12500 return fold_convert_loc (loc, type, tem);
12501 }
12502
12503 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12504 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12505 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12506 && (TYPE_UNSIGNED (type)
12507 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12508 {
12509 tree c = arg1;
12510 /* Also optimize A % (C << N) where C is a power of 2,
12511 to A & ((C << N) - 1). */
12512 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12513 c = TREE_OPERAND (arg1, 0);
12514
12515 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12516 {
12517 tree mask
12518 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12519 build_int_cst (TREE_TYPE (arg1), 1));
12520 if (strict_overflow_p)
12521 fold_overflow_warning (("assuming signed overflow does not "
12522 "occur when simplifying "
12523 "X % (power of two)"),
12524 WARN_STRICT_OVERFLOW_MISC);
12525 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12526 fold_convert_loc (loc, type, arg0),
12527 fold_convert_loc (loc, type, mask));
12528 }
12529 }
12530
12531 return NULL_TREE;
12532
12533 case LROTATE_EXPR:
12534 case RROTATE_EXPR:
12535 if (integer_all_onesp (arg0))
12536 return omit_one_operand_loc (loc, type, arg0, arg1);
12537 goto shift;
12538
12539 case RSHIFT_EXPR:
12540 /* Optimize -1 >> x for arithmetic right shifts. */
12541 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12542 && tree_expr_nonnegative_p (arg1))
12543 return omit_one_operand_loc (loc, type, arg0, arg1);
12544 /* ... fall through ... */
12545
12546 case LSHIFT_EXPR:
12547 shift:
12548 if (integer_zerop (arg1))
12549 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12550 if (integer_zerop (arg0))
12551 return omit_one_operand_loc (loc, type, arg0, arg1);
12552
12553 /* Prefer vector1 << scalar to vector1 << vector2
12554 if vector2 is uniform. */
12555 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12556 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12557 return fold_build2_loc (loc, code, type, op0, tem);
12558
12559 /* Since a negative shift count is not well-defined,
12560 don't try to compute it in the compiler. */
12561 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12562 return NULL_TREE;
12563
12564 prec = element_precision (type);
12565
12566 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12567 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12568 && tree_to_uhwi (arg1) < prec
12569 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12570 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12571 {
12572 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12573 + tree_to_uhwi (arg1));
12574
12575 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12576 being well defined. */
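 /* Editor's illustrative example (assumed, not in the original source):
    for a 32-bit int x, (x >> 20) >> 20 has a combined count of
    40 >= 32: a rotate count is reduced mod 32, an unsigned or left
    shift folds to 0, and a signed right shift is clamped to 31.  */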
12577 if (low >= prec)
12578 {
12579 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12580 low = low % prec;
12581 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12582 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12583 TREE_OPERAND (arg0, 0));
12584 else
12585 low = prec - 1;
12586 }
12587
12588 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12589 build_int_cst (TREE_TYPE (arg1), low));
12590 }
12591
12592 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12593 into x & ((unsigned)-1 >> c) for unsigned types. */
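 /* Editor's illustrative examples (assumed, not in the original source):
    (x >> 4) << 4 ==> x & (-1 << 4), i.e. x & ~15; for unsigned x,
    (x << 4) >> 4 ==> x & (~0u >> 4).  */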
12594 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12595 || (TYPE_UNSIGNED (type)
12596 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12597 && tree_fits_uhwi_p (arg1)
12598 && tree_to_uhwi (arg1) < prec
12599 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12600 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12601 {
12602 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12603 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12604 tree lshift;
12605 tree arg00;
12606
12607 if (low0 == low1)
12608 {
12609 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12610
12611 lshift = build_minus_one_cst (type);
12612 lshift = const_binop (code, lshift, arg1);
12613
12614 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12615 }
12616 }
12617
12618 /* Rewrite an LROTATE_EXPR by a constant into an
12619 RROTATE_EXPR by a new constant. */
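 /* Editor's illustrative example (assumed, not in the original source):
    on a 32-bit type, a rotate left by 8 becomes a rotate right
    by 32 - 8 == 24.  */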
12620 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12621 {
12622 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12623 tem = const_binop (MINUS_EXPR, tem, arg1);
12624 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12625 }
12626
12627 /* If we have a rotate of a bit operation with the rotate count and
12628 the second operand of the bit operation both constant,
12629 permute the two operations. */
12630 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12631 && (TREE_CODE (arg0) == BIT_AND_EXPR
12632 || TREE_CODE (arg0) == BIT_IOR_EXPR
12633 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12634 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12635 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12636 fold_build2_loc (loc, code, type,
12637 TREE_OPERAND (arg0, 0), arg1),
12638 fold_build2_loc (loc, code, type,
12639 TREE_OPERAND (arg0, 1), arg1));
12640
12641 /* Two consecutive rotates adding up to some integer
12642 multiple of the precision of the type can be ignored. */
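 /* Editor's illustrative example (assumed, not in the original source):
    on a 32-bit type, rotating right by 12 and then by 20 adds up
    to 32 == prec, so the pair folds away.  */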
12643 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12644 && TREE_CODE (arg0) == RROTATE_EXPR
12645 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12646 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12647 prec) == 0)
12648 return TREE_OPERAND (arg0, 0);
12649
12650 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12651 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12652 if the latter can be further optimized. */
12653 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12654 && TREE_CODE (arg0) == BIT_AND_EXPR
12655 && TREE_CODE (arg1) == INTEGER_CST
12656 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12657 {
12658 tree mask = fold_build2_loc (loc, code, type,
12659 fold_convert_loc (loc, type,
12660 TREE_OPERAND (arg0, 1)),
12661 arg1);
12662 tree shift = fold_build2_loc (loc, code, type,
12663 fold_convert_loc (loc, type,
12664 TREE_OPERAND (arg0, 0)),
12665 arg1);
12666 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12667 if (tem)
12668 return tem;
12669 }
12670
12671 return NULL_TREE;
12672
12673 case MIN_EXPR:
12674 if (operand_equal_p (arg0, arg1, 0))
12675 return omit_one_operand_loc (loc, type, arg0, arg1);
12676 if (INTEGRAL_TYPE_P (type)
12677 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12678 return omit_one_operand_loc (loc, type, arg1, arg0);
12679 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12680 if (tem)
12681 return tem;
12682 goto associate;
12683
12684 case MAX_EXPR:
12685 if (operand_equal_p (arg0, arg1, 0))
12686 return omit_one_operand_loc (loc, type, arg0, arg1);
12687 if (INTEGRAL_TYPE_P (type)
12688 && TYPE_MAX_VALUE (type)
12689 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12690 return omit_one_operand_loc (loc, type, arg1, arg0);
12691 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12692 if (tem)
12693 return tem;
12694 goto associate;
12695
12696 case TRUTH_ANDIF_EXPR:
12697 /* Note that the operands of this must be ints
12698 and their values must be 0 or 1.
12699 ("true" is a fixed value perhaps depending on the language.) */
12700 /* If first arg is constant zero, return it. */
12701 if (integer_zerop (arg0))
12702 return fold_convert_loc (loc, type, arg0);
12703 case TRUTH_AND_EXPR:
12704 /* If either arg is constant true, drop it. */
12705 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12706 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12707 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12708 /* Preserve sequence points. */
12709 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12710 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12711 /* If second arg is constant zero, result is zero, but first arg
12712 must be evaluated. */
12713 if (integer_zerop (arg1))
12714 return omit_one_operand_loc (loc, type, arg1, arg0);
12715 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12716 case will be handled here. */
12717 if (integer_zerop (arg0))
12718 return omit_one_operand_loc (loc, type, arg0, arg1);
12719
12720 /* !X && X is always false. */
12721 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12722 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12723 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12724 /* X && !X is always false. */
12725 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12726 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12727 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12728
12729 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12730 means A >= Y && A != MAX, but in this case we know that
12731 A < X <= MAX. */
12732
12733 if (!TREE_SIDE_EFFECTS (arg0)
12734 && !TREE_SIDE_EFFECTS (arg1))
12735 {
12736 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12737 if (tem && !operand_equal_p (tem, arg0, 0))
12738 return fold_build2_loc (loc, code, type, tem, arg1);
12739
12740 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12741 if (tem && !operand_equal_p (tem, arg1, 0))
12742 return fold_build2_loc (loc, code, type, arg0, tem);
12743 }
12744
12745 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12746 != NULL_TREE)
12747 return tem;
12748
12749 return NULL_TREE;
12750
12751 case TRUTH_ORIF_EXPR:
12752 /* Note that the operands of this must be ints
12753 and their values must be 0 or 1.
12754 ("true" is a fixed value perhaps depending on the language.) */
12755 /* If first arg is constant true, return it. */
12756 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12757 return fold_convert_loc (loc, type, arg0);
12758 case TRUTH_OR_EXPR:
12759 /* If either arg is constant zero, drop it. */
12760 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12761 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12762 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12763 /* Preserve sequence points. */
12764 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12765 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12766 /* If second arg is constant true, result is true, but we must
12767 evaluate first arg. */
12768 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12769 return omit_one_operand_loc (loc, type, arg1, arg0);
12770 /* Likewise for first arg, but note this only occurs here for
12771 TRUTH_OR_EXPR. */
12772 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12773 return omit_one_operand_loc (loc, type, arg0, arg1);
12774
12775 /* !X || X is always true. */
12776 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12777 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12778 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12779 /* X || !X is always true. */
12780 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12781 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12782 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12783
12784 /* (X && !Y) || (!X && Y) is X ^ Y */
12785 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12786 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12787 {
12788 tree a0, a1, l0, l1, n0, n1;
12789
12790 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12791 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12792
12793 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12794 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12795
12796 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12797 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12798
12799 if ((operand_equal_p (n0, a0, 0)
12800 && operand_equal_p (n1, a1, 0))
12801 || (operand_equal_p (n0, a1, 0)
12802 && operand_equal_p (n1, a0, 0)))
12803 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12804 }
12805
12806 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12807 != NULL_TREE)
12808 return tem;
12809
12810 return NULL_TREE;
12811
12812 case TRUTH_XOR_EXPR:
12813 /* If the second arg is constant zero, drop it. */
12814 if (integer_zerop (arg1))
12815 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12816 /* If the second arg is constant true, this is a logical inversion. */
12817 if (integer_onep (arg1))
12818 {
12819 tem = invert_truthvalue_loc (loc, arg0);
12820 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12821 }
12822 /* Identical arguments cancel to zero. */
12823 if (operand_equal_p (arg0, arg1, 0))
12824 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12825
12826 /* !X ^ X is always true. */
12827 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12828 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12829 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12830
12831 /* X ^ !X is always true. */
12832 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12833 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12834 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12835
12836 return NULL_TREE;
12837
12838 case EQ_EXPR:
12839 case NE_EXPR:
12840 STRIP_NOPS (arg0);
12841 STRIP_NOPS (arg1);
12842
12843 tem = fold_comparison (loc, code, type, op0, op1);
12844 if (tem != NULL_TREE)
12845 return tem;
12846
12847 /* bool_var != 0 becomes bool_var. */
12848 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12849 && code == NE_EXPR)
12850 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12851
12852 /* bool_var == 1 becomes bool_var. */
12853 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12854 && code == EQ_EXPR)
12855 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12856
12857 /* bool_var != 1 becomes !bool_var. */
12858 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12859 && code == NE_EXPR)
12860 return fold_convert_loc (loc, type,
12861 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12862 TREE_TYPE (arg0), arg0));
12863
12864 /* bool_var == 0 becomes !bool_var. */
12865 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12866 && code == EQ_EXPR)
12867 return fold_convert_loc (loc, type,
12868 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12869 TREE_TYPE (arg0), arg0));
12870
12871 /* !exp != 0 becomes !exp */
12872 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12873 && code == NE_EXPR)
12874 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12875
12876 /* If this is an equality comparison of the address of two non-weak,
12877 unaliased symbols neither of which are extern (since we do not
12878 have access to attributes for externs), then we know the result. */
12879 if (TREE_CODE (arg0) == ADDR_EXPR
12880 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12881 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12882 && ! lookup_attribute ("alias",
12883 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12884 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12885 && TREE_CODE (arg1) == ADDR_EXPR
12886 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12887 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12888 && ! lookup_attribute ("alias",
12889 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12890 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12891 {
12892 /* We know that we're looking at the address of two
12893 non-weak, unaliased, static _DECL nodes.
12894
12895 It is both wasteful and incorrect to call operand_equal_p
12896 to compare the two ADDR_EXPR nodes. It is wasteful in that
12897 all we need to do is test pointer equality for the arguments
12898 to the two ADDR_EXPR nodes. It is incorrect to use
12899 operand_equal_p as that function is NOT equivalent to a
12900 C equality test. It can in fact return false for two
12901 objects which would test as equal using the C equality
12902 operator. */
12903 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12904 return constant_boolean_node (equal
12905 ? code == EQ_EXPR : code != EQ_EXPR,
12906 type);
12907 }
12908
12909 /* Similarly for a NEGATE_EXPR. */
12910 if (TREE_CODE (arg0) == NEGATE_EXPR
12911 && TREE_CODE (arg1) == INTEGER_CST
12912 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12913 arg1)))
12914 && TREE_CODE (tem) == INTEGER_CST
12915 && !TREE_OVERFLOW (tem))
12916 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12917
12918 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12919 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12920 && TREE_CODE (arg1) == INTEGER_CST
12921 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12922 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12923 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12924 fold_convert_loc (loc,
12925 TREE_TYPE (arg0),
12926 arg1),
12927 TREE_OPERAND (arg0, 1)));
12928
12929 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
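 /* Editor's illustrative examples (assumed, not in the original source):
    (p + n) == p ==> n == 0 for a pointer p, and x - y != x ==>
    y != 0; the discarded operands are preserved for their side
    effects by omit_two_operands_loc below.  */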
12930 if ((TREE_CODE (arg0) == PLUS_EXPR
12931 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12932 || TREE_CODE (arg0) == MINUS_EXPR)
12933 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12934 0)),
12935 arg1, 0)
12936 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12937 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12938 {
12939 tree val = TREE_OPERAND (arg0, 1);
12940 return omit_two_operands_loc (loc, type,
12941 fold_build2_loc (loc, code, type,
12942 val,
12943 build_int_cst (TREE_TYPE (val),
12944 0)),
12945 TREE_OPERAND (arg0, 0), arg1);
12946 }
12947
12948 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
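 /* Editor's illustrative example (assumed, not in the original source):
    (7 - x) == x would require 2 * x == 7, which no integer
    satisfies, so the comparison folds to false (and != to true).  */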
12949 if (TREE_CODE (arg0) == MINUS_EXPR
12950 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12951 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12952 1)),
12953 arg1, 0)
12954 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12955 {
12956 return omit_two_operands_loc (loc, type,
12957 code == NE_EXPR
12958 ? boolean_true_node : boolean_false_node,
12959 TREE_OPERAND (arg0, 1), arg1);
12960 }
12961
12962 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12963 if (TREE_CODE (arg0) == ABS_EXPR
12964 && (integer_zerop (arg1) || real_zerop (arg1)))
12965 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12966
12967 /* If this is an EQ or NE comparison with zero and ARG0 is
12968 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12969 two operations, but the latter can be done in one less insn
12970 on machines that have only two-operand insns or on which a
12971 constant cannot be the first operand. */
12972 if (TREE_CODE (arg0) == BIT_AND_EXPR
12973 && integer_zerop (arg1))
12974 {
12975 tree arg00 = TREE_OPERAND (arg0, 0);
12976 tree arg01 = TREE_OPERAND (arg0, 1);
12977 if (TREE_CODE (arg00) == LSHIFT_EXPR
12978 && integer_onep (TREE_OPERAND (arg00, 0)))
12979 {
12980 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12981 arg01, TREE_OPERAND (arg00, 1));
12982 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12983 build_int_cst (TREE_TYPE (arg0), 1));
12984 return fold_build2_loc (loc, code, type,
12985 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12986 arg1);
12987 }
12988 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12989 && integer_onep (TREE_OPERAND (arg01, 0)))
12990 {
12991 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12992 arg00, TREE_OPERAND (arg01, 1));
12993 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12994 build_int_cst (TREE_TYPE (arg0), 1));
12995 return fold_build2_loc (loc, code, type,
12996 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12997 arg1);
12998 }
12999 }
13000
13001 /* If this is an NE or EQ comparison of zero against the result of a
13002 signed MOD operation whose second operand is a power of 2, make
13003 the MOD operation unsigned since it is simpler and equivalent. */
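 /* Editor's illustrative example (assumed, not in the original source):
    for signed x, x % 4 == 0 ==> (unsigned) x % 4U == 0; the
    remainder is zero exactly when the two low bits of x are.  */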
13004 if (integer_zerop (arg1)
13005 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13006 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13007 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13008 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13009 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13010 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13011 {
13012 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13013 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13014 fold_convert_loc (loc, newtype,
13015 TREE_OPERAND (arg0, 0)),
13016 fold_convert_loc (loc, newtype,
13017 TREE_OPERAND (arg0, 1)));
13018
13019 return fold_build2_loc (loc, code, type, newmod,
13020 fold_convert_loc (loc, newtype, arg1));
13021 }
13022
13023 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13024 C1 is a valid shift constant, and C2 is a power of two, i.e.
13025 a single bit. */
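 /* Editor's illustrative example (assumed, not in the original source):
    ((x >> 3) & 4) != 0 ==> (x & (4 << 3)) != 0, i.e.
    (x & 32) != 0, since 4 << 3 does not overflow.  */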
13026 if (TREE_CODE (arg0) == BIT_AND_EXPR
13027 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13028 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13029 == INTEGER_CST
13030 && integer_pow2p (TREE_OPERAND (arg0, 1))
13031 && integer_zerop (arg1))
13032 {
13033 tree itype = TREE_TYPE (arg0);
13034 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13035 prec = TYPE_PRECISION (itype);
13036
13037 /* Check for a valid shift count. */
13038 if (wi::ltu_p (arg001, prec))
13039 {
13040 tree arg01 = TREE_OPERAND (arg0, 1);
13041 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13042 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13043 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13044 can be rewritten as (X & (C2 << C1)) != 0. */
13045 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13046 {
13047 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13048 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13049 return fold_build2_loc (loc, code, type, tem,
13050 fold_convert_loc (loc, itype, arg1));
13051 }
13052 /* Otherwise, for signed (arithmetic) shifts,
13053 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13054 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13055 else if (!TYPE_UNSIGNED (itype))
13056 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13057 arg000, build_int_cst (itype, 0));
13058 /* Otherwise, for unsigned (logical) shifts,
13059 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13060 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13061 else
13062 return omit_one_operand_loc (loc, type,
13063 code == EQ_EXPR ? integer_one_node
13064 : integer_zero_node,
13065 arg000);
13066 }
13067 }
13068
13069 /* If we have (A & C) == C where C is a power of 2, convert this into
13070 (A & C) != 0. Similarly for NE_EXPR. */
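 /* Editor's illustrative example (assumed, not in the original source):
    (a & 8) == 8 ==> (a & 8) != 0.  */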
13071 if (TREE_CODE (arg0) == BIT_AND_EXPR
13072 && integer_pow2p (TREE_OPERAND (arg0, 1))
13073 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13074 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13075 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13076 integer_zero_node));
13077
13078 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13079 bit, then fold the expression into A < 0 or A >= 0. */
13080 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13081 if (tem)
13082 return tem;
13083
13084 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13085 Similarly for NE_EXPR. */
13086 if (TREE_CODE (arg0) == BIT_AND_EXPR
13087 && TREE_CODE (arg1) == INTEGER_CST
13088 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13089 {
13090 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13091 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13092 TREE_OPERAND (arg0, 1));
13093 tree dandnotc
13094 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13095 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13096 notc);
13097 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13098 if (integer_nonzerop (dandnotc))
13099 return omit_one_operand_loc (loc, type, rslt, arg0);
13100 }
13101
13102 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13103 Similarly for NE_EXPR. */
13104 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13105 && TREE_CODE (arg1) == INTEGER_CST
13106 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13107 {
13108 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13109 tree candnotd
13110 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13111 TREE_OPERAND (arg0, 1),
13112 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13113 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13114 if (integer_nonzerop (candnotd))
13115 return omit_one_operand_loc (loc, type, rslt, arg0);
13116 }
13117
13118 /* If this is a comparison of a field, we may be able to simplify it. */
13119 if ((TREE_CODE (arg0) == COMPONENT_REF
13120 || TREE_CODE (arg0) == BIT_FIELD_REF)
13121 /* Handle the constant case even without -O
13122 to make sure the warnings are given. */
13123 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13124 {
13125 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13126 if (t1)
13127 return t1;
13128 }
13129
13130 /* Optimize comparisons of strlen vs zero to a compare of the
13131 first character of the string vs zero. To wit,
13132 strlen(ptr) == 0 => *ptr == 0
13133 strlen(ptr) != 0 => *ptr != 0
13134 Other cases should reduce to one of these two (or a constant)
13135 due to the return value of strlen being unsigned. */
13136 if (TREE_CODE (arg0) == CALL_EXPR
13137 && integer_zerop (arg1))
13138 {
13139 tree fndecl = get_callee_fndecl (arg0);
13140
13141 if (fndecl
13142 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13143 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13144 && call_expr_nargs (arg0) == 1
13145 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13146 {
13147 tree iref = build_fold_indirect_ref_loc (loc,
13148 CALL_EXPR_ARG (arg0, 0));
13149 return fold_build2_loc (loc, code, type, iref,
13150 build_int_cst (TREE_TYPE (iref), 0));
13151 }
13152 }
13153
13154 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13155 of X. Similarly fold (X >> C) == 0 into X >= 0. */
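 /* Editor's illustrative example (assumed, not in the original source):
    for a 32-bit int x, (x >> 31) != 0 ==> x < 0 and
    (x >> 31) == 0 ==> x >= 0; an unsigned x is first converted
    to the corresponding signed type.  */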
13156 if (TREE_CODE (arg0) == RSHIFT_EXPR
13157 && integer_zerop (arg1)
13158 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13159 {
13160 tree arg00 = TREE_OPERAND (arg0, 0);
13161 tree arg01 = TREE_OPERAND (arg0, 1);
13162 tree itype = TREE_TYPE (arg00);
13163 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
13164 {
13165 if (TYPE_UNSIGNED (itype))
13166 {
13167 itype = signed_type_for (itype);
13168 arg00 = fold_convert_loc (loc, itype, arg00);
13169 }
13170 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13171 type, arg00, build_zero_cst (itype));
13172 }
13173 }
13174
13175 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13176 if (integer_zerop (arg1)
13177 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13178 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13179 TREE_OPERAND (arg0, 1));
13180
13181 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13182 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13183 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13184 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13185 build_zero_cst (TREE_TYPE (arg0)));
13186 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13187 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13188 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13189 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13190 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13191 build_zero_cst (TREE_TYPE (arg0)));
13192
13193 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13194 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13195 && TREE_CODE (arg1) == INTEGER_CST
13196 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13197 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13198 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13199 TREE_OPERAND (arg0, 1), arg1));
13200
13201 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13202 (X & C) == 0 when C is a single bit. */
13203 if (TREE_CODE (arg0) == BIT_AND_EXPR
13204 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13205 && integer_zerop (arg1)
13206 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13207 {
13208 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13209 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13210 TREE_OPERAND (arg0, 1));
13211 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13212 type, tem,
13213 fold_convert_loc (loc, TREE_TYPE (arg0),
13214 arg1));
13215 }
13216
13217 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13218 constant C is a power of two, i.e. a single bit. */
13219 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13220 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13221 && integer_zerop (arg1)
13222 && integer_pow2p (TREE_OPERAND (arg0, 1))
13223 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13224 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13225 {
13226 tree arg00 = TREE_OPERAND (arg0, 0);
13227 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13228 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13229 }
13230
13231 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13232 when C is a power of two, i.e. a single bit. */
13233 if (TREE_CODE (arg0) == BIT_AND_EXPR
13234 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13235 && integer_zerop (arg1)
13236 && integer_pow2p (TREE_OPERAND (arg0, 1))
13237 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13238 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13239 {
13240 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13241 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13242 arg000, TREE_OPERAND (arg0, 1));
13243 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13244 tem, build_int_cst (TREE_TYPE (tem), 0));
13245 }
13246
13247 if (integer_zerop (arg1)
13248 && tree_expr_nonzero_p (arg0))
13249 {
13250 tree res = constant_boolean_node (code == NE_EXPR, type);
13251 return omit_one_operand_loc (loc, type, res, arg0);
13252 }
13253
13254 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13255 if (TREE_CODE (arg0) == NEGATE_EXPR
13256 && TREE_CODE (arg1) == NEGATE_EXPR)
13257 return fold_build2_loc (loc, code, type,
13258 TREE_OPERAND (arg0, 0),
13259 fold_convert_loc (loc, TREE_TYPE (arg0),
13260 TREE_OPERAND (arg1, 0)));
13261
13262 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13263 if (TREE_CODE (arg0) == BIT_AND_EXPR
13264 && TREE_CODE (arg1) == BIT_AND_EXPR)
13265 {
13266 tree arg00 = TREE_OPERAND (arg0, 0);
13267 tree arg01 = TREE_OPERAND (arg0, 1);
13268 tree arg10 = TREE_OPERAND (arg1, 0);
13269 tree arg11 = TREE_OPERAND (arg1, 1);
13270 tree itype = TREE_TYPE (arg0);
13271
13272 if (operand_equal_p (arg01, arg11, 0))
13273 return fold_build2_loc (loc, code, type,
13274 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13275 fold_build2_loc (loc,
13276 BIT_XOR_EXPR, itype,
13277 arg00, arg10),
13278 arg01),
13279 build_zero_cst (itype));
13280
13281 if (operand_equal_p (arg01, arg10, 0))
13282 return fold_build2_loc (loc, code, type,
13283 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13284 fold_build2_loc (loc,
13285 BIT_XOR_EXPR, itype,
13286 arg00, arg11),
13287 arg01),
13288 build_zero_cst (itype));
13289
13290 if (operand_equal_p (arg00, arg11, 0))
13291 return fold_build2_loc (loc, code, type,
13292 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13293 fold_build2_loc (loc,
13294 BIT_XOR_EXPR, itype,
13295 arg01, arg10),
13296 arg00),
13297 build_zero_cst (itype));
13298
13299 if (operand_equal_p (arg00, arg10, 0))
13300 return fold_build2_loc (loc, code, type,
13301 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13302 fold_build2_loc (loc,
13303 BIT_XOR_EXPR, itype,
13304 arg01, arg11),
13305 arg00),
13306 build_zero_cst (itype));
13307 }
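	  /* All four rely on the identity (X & C) == (Y & C)
	     <=> ((X ^ Y) & C) == 0: the masked values agree exactly when
	     X and Y do not differ in any masked bit.  E.g.
	     (x & 0xf0) == (y & 0xf0) folds to ((x ^ y) & 0xf0) == 0.  */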
13308
13309 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13310 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13311 {
13312 tree arg00 = TREE_OPERAND (arg0, 0);
13313 tree arg01 = TREE_OPERAND (arg0, 1);
13314 tree arg10 = TREE_OPERAND (arg1, 0);
13315 tree arg11 = TREE_OPERAND (arg1, 1);
13316 tree itype = TREE_TYPE (arg0);
13317
13318 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13319 operand_equal_p guarantees no side-effects so we don't need
13320 to use omit_one_operand on Z. */
13321 if (operand_equal_p (arg01, arg11, 0))
13322 return fold_build2_loc (loc, code, type, arg00,
13323 fold_convert_loc (loc, TREE_TYPE (arg00),
13324 arg10));
13325 if (operand_equal_p (arg01, arg10, 0))
13326 return fold_build2_loc (loc, code, type, arg00,
13327 fold_convert_loc (loc, TREE_TYPE (arg00),
13328 arg11));
13329 if (operand_equal_p (arg00, arg11, 0))
13330 return fold_build2_loc (loc, code, type, arg01,
13331 fold_convert_loc (loc, TREE_TYPE (arg01),
13332 arg10));
13333 if (operand_equal_p (arg00, arg10, 0))
13334 return fold_build2_loc (loc, code, type, arg01,
13335 fold_convert_loc (loc, TREE_TYPE (arg01),
13336 arg11));
13337
13338 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13339 if (TREE_CODE (arg01) == INTEGER_CST
13340 && TREE_CODE (arg11) == INTEGER_CST)
13341 {
13342 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13343 fold_convert_loc (loc, itype, arg11));
13344 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13345 return fold_build2_loc (loc, code, type, tem,
13346 fold_convert_loc (loc, itype, arg10));
13347 }
13348 }
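	  /* E.g. (X ^ 3) == (Y ^ 5) folds to (X ^ (3 ^ 5)) == Y, i.e.
	     (X ^ 6) == Y, by XOR-ing both sides with 5.  */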
13349
13350 /* Attempt to simplify equality/inequality comparisons of complex
13351 values. Only lower the comparison if the result is known or
13352 can be simplified to a single scalar comparison. */
13353 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13354 || TREE_CODE (arg0) == COMPLEX_CST)
13355 && (TREE_CODE (arg1) == COMPLEX_EXPR
13356 || TREE_CODE (arg1) == COMPLEX_CST))
13357 {
13358 tree real0, imag0, real1, imag1;
13359 tree rcond, icond;
13360
13361 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13362 {
13363 real0 = TREE_OPERAND (arg0, 0);
13364 imag0 = TREE_OPERAND (arg0, 1);
13365 }
13366 else
13367 {
13368 real0 = TREE_REALPART (arg0);
13369 imag0 = TREE_IMAGPART (arg0);
13370 }
13371
13372 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13373 {
13374 real1 = TREE_OPERAND (arg1, 0);
13375 imag1 = TREE_OPERAND (arg1, 1);
13376 }
13377 else
13378 {
13379 real1 = TREE_REALPART (arg1);
13380 imag1 = TREE_IMAGPART (arg1);
13381 }
13382
13383 rcond = fold_binary_loc (loc, code, type, real0, real1);
13384 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13385 {
13386 if (integer_zerop (rcond))
13387 {
13388 if (code == EQ_EXPR)
13389 return omit_two_operands_loc (loc, type, boolean_false_node,
13390 imag0, imag1);
13391 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13392 }
13393 else
13394 {
13395 if (code == NE_EXPR)
13396 return omit_two_operands_loc (loc, type, boolean_true_node,
13397 imag0, imag1);
13398 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13399 }
13400 }
13401
13402 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13403 if (icond && TREE_CODE (icond) == INTEGER_CST)
13404 {
13405 if (integer_zerop (icond))
13406 {
13407 if (code == EQ_EXPR)
13408 return omit_two_operands_loc (loc, type, boolean_false_node,
13409 real0, real1);
13410 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13411 }
13412 else
13413 {
13414 if (code == NE_EXPR)
13415 return omit_two_operands_loc (loc, type, boolean_true_node,
13416 real0, real1);
13417 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13418 }
13419 }
13420 }
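      /* Here z == w requires both parts equal and z != w requires some
	 part unequal, so if the real-part comparison folds to a constant,
	 z == w is either known false or reduces to imag0 == imag1, and
	 z != w is either known true or reduces to imag0 != imag1.  */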
13421
13422 return NULL_TREE;
13423
13424 case LT_EXPR:
13425 case GT_EXPR:
13426 case LE_EXPR:
13427 case GE_EXPR:
13428 tem = fold_comparison (loc, code, type, op0, op1);
13429 if (tem != NULL_TREE)
13430 return tem;
13431
13432 /* Transform comparisons of the form X +- C CMP X. */
13433 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13434 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13435 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13436 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13437 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13438 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13439 {
13440 tree arg01 = TREE_OPERAND (arg0, 1);
13441 enum tree_code code0 = TREE_CODE (arg0);
13442 int is_positive;
13443
13444 if (TREE_CODE (arg01) == REAL_CST)
13445 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13446 else
13447 is_positive = tree_int_cst_sgn (arg01);
13448
13449 /* (X - c) > X becomes false. */
13450 if (code == GT_EXPR
13451 && ((code0 == MINUS_EXPR && is_positive >= 0)
13452 || (code0 == PLUS_EXPR && is_positive <= 0)))
13453 {
13454 if (TREE_CODE (arg01) == INTEGER_CST
13455 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13456 fold_overflow_warning (("assuming signed overflow does not "
13457 "occur when assuming that (X - c) > X "
13458 "is always false"),
13459 WARN_STRICT_OVERFLOW_ALL);
13460 return constant_boolean_node (0, type);
13461 }
13462
13463 /* Likewise (X + c) < X becomes false. */
13464 if (code == LT_EXPR
13465 && ((code0 == PLUS_EXPR && is_positive >= 0)
13466 || (code0 == MINUS_EXPR && is_positive <= 0)))
13467 {
13468 if (TREE_CODE (arg01) == INTEGER_CST
13469 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13470 fold_overflow_warning (("assuming signed overflow does not "
13471 "occur when assuming that "
13472 "(X + c) < X is always false"),
13473 WARN_STRICT_OVERFLOW_ALL);
13474 return constant_boolean_node (0, type);
13475 }
13476
13477 /* Convert (X - c) <= X to true. */
13478 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13479 && code == LE_EXPR
13480 && ((code0 == MINUS_EXPR && is_positive >= 0)
13481 || (code0 == PLUS_EXPR && is_positive <= 0)))
13482 {
13483 if (TREE_CODE (arg01) == INTEGER_CST
13484 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13485 fold_overflow_warning (("assuming signed overflow does not "
13486 "occur when assuming that "
13487 "(X - c) <= X is always true"),
13488 WARN_STRICT_OVERFLOW_ALL);
13489 return constant_boolean_node (1, type);
13490 }
13491
13492 /* Convert (X + c) >= X to true. */
13493 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13494 && code == GE_EXPR
13495 && ((code0 == PLUS_EXPR && is_positive >= 0)
13496 || (code0 == MINUS_EXPR && is_positive <= 0)))
13497 {
13498 if (TREE_CODE (arg01) == INTEGER_CST
13499 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13500 fold_overflow_warning (("assuming signed overflow does not "
13501 "occur when assuming that "
13502 "(X + c) >= X is always true"),
13503 WARN_STRICT_OVERFLOW_ALL);
13504 return constant_boolean_node (1, type);
13505 }
13506
13507 if (TREE_CODE (arg01) == INTEGER_CST)
13508 {
13509 /* Convert X + c > X and X - c < X to true for integers. */
13510 if (code == GT_EXPR
13511 && ((code0 == PLUS_EXPR && is_positive > 0)
13512 || (code0 == MINUS_EXPR && is_positive < 0)))
13513 {
13514 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13515 fold_overflow_warning (("assuming signed overflow does "
13516 "not occur when assuming that "
13517 "(X + c) > X is always true"),
13518 WARN_STRICT_OVERFLOW_ALL);
13519 return constant_boolean_node (1, type);
13520 }
13521
13522 if (code == LT_EXPR
13523 && ((code0 == MINUS_EXPR && is_positive > 0)
13524 || (code0 == PLUS_EXPR && is_positive < 0)))
13525 {
13526 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13527 fold_overflow_warning (("assuming signed overflow does "
13528 "not occur when assuming that "
13529 "(X - c) < X is always true"),
13530 WARN_STRICT_OVERFLOW_ALL);
13531 return constant_boolean_node (1, type);
13532 }
13533
13534 /* Convert X + c <= X and X - c >= X to false for integers. */
13535 if (code == LE_EXPR
13536 && ((code0 == PLUS_EXPR && is_positive > 0)
13537 || (code0 == MINUS_EXPR && is_positive < 0)))
13538 {
13539 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13540 fold_overflow_warning (("assuming signed overflow does "
13541 "not occur when assuming that "
13542 "(X + c) <= X is always false"),
13543 WARN_STRICT_OVERFLOW_ALL);
13544 return constant_boolean_node (0, type);
13545 }
13546
13547 if (code == GE_EXPR
13548 && ((code0 == MINUS_EXPR && is_positive > 0)
13549 || (code0 == PLUS_EXPR && is_positive < 0)))
13550 {
13551 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13552 fold_overflow_warning (("assuming signed overflow does "
13553 "not occur when assuming that "
13554 "(X - c) >= X is always false"),
13555 WARN_STRICT_OVERFLOW_ALL);
13556 return constant_boolean_node (0, type);
13557 }
13558 }
13559 }
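	  /* The TYPE_OVERFLOW_UNDEFINED guard matters here: for unsigned X,
	     X + 1 > X is false when X is the maximal value (the addition
	     wraps to 0), so the integer forms of these folds are only
	     valid when signed overflow is undefined.  */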
13560
13561 /* Comparisons with the highest or lowest possible integer of
13562 the specified precision will have known values. */
13563 {
13564 tree arg1_type = TREE_TYPE (arg1);
13565 unsigned int prec = TYPE_PRECISION (arg1_type);
13566
13567 if (TREE_CODE (arg1) == INTEGER_CST
13568 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13569 {
13570 wide_int max = wi::max_value (arg1_type);
13571 wide_int signed_max = wi::max_value (prec, SIGNED);
13572 wide_int min = wi::min_value (arg1_type);
13573
13574 if (wi::eq_p (arg1, max))
13575 switch (code)
13576 {
13577 case GT_EXPR:
13578 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13579
13580 case GE_EXPR:
13581 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13582
13583 case LE_EXPR:
13584 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13585
13586 case LT_EXPR:
13587 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13588
13589 /* The GE_EXPR and LT_EXPR cases above are not normally
13590 reached because of previous transformations. */
13591
13592 default:
13593 break;
13594 }
13595 else if (wi::eq_p (arg1, max - 1))
13596 switch (code)
13597 {
13598 case GT_EXPR:
13599 arg1 = const_binop (PLUS_EXPR, arg1,
13600 build_int_cst (TREE_TYPE (arg1), 1));
13601 return fold_build2_loc (loc, EQ_EXPR, type,
13602 fold_convert_loc (loc,
13603 TREE_TYPE (arg1), arg0),
13604 arg1);
13605 case LE_EXPR:
13606 arg1 = const_binop (PLUS_EXPR, arg1,
13607 build_int_cst (TREE_TYPE (arg1), 1));
13608 return fold_build2_loc (loc, NE_EXPR, type,
13609 fold_convert_loc (loc, TREE_TYPE (arg1),
13610 arg0),
13611 arg1);
13612 default:
13613 break;
13614 }
13615 else if (wi::eq_p (arg1, min))
13616 switch (code)
13617 {
13618 case LT_EXPR:
13619 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13620
13621 case LE_EXPR:
13622 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13623
13624 case GE_EXPR:
13625 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13626
13627 case GT_EXPR:
13628 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13629
13630 default:
13631 break;
13632 }
13633 else if (wi::eq_p (arg1, min + 1))
13634 switch (code)
13635 {
13636 case GE_EXPR:
13637 arg1 = const_binop (MINUS_EXPR, arg1,
13638 build_int_cst (TREE_TYPE (arg1), 1));
13639 return fold_build2_loc (loc, NE_EXPR, type,
13640 fold_convert_loc (loc,
13641 TREE_TYPE (arg1), arg0),
13642 arg1);
13643 case LT_EXPR:
13644 arg1 = const_binop (MINUS_EXPR, arg1,
13645 build_int_cst (TREE_TYPE (arg1), 1));
13646 return fold_build2_loc (loc, EQ_EXPR, type,
13647 fold_convert_loc (loc, TREE_TYPE (arg1),
13648 arg0),
13649 arg1);
13650 default:
13651 break;
13652 }
13653
13654 else if (wi::eq_p (arg1, signed_max)
13655 && TYPE_UNSIGNED (arg1_type)
13656 /* We will flip the signedness of the comparison operator
13657 associated with the mode of arg1, so the sign bit is
13658 specified by this mode. Check that arg1 is the signed
13659 max associated with this sign bit. */
13660 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13661 /* signed_type does not work on pointer types. */
13662 && INTEGRAL_TYPE_P (arg1_type))
13663 {
13664 /* The following case also applies to X < signed_max+1
13665 and X >= signed_max+1 because of previous transformations. */
13666 if (code == LE_EXPR || code == GT_EXPR)
13667 {
13668 tree st = signed_type_for (arg1_type);
13669 return fold_build2_loc (loc,
13670 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13671 type, fold_convert_loc (loc, st, arg0),
13672 build_int_cst (st, 0));
13673 }
13674 }
13675 }
13676 }
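      /* E.g. for a signed char X (maximum 127): X > 127 folds to false,
	 X < 127 to X != 127, X > 126 to X == 127, and X <= 126 to
	 X != 127.  */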
13677
13678 /* If we are comparing an ABS_EXPR with a constant, we can
13679 convert all the cases into explicit comparisons, but they may
13680 well not be faster than doing the ABS and one comparison.
13681 But ABS (X) <= C is a range comparison, which becomes a subtraction
13682 and a comparison, and is probably faster. */
13683 if (code == LE_EXPR
13684 && TREE_CODE (arg1) == INTEGER_CST
13685 && TREE_CODE (arg0) == ABS_EXPR
13686 && ! TREE_SIDE_EFFECTS (arg0)
13687 && (0 != (tem = negate_expr (arg1)))
13688 && TREE_CODE (tem) == INTEGER_CST
13689 && !TREE_OVERFLOW (tem))
13690 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13691 build2 (GE_EXPR, type,
13692 TREE_OPERAND (arg0, 0), tem),
13693 build2 (LE_EXPR, type,
13694 TREE_OPERAND (arg0, 0), arg1));
13695
13696 /* Convert ABS_EXPR<x> >= 0 to true. */
13697 strict_overflow_p = false;
13698 if (code == GE_EXPR
13699 && (integer_zerop (arg1)
13700 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13701 && real_zerop (arg1)))
13702 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13703 {
13704 if (strict_overflow_p)
13705 fold_overflow_warning (("assuming signed overflow does not occur "
13706 "when simplifying comparison of "
13707 "absolute value and zero"),
13708 WARN_STRICT_OVERFLOW_CONDITIONAL);
13709 return omit_one_operand_loc (loc, type,
13710 constant_boolean_node (true, type),
13711 arg0);
13712 }
13713
13714 /* Convert ABS_EXPR<x> < 0 to false. */
13715 strict_overflow_p = false;
13716 if (code == LT_EXPR
13717 && (integer_zerop (arg1) || real_zerop (arg1))
13718 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13719 {
13720 if (strict_overflow_p)
13721 fold_overflow_warning (("assuming signed overflow does not occur "
13722 "when simplifying comparison of "
13723 "absolute value and zero"),
13724 WARN_STRICT_OVERFLOW_CONDITIONAL);
13725 return omit_one_operand_loc (loc, type,
13726 constant_boolean_node (false, type),
13727 arg0);
13728 }
13729
13730 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13731 and similarly for >= into !=. */
13732 if ((code == LT_EXPR || code == GE_EXPR)
13733 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13734 && TREE_CODE (arg1) == LSHIFT_EXPR
13735 && integer_onep (TREE_OPERAND (arg1, 0)))
13736 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13737 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13738 TREE_OPERAND (arg1, 1)),
13739 build_zero_cst (TREE_TYPE (arg0)));
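      /* For unsigned X, X < (1 << Y) holds exactly when all bits of X at
	 position Y and above are clear, i.e. when X >> Y == 0; e.g.
	 x < (1 << 8) folds to (x >> 8) == 0.  */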
13740
13741 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13742 otherwise Y might be >= # of bits in X's type and thus e.g.
13743 (unsigned char) (1 << Y) for Y == 15 might be 0.
13744 If the cast is widening, then 1 << Y should have unsigned type,
13745 otherwise, if Y is the number of bits in the signed shift type
13746 minus 1, we can't optimize this. E.g. (unsigned long long)
13747 (1 << Y) for Y == 31 might be 0xffffffff80000000. */
13748 if ((code == LT_EXPR || code == GE_EXPR)
13749 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13750 && CONVERT_EXPR_P (arg1)
13751 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13752 && (TYPE_PRECISION (TREE_TYPE (arg1))
13753 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13754 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13755 || (TYPE_PRECISION (TREE_TYPE (arg1))
13756 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13757 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13758 {
13759 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13760 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13761 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13762 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13763 build_zero_cst (TREE_TYPE (arg0)));
13764 }
13765
13766 return NULL_TREE;
13767
13768 case UNORDERED_EXPR:
13769 case ORDERED_EXPR:
13770 case UNLT_EXPR:
13771 case UNLE_EXPR:
13772 case UNGT_EXPR:
13773 case UNGE_EXPR:
13774 case UNEQ_EXPR:
13775 case LTGT_EXPR:
13776 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13777 {
13778 t1 = fold_relational_const (code, type, arg0, arg1);
13779 if (t1 != NULL_TREE)
13780 return t1;
13781 }
13782
13783 /* If the first operand is NaN, the result is constant. */
13784 if (TREE_CODE (arg0) == REAL_CST
13785 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13786 && (code != LTGT_EXPR || ! flag_trapping_math))
13787 {
13788 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13789 ? integer_zero_node
13790 : integer_one_node;
13791 return omit_one_operand_loc (loc, type, t1, arg1);
13792 }
13793
13794 /* If the second operand is NaN, the result is constant. */
13795 if (TREE_CODE (arg1) == REAL_CST
13796 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13797 && (code != LTGT_EXPR || ! flag_trapping_math))
13798 {
13799 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13800 ? integer_zero_node
13801 : integer_one_node;
13802 return omit_one_operand_loc (loc, type, t1, arg0);
13803 }
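      /* With a NaN operand every ordered comparison is false, so ORDERED
	 and LTGT fold to 0 while UNORDERED and the UN* comparisons fold
	 to 1.  */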
13804
13805 /* Simplify unordered comparison of something with itself. */
13806 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13807 && operand_equal_p (arg0, arg1, 0))
13808 return constant_boolean_node (1, type);
13809
13810 if (code == LTGT_EXPR
13811 && !flag_trapping_math
13812 && operand_equal_p (arg0, arg1, 0))
13813 return constant_boolean_node (0, type);
13814
13815 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13816 {
13817 tree targ0 = strip_float_extensions (arg0);
13818 tree targ1 = strip_float_extensions (arg1);
13819 tree newtype = TREE_TYPE (targ0);
13820
13821 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13822 newtype = TREE_TYPE (targ1);
13823
13824 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13825 return fold_build2_loc (loc, code, type,
13826 fold_convert_loc (loc, newtype, targ0),
13827 fold_convert_loc (loc, newtype, targ1));
13828 }
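      /* E.g. with floats f and g, (double) f < (double) g folds to f < g:
	 the widening conversions are exact, so they cannot change the
	 result of the comparison.  */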
13829
13830 return NULL_TREE;
13831
13832 case COMPOUND_EXPR:
13833 /* When pedantic, a compound expression can be neither an lvalue
13834 nor an integer constant expression. */
13835 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13836 return NULL_TREE;
13837 /* Don't let (0, 0) be a null pointer constant. */
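      /* (In C a null pointer constant is an integer constant expression
	 with value 0, possibly cast to void *; a comma expression never
	 qualifies, hence the NOP_EXPR wrapper below.)  */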
13838 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13839 : fold_convert_loc (loc, type, arg1);
13840 return pedantic_non_lvalue_loc (loc, tem);
13841
13842 case COMPLEX_EXPR:
13843 if ((TREE_CODE (arg0) == REAL_CST
13844 && TREE_CODE (arg1) == REAL_CST)
13845 || (TREE_CODE (arg0) == INTEGER_CST
13846 && TREE_CODE (arg1) == INTEGER_CST))
13847 return build_complex (type, arg0, arg1);
13848 if (TREE_CODE (arg0) == REALPART_EXPR
13849 && TREE_CODE (arg1) == IMAGPART_EXPR
13850 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13851 && operand_equal_p (TREE_OPERAND (arg0, 0),
13852 TREE_OPERAND (arg1, 0), 0))
13853 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13854 TREE_OPERAND (arg1, 0));
13855 return NULL_TREE;
13856
13857 case ASSERT_EXPR:
13858 /* An ASSERT_EXPR should never be passed to fold_binary. */
13859 gcc_unreachable ();
13860
13861 case VEC_PACK_TRUNC_EXPR:
13862 case VEC_PACK_FIX_TRUNC_EXPR:
13863 {
13864 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13865 tree *elts;
13866
13867 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13868 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13869 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13870 return NULL_TREE;
13871
13872 elts = XALLOCAVEC (tree, nelts);
13873 if (!vec_cst_ctor_to_array (arg0, elts)
13874 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13875 return NULL_TREE;
13876
13877 for (i = 0; i < nelts; i++)
13878 {
13879 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13880 ? NOP_EXPR : FIX_TRUNC_EXPR,
13881 TREE_TYPE (type), elts[i]);
13882 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13883 return NULL_TREE;
13884 }
13885
13886 return build_vector (type, elts);
13887 }
13888
13889 case VEC_WIDEN_MULT_LO_EXPR:
13890 case VEC_WIDEN_MULT_HI_EXPR:
13891 case VEC_WIDEN_MULT_EVEN_EXPR:
13892 case VEC_WIDEN_MULT_ODD_EXPR:
13893 {
13894 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13895 unsigned int out, ofs, scale;
13896 tree *elts;
13897
13898 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13899 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13900 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13901 return NULL_TREE;
13902
13903 elts = XALLOCAVEC (tree, nelts * 4);
13904 if (!vec_cst_ctor_to_array (arg0, elts)
13905 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13906 return NULL_TREE;
13907
13908 if (code == VEC_WIDEN_MULT_LO_EXPR)
13909 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13910 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13911 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13912 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13913 scale = 1, ofs = 0;
13914 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13915 scale = 1, ofs = 1;
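	/* E.g. for VEC_WIDEN_MULT_EVEN_EXPR with nelts == 4: scale == 1
	   and ofs == 0, so output element I is the product of input
	   elements 2*I of arg0 and arg1, widened to the output element
	   type.  */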
13916
13917 for (out = 0; out < nelts; out++)
13918 {
13919 unsigned int in1 = (out << scale) + ofs;
13920 unsigned int in2 = in1 + nelts * 2;
13921 tree t1, t2;
13922
13923 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13924 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13925
13926 if (t1 == NULL_TREE || t2 == NULL_TREE)
13927 return NULL_TREE;
13928 elts[out] = const_binop (MULT_EXPR, t1, t2);
13929 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13930 return NULL_TREE;
13931 }
13932
13933 return build_vector (type, elts);
13934 }
13935
13936 default:
13937 return NULL_TREE;
13938 } /* switch (code) */
13939 }
13940
13941 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13942 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13943 of GOTO_EXPR. */
13944
13945 static tree
13946 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13947 {
13948 switch (TREE_CODE (*tp))
13949 {
13950 case LABEL_EXPR:
13951 return *tp;
13952
13953 case GOTO_EXPR:
13954 *walk_subtrees = 0;
13955
13956 /* ... fall through ... */
13957
13958 default:
13959 return NULL_TREE;
13960 }
13961 }
13962
13963 /* Return whether the sub-tree ST contains a label which is accessible from
13964 outside the sub-tree. */
13965
13966 static bool
13967 contains_label_p (tree st)
13968 {
13969 return
13970 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13971 }
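/* This guards the COND_EXPR folding below: a branch that contains a label
   may be the target of a goto from outside the branch, so it cannot be
   discarded even when the condition is a known constant.  */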
13972
13973 /* Fold a ternary expression of code CODE and type TYPE with operands
13974 OP0, OP1, and OP2. Return the folded expression if folding is
13975 successful. Otherwise, return NULL_TREE. */
13976
13977 tree
13978 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13979 tree op0, tree op1, tree op2)
13980 {
13981 tree tem;
13982 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13983 enum tree_code_class kind = TREE_CODE_CLASS (code);
13984
13985 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13986 && TREE_CODE_LENGTH (code) == 3);
13987
13988 /* Strip any conversions that don't change the mode. This is safe
13989 for every expression, except for a comparison expression because
13990 its signedness is derived from its operands. So, in the latter
13991 case, only strip conversions that don't change the signedness.
13992
13993 Note that this is done as an internal manipulation within the
13994 constant folder, in order to find the simplest representation of
13995 the arguments so that their form can be studied. In any case,
13996 the appropriate type conversions should be put back in the tree
13997 that will get out of the constant folder. */
13998 if (op0)
13999 {
14000 arg0 = op0;
14001 STRIP_NOPS (arg0);
14002 }
14003
14004 if (op1)
14005 {
14006 arg1 = op1;
14007 STRIP_NOPS (arg1);
14008 }
14009
14010 if (op2)
14011 {
14012 arg2 = op2;
14013 STRIP_NOPS (arg2);
14014 }
14015
14016 switch (code)
14017 {
14018 case COMPONENT_REF:
14019 if (TREE_CODE (arg0) == CONSTRUCTOR
14020 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14021 {
14022 unsigned HOST_WIDE_INT idx;
14023 tree field, value;
14024 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14025 if (field == arg1)
14026 return value;
14027 }
14028 return NULL_TREE;
14029
14030 case COND_EXPR:
14031 case VEC_COND_EXPR:
14032 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14033 so all simple results must be passed through pedantic_non_lvalue. */
14034 if (TREE_CODE (arg0) == INTEGER_CST)
14035 {
14036 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14037 tem = integer_zerop (arg0) ? op2 : op1;
14038 /* Only optimize constant conditions when the selected branch
14039 has the same type as the COND_EXPR. This avoids optimizing
14040 away "c ? x : throw", where the throw has a void type.
14041 Avoid throwing away the operand that contains a label. */
14042 if ((!TREE_SIDE_EFFECTS (unused_op)
14043 || !contains_label_p (unused_op))
14044 && (! VOID_TYPE_P (TREE_TYPE (tem))
14045 || VOID_TYPE_P (type)))
14046 return pedantic_non_lvalue_loc (loc, tem);
14047 return NULL_TREE;
14048 }
14049 else if (TREE_CODE (arg0) == VECTOR_CST)
14050 {
14051 if (integer_all_onesp (arg0))
14052 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14053 if (integer_zerop (arg0))
14054 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14055
14056 if ((TREE_CODE (arg1) == VECTOR_CST
14057 || TREE_CODE (arg1) == CONSTRUCTOR)
14058 && (TREE_CODE (arg2) == VECTOR_CST
14059 || TREE_CODE (arg2) == CONSTRUCTOR))
14060 {
14061 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14062 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14063 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14064 for (i = 0; i < nelts; i++)
14065 {
14066 tree val = VECTOR_CST_ELT (arg0, i);
14067 if (integer_all_onesp (val))
14068 sel[i] = i;
14069 else if (integer_zerop (val))
14070 sel[i] = nelts + i;
14071 else /* Currently unreachable. */
14072 return NULL_TREE;
14073 }
14074 tree t = fold_vec_perm (type, arg1, arg2, sel);
14075 if (t != NULL_TREE)
14076 return t;
14077 }
14078 }
14079
14080 if (operand_equal_p (arg1, op2, 0))
14081 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14082
14083 /* If we have A op B ? A : C, we may be able to convert this to a
14084 simpler expression, depending on the operation and the values
14085 of B and C. Signed zeros prevent all of these transformations,
14086 for reasons given above each one.
14087
14088 Also try swapping the arguments and inverting the conditional. */
14089 if (COMPARISON_CLASS_P (arg0)
14090 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14091 arg1, TREE_OPERAND (arg0, 1))
14092 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14093 {
14094 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14095 if (tem)
14096 return tem;
14097 }
14098
14099 if (COMPARISON_CLASS_P (arg0)
14100 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14101 op2,
14102 TREE_OPERAND (arg0, 1))
14103 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14104 {
14105 location_t loc0 = expr_location_or (arg0, loc);
14106 tem = fold_invert_truthvalue (loc0, arg0);
14107 if (tem && COMPARISON_CLASS_P (tem))
14108 {
14109 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14110 if (tem)
14111 return tem;
14112 }
14113 }
14114
14115 /* If the second operand is simpler than the third, swap them
14116 since that produces better jump optimization results. */
14117 if (truth_value_p (TREE_CODE (arg0))
14118 && tree_swap_operands_p (op1, op2, false))
14119 {
14120 location_t loc0 = expr_location_or (arg0, loc);
14121 /* See if this can be inverted. If it can't, possibly because
14122 it was a floating-point inequality comparison, don't do
14123 anything. */
14124 tem = fold_invert_truthvalue (loc0, arg0);
14125 if (tem)
14126 return fold_build3_loc (loc, code, type, tem, op2, op1);
14127 }
14128
14129 /* Convert A ? 1 : 0 to simply A. */
14130 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14131 : (integer_onep (op1)
14132 && !VECTOR_TYPE_P (type)))
14133 && integer_zerop (op2)
14134 /* If we try to convert OP0 to our type, the
14135 call to fold will try to move the conversion inside
14136 a COND, which will recurse. In that case, the COND_EXPR
14137 is probably the best choice, so leave it alone. */
14138 && type == TREE_TYPE (arg0))
14139 return pedantic_non_lvalue_loc (loc, arg0);
14140
14141 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14142 over COND_EXPR in cases such as floating point comparisons. */
14143 if (integer_zerop (op1)
14144 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14145 : (integer_onep (op2)
14146 && !VECTOR_TYPE_P (type)))
14147 && truth_value_p (TREE_CODE (arg0)))
14148 return pedantic_non_lvalue_loc (loc,
14149 fold_convert_loc (loc, type,
14150 invert_truthvalue_loc (loc,
14151 arg0)));
14152
14153 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14154 if (TREE_CODE (arg0) == LT_EXPR
14155 && integer_zerop (TREE_OPERAND (arg0, 1))
14156 && integer_zerop (op2)
14157 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14158 {
14159 /* sign_bit_p looks through both zero and sign extensions,
14160 but for this optimization only sign extensions are
14161 usable. */
14162 tree tem2 = TREE_OPERAND (arg0, 0);
14163 while (tem != tem2)
14164 {
14165 if (TREE_CODE (tem2) != NOP_EXPR
14166 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14167 {
14168 tem = NULL_TREE;
14169 break;
14170 }
14171 tem2 = TREE_OPERAND (tem2, 0);
14172 }
14173 /* sign_bit_p only checks ARG1 bits within A's precision.
14174 If <sign bit of A> has wider type than A, bits outside
14175 of A's precision in <sign bit of A> need to be checked.
14176 If they are all 0, this optimization needs to be done
14177 in unsigned A's type; if they are all 1, in signed A's type;
14178 otherwise this can't be done. */
14179 if (tem
14180 && TYPE_PRECISION (TREE_TYPE (tem))
14181 < TYPE_PRECISION (TREE_TYPE (arg1))
14182 && TYPE_PRECISION (TREE_TYPE (tem))
14183 < TYPE_PRECISION (type))
14184 {
14185 int inner_width, outer_width;
14186 tree tem_type;
14187
14188 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14189 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14190 if (outer_width > TYPE_PRECISION (type))
14191 outer_width = TYPE_PRECISION (type);
14192
14193 wide_int mask = wi::shifted_mask
14194 (inner_width, outer_width - inner_width, false,
14195 TYPE_PRECISION (TREE_TYPE (arg1)));
14196
14197 wide_int common = mask & arg1;
14198 if (common == mask)
14199 {
14200 tem_type = signed_type_for (TREE_TYPE (tem));
14201 tem = fold_convert_loc (loc, tem_type, tem);
14202 }
14203 else if (common == 0)
14204 {
14205 tem_type = unsigned_type_for (TREE_TYPE (tem));
14206 tem = fold_convert_loc (loc, tem_type, tem);
14207 }
14208 else
14209 tem = NULL;
14210 }
14211
14212 if (tem)
14213 return
14214 fold_convert_loc (loc, type,
14215 fold_build2_loc (loc, BIT_AND_EXPR,
14216 TREE_TYPE (tem), tem,
14217 fold_convert_loc (loc,
14218 TREE_TYPE (tem),
14219 arg1)));
14220 }
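      /* E.g. for a 32-bit signed X, X < 0 ? 0x80000000 : 0 becomes
	 X & 0x80000000, since the sign bit is set exactly when X < 0.  */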
14221
14222 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14223 already handled above. */
14224 if (TREE_CODE (arg0) == BIT_AND_EXPR
14225 && integer_onep (TREE_OPERAND (arg0, 1))
14226 && integer_zerop (op2)
14227 && integer_pow2p (arg1))
14228 {
14229 tree tem = TREE_OPERAND (arg0, 0);
14230 STRIP_NOPS (tem);
14231 if (TREE_CODE (tem) == RSHIFT_EXPR
14232 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14233 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
14234 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
14235 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14236 TREE_OPERAND (tem, 0), arg1);
14237 }
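      /* E.g. ((A >> 3) & 1) ? 8 : 0 tests bit 3 of A and, when it is set,
	 yields 1 << 3, so the whole expression is simply A & 8.  */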
14238
14239 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14240 is probably obsolete because the first operand should be a
14241 truth value (that's why we have the two cases above), but let's
14242 leave it in until we can confirm this for all front-ends. */
14243 if (integer_zerop (op2)
14244 && TREE_CODE (arg0) == NE_EXPR
14245 && integer_zerop (TREE_OPERAND (arg0, 1))
14246 && integer_pow2p (arg1)
14247 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14248 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14249 arg1, OEP_ONLY_CONST))
14250 return pedantic_non_lvalue_loc (loc,
14251 fold_convert_loc (loc, type,
14252 TREE_OPERAND (arg0, 0)));
14253
14254 /* Disable the transformations below for vectors, since
14255 fold_binary_op_with_conditional_arg may undo them immediately,
14256 yielding an infinite loop. */
14257 if (code == VEC_COND_EXPR)
14258 return NULL_TREE;
14259
14260 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14261 if (integer_zerop (op2)
14262 && truth_value_p (TREE_CODE (arg0))
14263 && truth_value_p (TREE_CODE (arg1))
14264 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14265 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14266 : TRUTH_ANDIF_EXPR,
14267 type, fold_convert_loc (loc, type, arg0), arg1);
14268
14269 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14270 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14271 && truth_value_p (TREE_CODE (arg0))
14272 && truth_value_p (TREE_CODE (arg1))
14273 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14274 {
14275 location_t loc0 = expr_location_or (arg0, loc);
14276 /* Only perform transformation if ARG0 is easily inverted. */
14277 tem = fold_invert_truthvalue (loc0, arg0);
14278 if (tem)
14279 return fold_build2_loc (loc, code == VEC_COND_EXPR
14280 ? BIT_IOR_EXPR
14281 : TRUTH_ORIF_EXPR,
14282 type, fold_convert_loc (loc, type, tem),
14283 arg1);
14284 }
14285
14286 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14287 if (integer_zerop (arg1)
14288 && truth_value_p (TREE_CODE (arg0))
14289 && truth_value_p (TREE_CODE (op2))
14290 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14291 {
14292 location_t loc0 = expr_location_or (arg0, loc);
14293 /* Only perform transformation if ARG0 is easily inverted. */
14294 tem = fold_invert_truthvalue (loc0, arg0);
14295 if (tem)
14296 return fold_build2_loc (loc, code == VEC_COND_EXPR
14297 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14298 type, fold_convert_loc (loc, type, tem),
14299 op2);
14300 }
14301
14302 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14303 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14304 && truth_value_p (TREE_CODE (arg0))
14305 && truth_value_p (TREE_CODE (op2))
14306 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14307 return fold_build2_loc (loc, code == VEC_COND_EXPR
14308 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14309 type, fold_convert_loc (loc, type, arg0), op2);
14310
14311 return NULL_TREE;
14312
14313 case CALL_EXPR:
14314 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14315 of fold_ternary on them. */
14316 gcc_unreachable ();
14317
14318 case BIT_FIELD_REF:
14319 if ((TREE_CODE (arg0) == VECTOR_CST
14320 || (TREE_CODE (arg0) == CONSTRUCTOR
14321 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14322 && (type == TREE_TYPE (TREE_TYPE (arg0))
14323 || (TREE_CODE (type) == VECTOR_TYPE
14324 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14325 {
14326 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14327 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14328 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14329 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14330
14331 if (n != 0
14332 && (idx % width) == 0
14333 && (n % width) == 0
14334 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14335 {
14336 idx = idx / width;
14337 n = n / width;
14338
14339 if (TREE_CODE (arg0) == VECTOR_CST)
14340 {
14341 if (n == 1)
14342 return VECTOR_CST_ELT (arg0, idx);
14343
14344 tree *vals = XALLOCAVEC (tree, n);
14345 for (unsigned i = 0; i < n; ++i)
14346 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14347 return build_vector (type, vals);
14348 }
14349
14350 /* Constructor elements can be subvectors. */
14351 unsigned HOST_WIDE_INT k = 1;
14352 if (CONSTRUCTOR_NELTS (arg0) != 0)
14353 {
14354 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14355 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14356 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14357 }
14358
14359 /* We keep an exact subset of the constructor elements. */
14360 if ((idx % k) == 0 && (n % k) == 0)
14361 {
14362 if (CONSTRUCTOR_NELTS (arg0) == 0)
14363 return build_constructor (type, NULL);
14364 idx /= k;
14365 n /= k;
14366 if (n == 1)
14367 {
14368 if (idx < CONSTRUCTOR_NELTS (arg0))
14369 return CONSTRUCTOR_ELT (arg0, idx)->value;
14370 return build_zero_cst (type);
14371 }
14372
14373 vec<constructor_elt, va_gc> *vals;
14374 vec_alloc (vals, n);
14375 for (unsigned i = 0;
14376 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14377 ++i)
14378 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14379 CONSTRUCTOR_ELT
14380 (arg0, idx + i)->value);
14381 return build_constructor (type, vals);
14382 }
14383 /* The bitfield references a single constructor element. */
14384 else if (idx + n <= (idx / k + 1) * k)
14385 {
14386 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14387 return build_zero_cst (type);
14388 else if (n == k)
14389 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14390 else
14391 return fold_build3_loc (loc, code, type,
14392 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14393 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14394 }
14395 }
14396 }
14397
14398 /* A bit-field-ref that referenced the full argument can be stripped. */
14399 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14400 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14401 && integer_zerop (op2))
14402 return fold_convert_loc (loc, type, arg0);
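      /* E.g. BIT_FIELD_REF <X, 32, 0> of a 32-bit integer X selects the
	 whole value and is just a conversion of X to TYPE.  */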
14403
14404 /* On constants we can use native encode/interpret to constant
14405 fold (nearly) all BIT_FIELD_REFs. */
14406 if (CONSTANT_CLASS_P (arg0)
14407 && can_native_interpret_type_p (type)
14408 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14409 /* This limitation should not be necessary; we just need to
14410 round this up to mode size. */
14411 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14412 /* Need bit-shifting of the buffer to relax the following. */
14413 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14414 {
14415 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14416 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14417 unsigned HOST_WIDE_INT clen;
14418 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14419 /* ??? We cannot tell native_encode_expr to start at
14420 some random byte only. So limit us to a reasonable amount
14421 of work. */
14422 if (clen <= 4096)
14423 {
14424 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14425 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14426 if (len > 0
14427 && len * BITS_PER_UNIT >= bitpos + bitsize)
14428 {
14429 tree v = native_interpret_expr (type,
14430 b + bitpos / BITS_PER_UNIT,
14431 bitsize / BITS_PER_UNIT);
14432 if (v)
14433 return v;
14434 }
14435 }
14436 }
14437
14438 return NULL_TREE;
14439
14440 case FMA_EXPR:
14441 /* For integers we can decompose the FMA if possible. */
14442 if (TREE_CODE (arg0) == INTEGER_CST
14443 && TREE_CODE (arg1) == INTEGER_CST)
14444 return fold_build2_loc (loc, PLUS_EXPR, type,
14445 const_binop (MULT_EXPR, arg0, arg1), arg2);
14446 if (integer_zerop (arg2))
14447 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14448
14449 return fold_fma (loc, type, arg0, arg1, arg2);
14450
14451 case VEC_PERM_EXPR:
14452 if (TREE_CODE (arg2) == VECTOR_CST)
14453 {
14454 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14455 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14456 bool need_mask_canon = false;
14457 bool all_in_vec0 = true;
14458 bool all_in_vec1 = true;
14459 bool maybe_identity = true;
14460 bool single_arg = (op0 == op1);
14461 bool changed = false;
14462
14463 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14464 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14465 for (i = 0; i < nelts; i++)
14466 {
14467 tree val = VECTOR_CST_ELT (arg2, i);
14468 if (TREE_CODE (val) != INTEGER_CST)
14469 return NULL_TREE;
14470
14471 /* Make sure that the perm value is in an acceptable
14472 range. */
14473 wide_int t = val;
14474 if (wi::gtu_p (t, mask))
14475 {
14476 need_mask_canon = true;
14477 sel[i] = t.to_uhwi () & mask;
14478 }
14479 else
14480 sel[i] = t.to_uhwi ();
14481
14482 if (sel[i] < nelts)
14483 all_in_vec1 = false;
14484 else
14485 all_in_vec0 = false;
14486
14487 if ((sel[i] & (nelts-1)) != i)
14488 maybe_identity = false;
14489 }
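	  /* A selector value in [0, nelts) picks that element of op0;
	     a value in [nelts, 2*nelts) picks element sel[i] - nelts of
	     op1.  A permutation selecting {0, 1, ..., nelts-1} from a
	     single input is therefore the identity.  */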
14490
14491 if (maybe_identity)
14492 {
14493 if (all_in_vec0)
14494 return op0;
14495 if (all_in_vec1)
14496 return op1;
14497 }
14498
14499 if (all_in_vec0)
14500 op1 = op0;
14501 else if (all_in_vec1)
14502 {
14503 op0 = op1;
14504 for (i = 0; i < nelts; i++)
14505 sel[i] -= nelts;
14506 need_mask_canon = true;
14507 }
14508
14509 if ((TREE_CODE (op0) == VECTOR_CST
14510 || TREE_CODE (op0) == CONSTRUCTOR)
14511 && (TREE_CODE (op1) == VECTOR_CST
14512 || TREE_CODE (op1) == CONSTRUCTOR))
14513 {
14514 tree t = fold_vec_perm (type, op0, op1, sel);
14515 if (t != NULL_TREE)
14516 return t;
14517 }
14518
14519 if (op0 == op1 && !single_arg)
14520 changed = true;
14521
14522 if (need_mask_canon && arg2 == op2)
14523 {
14524 tree *tsel = XALLOCAVEC (tree, nelts);
14525 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14526 for (i = 0; i < nelts; i++)
14527 tsel[i] = build_int_cst (eltype, sel[i]);
14528 op2 = build_vector (TREE_TYPE (arg2), tsel);
14529 changed = true;
14530 }
14531
14532 if (changed)
14533 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14534 }
14535 return NULL_TREE;
14536
14537 default:
14538 return NULL_TREE;
14539 } /* switch (code) */
14540 }
14541
14542 /* Perform constant folding and related simplification of EXPR.
14543 The related simplifications include x*1 => x, x*0 => 0, etc.,
14544 and application of the associative law.
14545 NOP_EXPR conversions may be removed freely (as long as we
14546 are careful not to change the type of the overall expression).
14547 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14548 but we can constant-fold them if they have constant operands. */
14549
14550 #ifdef ENABLE_FOLD_CHECKING
14551 # define fold(x) fold_1 (x)
14552 static tree fold_1 (tree);
14553 static
14554 #endif
14555 tree
14556 fold (tree expr)
14557 {
14558 const tree t = expr;
14559 enum tree_code code = TREE_CODE (t);
14560 enum tree_code_class kind = TREE_CODE_CLASS (code);
14561 tree tem;
14562 location_t loc = EXPR_LOCATION (expr);
14563
14564 /* Return right away if a constant. */
14565 if (kind == tcc_constant)
14566 return t;
14567
14568 /* CALL_EXPR-like objects with variable numbers of operands are
14569 treated specially. */
14570 if (kind == tcc_vl_exp)
14571 {
14572 if (code == CALL_EXPR)
14573 {
14574 tem = fold_call_expr (loc, expr, false);
14575 return tem ? tem : expr;
14576 }
14577 return expr;
14578 }
14579
14580 if (IS_EXPR_CODE_CLASS (kind))
14581 {
14582 tree type = TREE_TYPE (t);
14583 tree op0, op1, op2;
14584
14585 switch (TREE_CODE_LENGTH (code))
14586 {
14587 case 1:
14588 op0 = TREE_OPERAND (t, 0);
14589 tem = fold_unary_loc (loc, code, type, op0);
14590 return tem ? tem : expr;
14591 case 2:
14592 op0 = TREE_OPERAND (t, 0);
14593 op1 = TREE_OPERAND (t, 1);
14594 tem = fold_binary_loc (loc, code, type, op0, op1);
14595 return tem ? tem : expr;
14596 case 3:
14597 op0 = TREE_OPERAND (t, 0);
14598 op1 = TREE_OPERAND (t, 1);
14599 op2 = TREE_OPERAND (t, 2);
14600 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14601 return tem ? tem : expr;
14602 default:
14603 break;
14604 }
14605 }
14606
14607 switch (code)
14608 {
14609 case ARRAY_REF:
14610 {
14611 tree op0 = TREE_OPERAND (t, 0);
14612 tree op1 = TREE_OPERAND (t, 1);
14613
14614 if (TREE_CODE (op1) == INTEGER_CST
14615 && TREE_CODE (op0) == CONSTRUCTOR
14616 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14617 {
14618 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14619 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14620 unsigned HOST_WIDE_INT begin = 0;
14621
14622 /* Find a matching index by means of a binary search. */
14623 while (begin != end)
14624 {
14625 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14626 tree index = (*elts)[middle].index;
14627
14628 if (TREE_CODE (index) == INTEGER_CST
14629 && tree_int_cst_lt (index, op1))
14630 begin = middle + 1;
14631 else if (TREE_CODE (index) == INTEGER_CST
14632 && tree_int_cst_lt (op1, index))
14633 end = middle;
14634 else if (TREE_CODE (index) == RANGE_EXPR
14635 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14636 begin = middle + 1;
14637 else if (TREE_CODE (index) == RANGE_EXPR
14638 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14639 end = middle;
14640 else
14641 return (*elts)[middle].value;
14642 }
14643 }
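	  /* The binary search relies on the constructor's index list being
	     sorted in ascending order; both single INTEGER_CST indexes and
	     RANGE_EXPR spans are handled.  */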
14644
14645 return t;
14646 }
14647
14648 /* Return a VECTOR_CST if possible. */
14649 case CONSTRUCTOR:
14650 {
14651 tree type = TREE_TYPE (t);
14652 if (TREE_CODE (type) != VECTOR_TYPE)
14653 return t;
14654
14655 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14656 unsigned HOST_WIDE_INT idx, pos = 0;
14657 tree value;
14658
14659 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14660 {
14661 if (!CONSTANT_CLASS_P (value))
14662 return t;
14663 if (TREE_CODE (value) == VECTOR_CST)
14664 {
14665 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14666 vec[pos++] = VECTOR_CST_ELT (value, i);
14667 }
14668 else
14669 vec[pos++] = value;
14670 }
14671 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14672 vec[pos] = build_zero_cst (TREE_TYPE (type));
14673
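	/* E.g. a 4-element vector constructor {1, 2} of constants becomes
	   the VECTOR_CST {1, 2, 0, 0}: unspecified trailing elements are
	   implicitly zero.  */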
14674 return build_vector (type, vec);
14675 }
14676
14677 case CONST_DECL:
14678 return fold (DECL_INITIAL (t));
14679
14680 default:
14681 return t;
14682 } /* switch (code) */
14683 }
14684
14685 #ifdef ENABLE_FOLD_CHECKING
14686 #undef fold
14687
14688 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14689 hash_table<pointer_hash<tree_node> > *);
14690 static void fold_check_failed (const_tree, const_tree);
14691 void print_fold_checksum (const_tree);
14692
14693 /* When --enable-checking=fold, compute a digest of expr before
14694 and after the actual fold call to verify that fold did not
14695 accidentally change the original expr. */
14696
14697 tree
14698 fold (tree expr)
14699 {
14700 tree ret;
14701 struct md5_ctx ctx;
14702 unsigned char checksum_before[16], checksum_after[16];
14703 hash_table<pointer_hash<tree_node> > ht (32);
14704
14705 md5_init_ctx (&ctx);
14706 fold_checksum_tree (expr, &ctx, &ht);
14707 md5_finish_ctx (&ctx, checksum_before);
14708 ht.empty ();
14709
14710 ret = fold_1 (expr);
14711
14712 md5_init_ctx (&ctx);
14713 fold_checksum_tree (expr, &ctx, &ht);
14714 md5_finish_ctx (&ctx, checksum_after);
14715
14716 if (memcmp (checksum_before, checksum_after, 16))
14717 fold_check_failed (expr, ret);
14718
14719 return ret;
14720 }
14721
14722 void
14723 print_fold_checksum (const_tree expr)
14724 {
14725 struct md5_ctx ctx;
14726 unsigned char checksum[16], cnt;
14727 hash_table<pointer_hash<tree_node> > ht (32);
14728
14729 md5_init_ctx (&ctx);
14730 fold_checksum_tree (expr, &ctx, &ht);
14731 md5_finish_ctx (&ctx, checksum);
14732 for (cnt = 0; cnt < 16; ++cnt)
14733 fprintf (stderr, "%02x", checksum[cnt]);
14734 putc ('\n', stderr);
14735 }
14736
14737 static void
14738 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14739 {
14740 internal_error ("fold check: original tree changed by fold");
14741 }
14742
14743 static void
14744 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14745 hash_table<pointer_hash<tree_node> > *ht)
14746 {
14747 tree_node **slot;
14748 enum tree_code code;
14749 union tree_node buf;
14750 int i, len;
14751
14752 recursive_label:
14753 if (expr == NULL)
14754 return;
14755 slot = ht->find_slot (expr, INSERT);
14756 if (*slot != NULL)
14757 return;
14758 *slot = CONST_CAST_TREE (expr);
14759 code = TREE_CODE (expr);
14760 if (TREE_CODE_CLASS (code) == tcc_declaration
14761 && DECL_ASSEMBLER_NAME_SET_P (expr))
14762 {
14763 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14764 memcpy ((char *) &buf, expr, tree_size (expr));
14765 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14766 expr = (tree) &buf;
14767 }
14768 else if (TREE_CODE_CLASS (code) == tcc_type
14769 && (TYPE_POINTER_TO (expr)
14770 || TYPE_REFERENCE_TO (expr)
14771 || TYPE_CACHED_VALUES_P (expr)
14772 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14773 || TYPE_NEXT_VARIANT (expr)))
14774 {
14775 /* Allow these fields to be modified. */
14776 tree tmp;
14777 memcpy ((char *) &buf, expr, tree_size (expr));
14778 expr = tmp = (tree) &buf;
14779 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14780 TYPE_POINTER_TO (tmp) = NULL;
14781 TYPE_REFERENCE_TO (tmp) = NULL;
14782 TYPE_NEXT_VARIANT (tmp) = NULL;
14783 if (TYPE_CACHED_VALUES_P (tmp))
14784 {
14785 TYPE_CACHED_VALUES_P (tmp) = 0;
14786 TYPE_CACHED_VALUES (tmp) = NULL;
14787 }
14788 }
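  /* The fields cleared above are lazily populated caches (e.g. building
     a pointer type fills in TYPE_POINTER_TO), so fold may legitimately
     change them; masking them out keeps the digest stable.  */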
14789 md5_process_bytes (expr, tree_size (expr), ctx);
14790 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14791 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14792 if (TREE_CODE_CLASS (code) != tcc_type
14793 && TREE_CODE_CLASS (code) != tcc_declaration
14794 && code != TREE_LIST
14795 && code != SSA_NAME
14796 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14797 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14798 switch (TREE_CODE_CLASS (code))
14799 {
14800 case tcc_constant:
14801 switch (code)
14802 {
14803 case STRING_CST:
14804 md5_process_bytes (TREE_STRING_POINTER (expr),
14805 TREE_STRING_LENGTH (expr), ctx);
14806 break;
14807 case COMPLEX_CST:
14808 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14809 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14810 break;
14811 case VECTOR_CST:
14812 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14813 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14814 break;
14815 default:
14816 break;
14817 }
14818 break;
14819 case tcc_exceptional:
14820 switch (code)
14821 {
14822 case TREE_LIST:
14823 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14824 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14825 expr = TREE_CHAIN (expr);
14826 goto recursive_label;
14827 break;
14828 case TREE_VEC:
14829 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14830 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14831 break;
14832 default:
14833 break;
14834 }
14835 break;
14836 case tcc_expression:
14837 case tcc_reference:
14838 case tcc_comparison:
14839 case tcc_unary:
14840 case tcc_binary:
14841 case tcc_statement:
14842 case tcc_vl_exp:
14843 len = TREE_OPERAND_LENGTH (expr);
14844 for (i = 0; i < len; ++i)
14845 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14846 break;
14847 case tcc_declaration:
14848 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14849 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14850 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14851 {
14852 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14853 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14854 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14855 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14856 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14857 }
14858
14859 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14860 {
14861 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14862 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14863 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14864 }
14865 break;
14866 case tcc_type:
14867 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14868 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14869 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14870 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14871 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14872 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14873 if (INTEGRAL_TYPE_P (expr)
14874 || SCALAR_FLOAT_TYPE_P (expr))
14875 {
14876 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14877 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14878 }
14879 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14880 if (TREE_CODE (expr) == RECORD_TYPE
14881 || TREE_CODE (expr) == UNION_TYPE
14882 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14883 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14884 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14885 break;
14886 default:
14887 break;
14888 }
14889 }
14890
14891 /* Helper function for outputting the checksum of a tree T. When
14892 debugging with gdb, you can "define mynext" to be "next" followed
14893 by "call debug_fold_checksum (op0)", then just trace down till the
14894 outputs differ. */
14895
14896 DEBUG_FUNCTION void
14897 debug_fold_checksum (const_tree t)
14898 {
14899 int i;
14900 unsigned char checksum[16];
14901 struct md5_ctx ctx;
14902 hash_table<pointer_hash<tree_node> > ht (32);
14903
14904 md5_init_ctx (&ctx);
14905 fold_checksum_tree (t, &ctx, &ht);
14906 md5_finish_ctx (&ctx, checksum);
14907 ht.empty ();
14908
14909 for (i = 0; i < 16; i++)
14910 fprintf (stderr, "%d ", checksum[i]);
14911
14912 fprintf (stderr, "\n");
14913 }
14914
14915 #endif
14916
14917 /* Fold a unary tree expression with code CODE of type TYPE with an
14918 operand OP0. LOC is the location of the resulting expression.
14919 Return a folded expression if successful. Otherwise, return a tree
14920 expression with code CODE of type TYPE with an operand OP0. */
14921
14922 tree
14923 fold_build1_stat_loc (location_t loc,
14924 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14925 {
14926 tree tem;
14927 #ifdef ENABLE_FOLD_CHECKING
14928 unsigned char checksum_before[16], checksum_after[16];
14929 struct md5_ctx ctx;
14930 hash_table<pointer_hash<tree_node> > ht (32);
14931
14932 md5_init_ctx (&ctx);
14933 fold_checksum_tree (op0, &ctx, &ht);
14934 md5_finish_ctx (&ctx, checksum_before);
14935 ht.empty ();
14936 #endif
14937
14938 tem = fold_unary_loc (loc, code, type, op0);
14939 if (!tem)
14940 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14941
14942 #ifdef ENABLE_FOLD_CHECKING
14943 md5_init_ctx (&ctx);
14944 fold_checksum_tree (op0, &ctx, &ht);
14945 md5_finish_ctx (&ctx, checksum_after);
14946
14947 if (memcmp (checksum_before, checksum_after, 16))
14948 fold_check_failed (op0, tem);
14949 #endif
14950 return tem;
14951 }
14952
14953 /* Fold a binary tree expression with code CODE of type TYPE with
14954 operands OP0 and OP1. LOC is the location of the resulting
14955 expression. Return a folded expression if successful. Otherwise,
14956 return a tree expression with code CODE of type TYPE with operands
14957 OP0 and OP1. */
14958
14959 tree
14960 fold_build2_stat_loc (location_t loc,
14961 enum tree_code code, tree type, tree op0, tree op1
14962 MEM_STAT_DECL)
14963 {
14964 tree tem;
14965 #ifdef ENABLE_FOLD_CHECKING
14966 unsigned char checksum_before_op0[16],
14967 checksum_before_op1[16],
14968 checksum_after_op0[16],
14969 checksum_after_op1[16];
14970 struct md5_ctx ctx;
14971 hash_table<pointer_hash<tree_node> > ht (32);
14972
14973 md5_init_ctx (&ctx);
14974 fold_checksum_tree (op0, &ctx, &ht);
14975 md5_finish_ctx (&ctx, checksum_before_op0);
14976 ht.empty ();
14977
14978 md5_init_ctx (&ctx);
14979 fold_checksum_tree (op1, &ctx, &ht);
14980 md5_finish_ctx (&ctx, checksum_before_op1);
14981 ht.empty ();
14982 #endif
14983
14984 tem = fold_binary_loc (loc, code, type, op0, op1);
14985 if (!tem)
14986 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14987
14988 #ifdef ENABLE_FOLD_CHECKING
14989 md5_init_ctx (&ctx);
14990 fold_checksum_tree (op0, &ctx, &ht);
14991 md5_finish_ctx (&ctx, checksum_after_op0);
14992 ht.empty ();
14993
14994 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14995 fold_check_failed (op0, tem);
14996
14997 md5_init_ctx (&ctx);
14998 fold_checksum_tree (op1, &ctx, &ht);
14999 md5_finish_ctx (&ctx, checksum_after_op1);
15000
15001 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15002 fold_check_failed (op1, tem);
15003 #endif
15004 return tem;
15005 }
15006
15007 /* Fold a ternary tree expression with code CODE of type TYPE with
15008 operands OP0, OP1, and OP2. Return a folded expression if
15009 successful. Otherwise, return a tree expression with code CODE of
15010 type TYPE with operands OP0, OP1, and OP2. */
15011
15012 tree
15013 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15014 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15015 {
15016 tree tem;
15017 #ifdef ENABLE_FOLD_CHECKING
15018 unsigned char checksum_before_op0[16],
15019 checksum_before_op1[16],
15020 checksum_before_op2[16],
15021 checksum_after_op0[16],
15022 checksum_after_op1[16],
15023 checksum_after_op2[16];
15024 struct md5_ctx ctx;
15025 hash_table<pointer_hash<tree_node> > ht (32);
15026
15027 md5_init_ctx (&ctx);
15028 fold_checksum_tree (op0, &ctx, &ht);
15029 md5_finish_ctx (&ctx, checksum_before_op0);
15030 ht.empty ();
15031
15032 md5_init_ctx (&ctx);
15033 fold_checksum_tree (op1, &ctx, &ht);
15034 md5_finish_ctx (&ctx, checksum_before_op1);
15035 ht.empty ();
15036
15037 md5_init_ctx (&ctx);
15038 fold_checksum_tree (op2, &ctx, &ht);
15039 md5_finish_ctx (&ctx, checksum_before_op2);
15040 ht.empty ();
15041 #endif
15042
15043 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15044 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15045 if (!tem)
15046 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15047
15048 #ifdef ENABLE_FOLD_CHECKING
15049 md5_init_ctx (&ctx);
15050 fold_checksum_tree (op0, &ctx, &ht);
15051 md5_finish_ctx (&ctx, checksum_after_op0);
15052 ht.empty ();
15053
15054 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15055 fold_check_failed (op0, tem);
15056
15057 md5_init_ctx (&ctx);
15058 fold_checksum_tree (op1, &ctx, &ht);
15059 md5_finish_ctx (&ctx, checksum_after_op1);
15060 ht.empty ();
15061
15062 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15063 fold_check_failed (op1, tem);
15064
15065 md5_init_ctx (&ctx);
15066 fold_checksum_tree (op2, &ctx, &ht);
15067 md5_finish_ctx (&ctx, checksum_after_op2);
15068
15069 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15070 fold_check_failed (op2, tem);
15071 #endif
15072 return tem;
15073 }
15074
15075 /* Fold a CALL_EXPR expression of type TYPE with operand FN, NARGS
15076    arguments in ARGARRAY, and a null static chain.
15077 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15078 of type TYPE from the given operands as constructed by build_call_array. */
15079
15080 tree
15081 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15082 int nargs, tree *argarray)
15083 {
15084 tree tem;
15085 #ifdef ENABLE_FOLD_CHECKING
15086 unsigned char checksum_before_fn[16],
15087 checksum_before_arglist[16],
15088 checksum_after_fn[16],
15089 checksum_after_arglist[16];
15090 struct md5_ctx ctx;
15091 hash_table<pointer_hash<tree_node> > ht (32);
15092 int i;
15093
15094 md5_init_ctx (&ctx);
15095 fold_checksum_tree (fn, &ctx, &ht);
15096 md5_finish_ctx (&ctx, checksum_before_fn);
15097 ht.empty ();
15098
15099 md5_init_ctx (&ctx);
15100 for (i = 0; i < nargs; i++)
15101 fold_checksum_tree (argarray[i], &ctx, &ht);
15102 md5_finish_ctx (&ctx, checksum_before_arglist);
15103 ht.empty ();
15104 #endif
15105
15106 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15107
15108 #ifdef ENABLE_FOLD_CHECKING
15109 md5_init_ctx (&ctx);
15110 fold_checksum_tree (fn, &ctx, &ht);
15111 md5_finish_ctx (&ctx, checksum_after_fn);
15112 ht.empty ();
15113
15114 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15115 fold_check_failed (fn, tem);
15116
15117 md5_init_ctx (&ctx);
15118 for (i = 0; i < nargs; i++)
15119 fold_checksum_tree (argarray[i], &ctx, &ht);
15120 md5_finish_ctx (&ctx, checksum_after_arglist);
15121
15122 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15123 fold_check_failed (NULL_TREE, tem);
15124 #endif
15125 return tem;
15126 }
15127
15128 /* Perform constant folding and related simplification of initializer
15129 expression EXPR. These behave identically to "fold_buildN" but ignore
15130 potential run-time traps and exceptions that fold must preserve. */
15131
15132 #define START_FOLD_INIT \
15133 int saved_signaling_nans = flag_signaling_nans;\
15134 int saved_trapping_math = flag_trapping_math;\
15135 int saved_rounding_math = flag_rounding_math;\
15136 int saved_trapv = flag_trapv;\
15137 int saved_folding_initializer = folding_initializer;\
15138 flag_signaling_nans = 0;\
15139 flag_trapping_math = 0;\
15140 flag_rounding_math = 0;\
15141 flag_trapv = 0;\
15142 folding_initializer = 1;
15143
15144 #define END_FOLD_INIT \
15145 flag_signaling_nans = saved_signaling_nans;\
15146 flag_trapping_math = saved_trapping_math;\
15147 flag_rounding_math = saved_rounding_math;\
15148 flag_trapv = saved_trapv;\
15149 folding_initializer = saved_folding_initializer;
15150
15151 tree
15152 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15153 tree type, tree op)
15154 {
15155 tree result;
15156 START_FOLD_INIT;
15157
15158 result = fold_build1_loc (loc, code, type, op);
15159
15160 END_FOLD_INIT;
15161 return result;
15162 }
15163
15164 tree
15165 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15166 tree type, tree op0, tree op1)
15167 {
15168 tree result;
15169 START_FOLD_INIT;
15170
15171 result = fold_build2_loc (loc, code, type, op0, op1);
15172
15173 END_FOLD_INIT;
15174 return result;
15175 }
15176
15177 tree
15178 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15179 int nargs, tree *argarray)
15180 {
15181 tree result;
15182 START_FOLD_INIT;
15183
15184 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15185
15186 END_FOLD_INIT;
15187 return result;
15188 }
15189
15190 #undef START_FOLD_INIT
15191 #undef END_FOLD_INIT
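
/* As a minimal illustration (a sketch, not a call site in this file):
   when folding a static initializer such as "1.0 / 3.0" while
   -frounding-math is in effect, const_binop would normally decline to
   fold the inexact division, but

       fold_build2_initializer_loc (loc, RDIV_EXPR, type, arg0, arg1)

   temporarily clears flag_rounding_math (along with the other flags
   saved by START_FOLD_INIT) so the constant result is produced.  */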
15192
15193 /* Determine if first argument is a multiple of second argument. Return 0 if
15194    it is not, or if we cannot easily determine it to be.
15195
15196 An example of the sort of thing we care about (at this point; this routine
15197 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15198 fold cases do now) is discovering that
15199
15200 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15201
15202 is a multiple of
15203
15204 SAVE_EXPR (J * 8)
15205
15206 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15207
15208 This code also handles discovering that
15209
15210 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15211
15212 is a multiple of 8 so we don't have to worry about dealing with a
15213 possible remainder.
15214
15215 Note that we *look* inside a SAVE_EXPR only to determine how it was
15216 calculated; it is not safe for fold to do much of anything else with the
15217 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15218 at run time. For example, the latter example above *cannot* be implemented
15219 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15220 evaluation time of the original SAVE_EXPR is not necessarily the same at
15221 the time the new expression is evaluated. The only optimization of this
15222 sort that would be valid is changing
15223
15224 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15225
15226 divided by 8 to
15227
15228 SAVE_EXPR (I) * SAVE_EXPR (J)
15229
15230 (where the same SAVE_EXPR (J) is used in the original and the
15231 transformed version). */
15232
15233 int
15234 multiple_of_p (tree type, const_tree top, const_tree bottom)
15235 {
15236 if (operand_equal_p (top, bottom, 0))
15237 return 1;
15238
15239 if (TREE_CODE (type) != INTEGER_TYPE)
15240 return 0;
15241
15242 switch (TREE_CODE (top))
15243 {
15244 case BIT_AND_EXPR:
15245 /* Bitwise and provides a power of two multiple. If the mask is
15246 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15247 if (!integer_pow2p (bottom))
15248 return 0;
15249 /* FALLTHRU */
15250
15251 case MULT_EXPR:
15252 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15253 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15254
15255 case PLUS_EXPR:
15256 case MINUS_EXPR:
15257 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15258 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15259
15260 case LSHIFT_EXPR:
15261 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15262 {
15263 tree op1, t1;
15264
15265 op1 = TREE_OPERAND (top, 1);
15266 /* const_binop may not detect overflow correctly,
15267 so check for it explicitly here. */
15268 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15269 && 0 != (t1 = fold_convert (type,
15270 const_binop (LSHIFT_EXPR,
15271 size_one_node,
15272 op1)))
15273 && !TREE_OVERFLOW (t1))
15274 return multiple_of_p (type, t1, bottom);
15275 }
15276 return 0;
15277
15278 case NOP_EXPR:
15279 /* Can't handle conversions from non-integral or wider integral type. */
15280 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15281 || (TYPE_PRECISION (type)
15282 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15283 return 0;
15284
15285 /* .. fall through ... */
15286
15287 case SAVE_EXPR:
15288 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15289
15290 case COND_EXPR:
15291 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15292 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15293
15294 case INTEGER_CST:
15295 if (TREE_CODE (bottom) != INTEGER_CST
15296 || integer_zerop (bottom)
15297 || (TYPE_UNSIGNED (type)
15298 && (tree_int_cst_sgn (top) < 0
15299 || tree_int_cst_sgn (bottom) < 0)))
15300 return 0;
15301 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15302 SIGNED);
15303
15304 default:
15305 return 0;
15306 }
15307 }
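
/* A worked example of the recursion above, on hypothetical trees:
   with TOP = I * 8 + 16 and BOTTOM = 8, the PLUS_EXPR case requires
   both operands to be multiples of 8; the MULT_EXPR case accepts
   I * 8 because its second operand trivially is, and the INTEGER_CST
   case accepts 16 since wi::multiple_of_p (16, 8) holds, so
   multiple_of_p returns 1.  */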
15308
15309 /* Return true if CODE or TYPE is known to be non-negative. */
15310
15311 static bool
15312 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15313 {
15314 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15315 && truth_value_p (code))
15316 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15317        have a signed:1 type (where the values are -1 and 0).  */
15318 return true;
15319 return false;
15320 }
15321
15322 /* Return true if (CODE OP0) is known to be non-negative. If the return
15323 value is based on the assumption that signed overflow is undefined,
15324 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15325 *STRICT_OVERFLOW_P. */
15326
15327 bool
15328 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15329 bool *strict_overflow_p)
15330 {
15331 if (TYPE_UNSIGNED (type))
15332 return true;
15333
15334 switch (code)
15335 {
15336 case ABS_EXPR:
15337 /* We can't return 1 if flag_wrapv is set because
15338 ABS_EXPR<INT_MIN> = INT_MIN. */
15339 if (!INTEGRAL_TYPE_P (type))
15340 return true;
15341 if (TYPE_OVERFLOW_UNDEFINED (type))
15342 {
15343 *strict_overflow_p = true;
15344 return true;
15345 }
15346 break;
15347
15348 case NON_LVALUE_EXPR:
15349 case FLOAT_EXPR:
15350 case FIX_TRUNC_EXPR:
15351 return tree_expr_nonnegative_warnv_p (op0,
15352 strict_overflow_p);
15353
15354 case NOP_EXPR:
15355 {
15356 tree inner_type = TREE_TYPE (op0);
15357 tree outer_type = type;
15358
15359 if (TREE_CODE (outer_type) == REAL_TYPE)
15360 {
15361 if (TREE_CODE (inner_type) == REAL_TYPE)
15362 return tree_expr_nonnegative_warnv_p (op0,
15363 strict_overflow_p);
15364 if (INTEGRAL_TYPE_P (inner_type))
15365 {
15366 if (TYPE_UNSIGNED (inner_type))
15367 return true;
15368 return tree_expr_nonnegative_warnv_p (op0,
15369 strict_overflow_p);
15370 }
15371 }
15372 else if (INTEGRAL_TYPE_P (outer_type))
15373 {
15374 if (TREE_CODE (inner_type) == REAL_TYPE)
15375 return tree_expr_nonnegative_warnv_p (op0,
15376 strict_overflow_p);
15377 if (INTEGRAL_TYPE_P (inner_type))
15378 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15379 && TYPE_UNSIGNED (inner_type);
15380 }
15381 }
15382 break;
15383
15384 default:
15385 return tree_simple_nonnegative_warnv_p (code, type);
15386 }
15387
15388 /* We don't know sign of `t', so be conservative and return false. */
15389 return false;
15390 }
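
/* For instance, in the NOP_EXPR case above a widening conversion such
   as (int) c, where C has type unsigned char, is known non-negative:
   the inner type is unsigned and its precision (8) is less than that
   of the outer type (32 on typical targets).  */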
15391
15392 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15393 value is based on the assumption that signed overflow is undefined,
15394 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15395 *STRICT_OVERFLOW_P. */
15396
15397 bool
15398 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15399 tree op1, bool *strict_overflow_p)
15400 {
15401 if (TYPE_UNSIGNED (type))
15402 return true;
15403
15404 switch (code)
15405 {
15406 case POINTER_PLUS_EXPR:
15407 case PLUS_EXPR:
15408 if (FLOAT_TYPE_P (type))
15409 return (tree_expr_nonnegative_warnv_p (op0,
15410 strict_overflow_p)
15411 && tree_expr_nonnegative_warnv_p (op1,
15412 strict_overflow_p));
15413
15414 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15415 both unsigned and at least 2 bits shorter than the result. */
15416 if (TREE_CODE (type) == INTEGER_TYPE
15417 && TREE_CODE (op0) == NOP_EXPR
15418 && TREE_CODE (op1) == NOP_EXPR)
15419 {
15420 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15421 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15422 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15423 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15424 {
15425 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15426 TYPE_PRECISION (inner2)) + 1;
15427 return prec < TYPE_PRECISION (type);
15428 }
15429 }
15430 break;
15431
15432 case MULT_EXPR:
15433 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15434 {
15435 /* x * x is always non-negative for floating point x
15436 or without overflow. */
15437 if (operand_equal_p (op0, op1, 0)
15438 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15439 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15440 {
15441 if (TYPE_OVERFLOW_UNDEFINED (type))
15442 *strict_overflow_p = true;
15443 return true;
15444 }
15445 }
15446
15447 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15448          both unsigned and the sum of their precisions is less than
	 that of the result.  */
15449 if (TREE_CODE (type) == INTEGER_TYPE
15450 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15451 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15452 {
15453 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15454 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15455 : TREE_TYPE (op0);
15456 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15457 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15458 : TREE_TYPE (op1);
15459
15460 bool unsigned0 = TYPE_UNSIGNED (inner0);
15461 bool unsigned1 = TYPE_UNSIGNED (inner1);
15462
15463 if (TREE_CODE (op0) == INTEGER_CST)
15464 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15465
15466 if (TREE_CODE (op1) == INTEGER_CST)
15467 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15468
15469 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15470 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15471 {
15472 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15473 ? tree_int_cst_min_precision (op0, UNSIGNED)
15474 : TYPE_PRECISION (inner0);
15475
15476 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15477 ? tree_int_cst_min_precision (op1, UNSIGNED)
15478 : TYPE_PRECISION (inner1);
15479
15480 return precision0 + precision1 < TYPE_PRECISION (type);
15481 }
15482 }
15483 return false;
15484
15485 case BIT_AND_EXPR:
15486 case MAX_EXPR:
15487 return (tree_expr_nonnegative_warnv_p (op0,
15488 strict_overflow_p)
15489 || tree_expr_nonnegative_warnv_p (op1,
15490 strict_overflow_p));
15491
15492 case BIT_IOR_EXPR:
15493 case BIT_XOR_EXPR:
15494 case MIN_EXPR:
15495 case RDIV_EXPR:
15496 case TRUNC_DIV_EXPR:
15497 case CEIL_DIV_EXPR:
15498 case FLOOR_DIV_EXPR:
15499 case ROUND_DIV_EXPR:
15500 return (tree_expr_nonnegative_warnv_p (op0,
15501 strict_overflow_p)
15502 && tree_expr_nonnegative_warnv_p (op1,
15503 strict_overflow_p));
15504
15505 case TRUNC_MOD_EXPR:
15506 case CEIL_MOD_EXPR:
15507 case FLOOR_MOD_EXPR:
15508 case ROUND_MOD_EXPR:
15509 return tree_expr_nonnegative_warnv_p (op0,
15510 strict_overflow_p);
15511 default:
15512 return tree_simple_nonnegative_warnv_p (code, type);
15513 }
15514
15515 /* We don't know sign of `t', so be conservative and return false. */
15516 return false;
15517 }
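
/* Worked examples of the precision tests above, assuming 8-bit
   unsigned char and 32-bit int: for (int) a + (int) b with A and B of
   type unsigned char, prec is MAX (8, 8) + 1 = 9 < 32, so the sum
   cannot reach the sign bit.  For (int) a * (int) b the product needs
   at most 8 + 8 = 16 bits, again below 32, so it too is
   non-negative.  */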
15518
15519 /* Return true if T is known to be non-negative. If the return
15520 value is based on the assumption that signed overflow is undefined,
15521 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15522 *STRICT_OVERFLOW_P. */
15523
15524 bool
15525 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15526 {
15527 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15528 return true;
15529
15530 switch (TREE_CODE (t))
15531 {
15532 case INTEGER_CST:
15533 return tree_int_cst_sgn (t) >= 0;
15534
15535 case REAL_CST:
15536 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15537
15538 case FIXED_CST:
15539 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15540
15541 case COND_EXPR:
15542 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15543 strict_overflow_p)
15544 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15545 strict_overflow_p));
15546 default:
15547 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15548 TREE_TYPE (t));
15549 }
15550 /* We don't know sign of `t', so be conservative and return false. */
15551 return false;
15552 }
15553
15554 /* Return true if T is known to be non-negative. If the return
15555 value is based on the assumption that signed overflow is undefined,
15556 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15557 *STRICT_OVERFLOW_P. */
15558
15559 bool
15560 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15561 tree arg0, tree arg1, bool *strict_overflow_p)
15562 {
15563 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15564 switch (DECL_FUNCTION_CODE (fndecl))
15565 {
15566 CASE_FLT_FN (BUILT_IN_ACOS):
15567 CASE_FLT_FN (BUILT_IN_ACOSH):
15568 CASE_FLT_FN (BUILT_IN_CABS):
15569 CASE_FLT_FN (BUILT_IN_COSH):
15570 CASE_FLT_FN (BUILT_IN_ERFC):
15571 CASE_FLT_FN (BUILT_IN_EXP):
15572 CASE_FLT_FN (BUILT_IN_EXP10):
15573 CASE_FLT_FN (BUILT_IN_EXP2):
15574 CASE_FLT_FN (BUILT_IN_FABS):
15575 CASE_FLT_FN (BUILT_IN_FDIM):
15576 CASE_FLT_FN (BUILT_IN_HYPOT):
15577 CASE_FLT_FN (BUILT_IN_POW10):
15578 CASE_INT_FN (BUILT_IN_FFS):
15579 CASE_INT_FN (BUILT_IN_PARITY):
15580 CASE_INT_FN (BUILT_IN_POPCOUNT):
15581 CASE_INT_FN (BUILT_IN_CLZ):
15582 CASE_INT_FN (BUILT_IN_CLRSB):
15583 case BUILT_IN_BSWAP32:
15584 case BUILT_IN_BSWAP64:
15585 /* Always true. */
15586 return true;
15587
15588 CASE_FLT_FN (BUILT_IN_SQRT):
15589 /* sqrt(-0.0) is -0.0. */
15590 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15591 return true;
15592 return tree_expr_nonnegative_warnv_p (arg0,
15593 strict_overflow_p);
15594
15595 CASE_FLT_FN (BUILT_IN_ASINH):
15596 CASE_FLT_FN (BUILT_IN_ATAN):
15597 CASE_FLT_FN (BUILT_IN_ATANH):
15598 CASE_FLT_FN (BUILT_IN_CBRT):
15599 CASE_FLT_FN (BUILT_IN_CEIL):
15600 CASE_FLT_FN (BUILT_IN_ERF):
15601 CASE_FLT_FN (BUILT_IN_EXPM1):
15602 CASE_FLT_FN (BUILT_IN_FLOOR):
15603 CASE_FLT_FN (BUILT_IN_FMOD):
15604 CASE_FLT_FN (BUILT_IN_FREXP):
15605 CASE_FLT_FN (BUILT_IN_ICEIL):
15606 CASE_FLT_FN (BUILT_IN_IFLOOR):
15607 CASE_FLT_FN (BUILT_IN_IRINT):
15608 CASE_FLT_FN (BUILT_IN_IROUND):
15609 CASE_FLT_FN (BUILT_IN_LCEIL):
15610 CASE_FLT_FN (BUILT_IN_LDEXP):
15611 CASE_FLT_FN (BUILT_IN_LFLOOR):
15612 CASE_FLT_FN (BUILT_IN_LLCEIL):
15613 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15614 CASE_FLT_FN (BUILT_IN_LLRINT):
15615 CASE_FLT_FN (BUILT_IN_LLROUND):
15616 CASE_FLT_FN (BUILT_IN_LRINT):
15617 CASE_FLT_FN (BUILT_IN_LROUND):
15618 CASE_FLT_FN (BUILT_IN_MODF):
15619 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15620 CASE_FLT_FN (BUILT_IN_RINT):
15621 CASE_FLT_FN (BUILT_IN_ROUND):
15622 CASE_FLT_FN (BUILT_IN_SCALB):
15623 CASE_FLT_FN (BUILT_IN_SCALBLN):
15624 CASE_FLT_FN (BUILT_IN_SCALBN):
15625 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15626 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15627 CASE_FLT_FN (BUILT_IN_SINH):
15628 CASE_FLT_FN (BUILT_IN_TANH):
15629 CASE_FLT_FN (BUILT_IN_TRUNC):
15630 /* True if the 1st argument is nonnegative. */
15631 return tree_expr_nonnegative_warnv_p (arg0,
15632 strict_overflow_p);
15633
15634 CASE_FLT_FN (BUILT_IN_FMAX):
15635       /* True if the 1st OR the 2nd argument is nonnegative.  */
15636 return (tree_expr_nonnegative_warnv_p (arg0,
15637 strict_overflow_p)
15638 || (tree_expr_nonnegative_warnv_p (arg1,
15639 strict_overflow_p)));
15640
15641 CASE_FLT_FN (BUILT_IN_FMIN):
15642 /* True if the 1st AND 2nd arguments are nonnegative. */
15643 return (tree_expr_nonnegative_warnv_p (arg0,
15644 strict_overflow_p)
15645 && (tree_expr_nonnegative_warnv_p (arg1,
15646 strict_overflow_p)));
15647
15648 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15649 /* True if the 2nd argument is nonnegative. */
15650 return tree_expr_nonnegative_warnv_p (arg1,
15651 strict_overflow_p);
15652
15653 CASE_FLT_FN (BUILT_IN_POWI):
15654 /* True if the 1st argument is nonnegative or the second
15655 argument is an even integer. */
15656 if (TREE_CODE (arg1) == INTEGER_CST
15657 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15658 return true;
15659 return tree_expr_nonnegative_warnv_p (arg0,
15660 strict_overflow_p);
15661
15662 CASE_FLT_FN (BUILT_IN_POW):
15663 /* True if the 1st argument is nonnegative or the second
15664          argument is an even integer-valued real.  */
15665 if (TREE_CODE (arg1) == REAL_CST)
15666 {
15667 REAL_VALUE_TYPE c;
15668 HOST_WIDE_INT n;
15669
15670 c = TREE_REAL_CST (arg1);
15671 n = real_to_integer (&c);
15672 if ((n & 1) == 0)
15673 {
15674 REAL_VALUE_TYPE cint;
15675 real_from_integer (&cint, VOIDmode, n, SIGNED);
15676 if (real_identical (&c, &cint))
15677 return true;
15678 }
15679 }
15680 return tree_expr_nonnegative_warnv_p (arg0,
15681 strict_overflow_p);
15682
15683 default:
15684 break;
15685 }
15686 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15687 type);
15688 }
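
/* For the BUILT_IN_POW case above: pow (x, 2.0) is known non-negative
   for any X, because 2.0 round-trips through real_to_integer and back
   unchanged and 2 is even; pow (x, 2.5) does not, so it falls back to
   asking whether X itself is non-negative.  */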
15689
15690 /* Return true if T is known to be non-negative. If the return
15691 value is based on the assumption that signed overflow is undefined,
15692 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15693 *STRICT_OVERFLOW_P. */
15694
15695 static bool
15696 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15697 {
15698 enum tree_code code = TREE_CODE (t);
15699 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15700 return true;
15701
15702 switch (code)
15703 {
15704 case TARGET_EXPR:
15705 {
15706 tree temp = TARGET_EXPR_SLOT (t);
15707 t = TARGET_EXPR_INITIAL (t);
15708
15709 /* If the initializer is non-void, then it's a normal expression
15710 that will be assigned to the slot. */
15711 if (!VOID_TYPE_P (t))
15712 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15713
15714 /* Otherwise, the initializer sets the slot in some way. One common
15715 way is an assignment statement at the end of the initializer. */
15716 while (1)
15717 {
15718 if (TREE_CODE (t) == BIND_EXPR)
15719 t = expr_last (BIND_EXPR_BODY (t));
15720 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15721 || TREE_CODE (t) == TRY_CATCH_EXPR)
15722 t = expr_last (TREE_OPERAND (t, 0));
15723 else if (TREE_CODE (t) == STATEMENT_LIST)
15724 t = expr_last (t);
15725 else
15726 break;
15727 }
15728 if (TREE_CODE (t) == MODIFY_EXPR
15729 && TREE_OPERAND (t, 0) == temp)
15730 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15731 strict_overflow_p);
15732
15733 return false;
15734 }
15735
15736 case CALL_EXPR:
15737 {
15738 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15739 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15740
15741 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15742 get_callee_fndecl (t),
15743 arg0,
15744 arg1,
15745 strict_overflow_p);
15746 }
15747 case COMPOUND_EXPR:
15748 case MODIFY_EXPR:
15749 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15750 strict_overflow_p);
15751 case BIND_EXPR:
15752 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15753 strict_overflow_p);
15754 case SAVE_EXPR:
15755 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15756 strict_overflow_p);
15757
15758 default:
15759 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15760 TREE_TYPE (t));
15761 }
15762
15763 /* We don't know sign of `t', so be conservative and return false. */
15764 return false;
15765 }
15766
15767 /* Return true if T is known to be non-negative. If the return
15768 value is based on the assumption that signed overflow is undefined,
15769 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15770 *STRICT_OVERFLOW_P. */
15771
15772 bool
15773 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15774 {
15775 enum tree_code code;
15776 if (t == error_mark_node)
15777 return false;
15778
15779 code = TREE_CODE (t);
15780 switch (TREE_CODE_CLASS (code))
15781 {
15782 case tcc_binary:
15783 case tcc_comparison:
15784 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15785 TREE_TYPE (t),
15786 TREE_OPERAND (t, 0),
15787 TREE_OPERAND (t, 1),
15788 strict_overflow_p);
15789
15790 case tcc_unary:
15791 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15792 TREE_TYPE (t),
15793 TREE_OPERAND (t, 0),
15794 strict_overflow_p);
15795
15796 case tcc_constant:
15797 case tcc_declaration:
15798 case tcc_reference:
15799 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15800
15801 default:
15802 break;
15803 }
15804
15805 switch (code)
15806 {
15807 case TRUTH_AND_EXPR:
15808 case TRUTH_OR_EXPR:
15809 case TRUTH_XOR_EXPR:
15810 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15811 TREE_TYPE (t),
15812 TREE_OPERAND (t, 0),
15813 TREE_OPERAND (t, 1),
15814 strict_overflow_p);
15815 case TRUTH_NOT_EXPR:
15816 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15817 TREE_TYPE (t),
15818 TREE_OPERAND (t, 0),
15819 strict_overflow_p);
15820
15821 case COND_EXPR:
15822 case CONSTRUCTOR:
15823 case OBJ_TYPE_REF:
15824 case ASSERT_EXPR:
15825 case ADDR_EXPR:
15826 case WITH_SIZE_EXPR:
15827 case SSA_NAME:
15828 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15829
15830 default:
15831 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15832 }
15833 }
15834
15835 /* Return true if `t' is known to be non-negative. Handle warnings
15836 about undefined signed overflow. */
15837
15838 bool
15839 tree_expr_nonnegative_p (tree t)
15840 {
15841 bool ret, strict_overflow_p;
15842
15843 strict_overflow_p = false;
15844 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15845 if (strict_overflow_p)
15846 fold_overflow_warning (("assuming signed overflow does not occur when "
15847 "determining that expression is always "
15848 "non-negative"),
15849 WARN_STRICT_OVERFLOW_MISC);
15850 return ret;
15851 }
15852
15853
15854 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15855 For floating point we further ensure that T is not denormal.
15856    Similar logic is present in nonzero_address in rtlanal.c.
15857
15858 If the return value is based on the assumption that signed overflow
15859 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15860 change *STRICT_OVERFLOW_P. */
15861
15862 bool
15863 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15864 bool *strict_overflow_p)
15865 {
15866 switch (code)
15867 {
15868 case ABS_EXPR:
15869 return tree_expr_nonzero_warnv_p (op0,
15870 strict_overflow_p);
15871
15872 case NOP_EXPR:
15873 {
15874 tree inner_type = TREE_TYPE (op0);
15875 tree outer_type = type;
15876
15877 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15878 && tree_expr_nonzero_warnv_p (op0,
15879 strict_overflow_p));
15880 }
15881 break;
15882
15883 case NON_LVALUE_EXPR:
15884 return tree_expr_nonzero_warnv_p (op0,
15885 strict_overflow_p);
15886
15887 default:
15888 break;
15889 }
15890
15891 return false;
15892 }
15893
15894 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15895 For floating point we further ensure that T is not denormal.
15896    Similar logic is present in nonzero_address in rtlanal.c.
15897
15898 If the return value is based on the assumption that signed overflow
15899 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15900 change *STRICT_OVERFLOW_P. */
15901
15902 bool
15903 tree_binary_nonzero_warnv_p (enum tree_code code,
15904 tree type,
15905 tree op0,
15906 tree op1, bool *strict_overflow_p)
15907 {
15908 bool sub_strict_overflow_p;
15909 switch (code)
15910 {
15911 case POINTER_PLUS_EXPR:
15912 case PLUS_EXPR:
15913 if (TYPE_OVERFLOW_UNDEFINED (type))
15914 {
15915 /* With the presence of negative values it is hard
15916 to say something. */
15917 sub_strict_overflow_p = false;
15918 if (!tree_expr_nonnegative_warnv_p (op0,
15919 &sub_strict_overflow_p)
15920 || !tree_expr_nonnegative_warnv_p (op1,
15921 &sub_strict_overflow_p))
15922 return false;
15923          /* One of the operands must be positive and the other non-negative.  */
15924 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15925 overflows, on a twos-complement machine the sum of two
15926 nonnegative numbers can never be zero. */
15927 return (tree_expr_nonzero_warnv_p (op0,
15928 strict_overflow_p)
15929 || tree_expr_nonzero_warnv_p (op1,
15930 strict_overflow_p));
15931 }
15932 break;
15933
15934 case MULT_EXPR:
15935 if (TYPE_OVERFLOW_UNDEFINED (type))
15936 {
15937 if (tree_expr_nonzero_warnv_p (op0,
15938 strict_overflow_p)
15939 && tree_expr_nonzero_warnv_p (op1,
15940 strict_overflow_p))
15941 {
15942 *strict_overflow_p = true;
15943 return true;
15944 }
15945 }
15946 break;
15947
15948 case MIN_EXPR:
15949 sub_strict_overflow_p = false;
15950 if (tree_expr_nonzero_warnv_p (op0,
15951 &sub_strict_overflow_p)
15952 && tree_expr_nonzero_warnv_p (op1,
15953 &sub_strict_overflow_p))
15954 {
15955 if (sub_strict_overflow_p)
15956 *strict_overflow_p = true;
15957 }
15958 break;
15959
15960 case MAX_EXPR:
15961 sub_strict_overflow_p = false;
15962 if (tree_expr_nonzero_warnv_p (op0,
15963 &sub_strict_overflow_p))
15964 {
15965 if (sub_strict_overflow_p)
15966 *strict_overflow_p = true;
15967
15968 /* When both operands are nonzero, then MAX must be too. */
15969 if (tree_expr_nonzero_warnv_p (op1,
15970 strict_overflow_p))
15971 return true;
15972
15973 /* MAX where operand 0 is positive is positive. */
15974 return tree_expr_nonnegative_warnv_p (op0,
15975 strict_overflow_p);
15976 }
15977 /* MAX where operand 1 is positive is positive. */
15978 else if (tree_expr_nonzero_warnv_p (op1,
15979 &sub_strict_overflow_p)
15980 && tree_expr_nonnegative_warnv_p (op1,
15981 &sub_strict_overflow_p))
15982 {
15983 if (sub_strict_overflow_p)
15984 *strict_overflow_p = true;
15985 return true;
15986 }
15987 break;
15988
15989 case BIT_IOR_EXPR:
15990 return (tree_expr_nonzero_warnv_p (op1,
15991 strict_overflow_p)
15992 || tree_expr_nonzero_warnv_p (op0,
15993 strict_overflow_p));
15994
15995 default:
15996 break;
15997 }
15998
15999 return false;
16000 }
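
/* For example, in the PLUS_EXPR case above: if I is known
   non-negative and signed overflow is undefined for its type, then
   I + 1 is known nonzero, since on a twos-complement machine the sum
   of a non-negative and a positive value can only be zero by
   wrapping.  */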
16001
16002 /* Return true when T is an address and is known to be nonzero.
16003 For floating point we further ensure that T is not denormal.
16004    Similar logic is present in nonzero_address in rtlanal.c.
16005
16006 If the return value is based on the assumption that signed overflow
16007 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16008 change *STRICT_OVERFLOW_P. */
16009
16010 bool
16011 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16012 {
16013 bool sub_strict_overflow_p;
16014 switch (TREE_CODE (t))
16015 {
16016 case INTEGER_CST:
16017 return !integer_zerop (t);
16018
16019 case ADDR_EXPR:
16020 {
16021 tree base = TREE_OPERAND (t, 0);
16022 if (!DECL_P (base))
16023 base = get_base_address (base);
16024
16025 if (!base)
16026 return false;
16027
16028         /* Weak declarations may link to NULL.  Other things may also be NULL,
16029            so protect with -fdelete-null-pointer-checks; variables allocated
16030            on the stack, however, can never be NULL.  */
16031 if (DECL_P (base)
16032 && (flag_delete_null_pointer_checks
16033 || (DECL_CONTEXT (base)
16034 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16035 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16036 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16037
16038 /* Constants are never weak. */
16039 if (CONSTANT_CLASS_P (base))
16040 return true;
16041
16042 return false;
16043 }
16044
16045 case COND_EXPR:
16046 sub_strict_overflow_p = false;
16047 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16048 &sub_strict_overflow_p)
16049 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16050 &sub_strict_overflow_p))
16051 {
16052 if (sub_strict_overflow_p)
16053 *strict_overflow_p = true;
16054 return true;
16055 }
16056 break;
16057
16058 default:
16059 break;
16060 }
16061 return false;
16062 }
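
/* Under the ADDR_EXPR case above, the address of a local (stack)
   variable is always nonzero, and so is the address of an ordinary
   global when -fdelete-null-pointer-checks is enabled; the address of
   a declaration marked __attribute__ ((weak)) is not, since a weak
   symbol may resolve to NULL.  */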
16063
16064 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16065 attempt to fold the expression to a constant without modifying TYPE,
16066 OP0 or OP1.
16067
16068 If the expression could be simplified to a constant, then return
16069 the constant. If the expression would not be simplified to a
16070 constant, then return NULL_TREE. */
16071
16072 tree
16073 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16074 {
16075 tree tem = fold_binary (code, type, op0, op1);
16076 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16077 }
16078
16079 /* Given the components of a unary expression CODE, TYPE and OP0,
16080 attempt to fold the expression to a constant without modifying
16081 TYPE or OP0.
16082
16083 If the expression could be simplified to a constant, then return
16084 the constant. If the expression would not be simplified to a
16085 constant, then return NULL_TREE. */
16086
16087 tree
16088 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16089 {
16090 tree tem = fold_unary (code, type, op0);
16091 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16092 }
16093
16094 /* If EXP represents referencing an element in a constant string
16095 (either via pointer arithmetic or array indexing), return the
16096 tree representing the value accessed, otherwise return NULL. */
16097
16098 tree
16099 fold_read_from_constant_string (tree exp)
16100 {
16101 if ((TREE_CODE (exp) == INDIRECT_REF
16102 || TREE_CODE (exp) == ARRAY_REF)
16103 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16104 {
16105 tree exp1 = TREE_OPERAND (exp, 0);
16106 tree index;
16107 tree string;
16108 location_t loc = EXPR_LOCATION (exp);
16109
16110 if (TREE_CODE (exp) == INDIRECT_REF)
16111 string = string_constant (exp1, &index);
16112 else
16113 {
16114 tree low_bound = array_ref_low_bound (exp);
16115 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16116
16117          /* Optimize the special case of a zero lower bound.
16118
16119             We convert the low_bound to sizetype to avoid some problems
16120             with constant folding.  (E.g. suppose the lower bound is 1,
16121             and its mode is QI.  Without the conversion, (ARRAY
16122             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16123             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
16124 if (! integer_zerop (low_bound))
16125 index = size_diffop_loc (loc, index,
16126 fold_convert_loc (loc, sizetype, low_bound));
16127
16128 string = exp1;
16129 }
16130
16131 if (string
16132 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16133 && TREE_CODE (string) == STRING_CST
16134 && TREE_CODE (index) == INTEGER_CST
16135 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16136 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16137 == MODE_INT)
16138 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16139 return build_int_cst_type (TREE_TYPE (exp),
16140 (TREE_STRING_POINTER (string)
16141 [TREE_INT_CST_LOW (index)]));
16142 }
16143 return NULL;
16144 }
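
/* For example, given the C expression "abc"[1] -- an ARRAY_REF into a
   STRING_CST with constant index 1 -- the function above returns the
   character constant 'b' in the element type: the index lies within
   TREE_STRING_LENGTH and the element mode is a one-byte integer
   mode.  */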
16145
16146 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16147 an integer constant, real, or fixed-point constant.
16148
16149 TYPE is the type of the result. */
16150
16151 static tree
16152 fold_negate_const (tree arg0, tree type)
16153 {
16154 tree t = NULL_TREE;
16155
16156 switch (TREE_CODE (arg0))
16157 {
16158 case INTEGER_CST:
16159 {
16160 bool overflow;
16161 wide_int val = wi::neg (arg0, &overflow);
16162 t = force_fit_type (type, val, 1,
16163 (overflow | TREE_OVERFLOW (arg0))
16164 && !TYPE_UNSIGNED (type));
16165 break;
16166 }
16167
16168 case REAL_CST:
16169 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16170 break;
16171
16172 case FIXED_CST:
16173 {
16174 FIXED_VALUE_TYPE f;
16175 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16176 &(TREE_FIXED_CST (arg0)), NULL,
16177 TYPE_SATURATING (type));
16178 t = build_fixed (type, f);
16179 /* Propagate overflow flags. */
16180 if (overflow_p | TREE_OVERFLOW (arg0))
16181 TREE_OVERFLOW (t) = 1;
16182 break;
16183 }
16184
16185 default:
16186 gcc_unreachable ();
16187 }
16188
16189 return t;
16190 }
16191
16192 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16193 an integer constant or real constant.
16194
16195 TYPE is the type of the result. */
16196
16197 tree
16198 fold_abs_const (tree arg0, tree type)
16199 {
16200 tree t = NULL_TREE;
16201
16202 switch (TREE_CODE (arg0))
16203 {
16204 case INTEGER_CST:
16205 {
16206 /* If the value is unsigned or non-negative, then the absolute value
16207 is the same as the ordinary value. */
16208 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
16209 t = arg0;
16210
16211 /* If the value is negative, then the absolute value is
16212 its negation. */
16213 else
16214 {
16215 bool overflow;
16216 wide_int val = wi::neg (arg0, &overflow);
16217 t = force_fit_type (type, val, -1,
16218 overflow | TREE_OVERFLOW (arg0));
16219 }
16220 }
16221 break;
16222
16223 case REAL_CST:
16224 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16225 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16226 else
16227 t = arg0;
16228 break;
16229
16230 default:
16231 gcc_unreachable ();
16232 }
16233
16234 return t;
16235 }
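
/* Note the overflow handling above: for a 32-bit signed type,
   fold_abs_const on INT_MIN negates to a value that does not fit, so
   force_fit_type is called with the overflow flag set and the result
   carries TREE_OVERFLOW rather than silently wrapping back to
   INT_MIN.  */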
16236
16237 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16238 constant. TYPE is the type of the result. */
16239
16240 static tree
16241 fold_not_const (const_tree arg0, tree type)
16242 {
16243 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16244
16245 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
16246 }
16247
16248 /* Given CODE, a relational operator, the target type, TYPE and two
16249 constant operands OP0 and OP1, return the result of the
16250 relational operation. If the result is not a compile time
16251 constant, then return NULL_TREE. */
16252
16253 static tree
16254 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16255 {
16256 int result, invert;
16257
16258 /* From here on, the only cases we handle are when the result is
16259 known to be a constant. */
16260
16261 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16262 {
16263 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16264 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16265
16266 /* Handle the cases where either operand is a NaN. */
16267 if (real_isnan (c0) || real_isnan (c1))
16268 {
16269 switch (code)
16270 {
16271 case EQ_EXPR:
16272 case ORDERED_EXPR:
16273 result = 0;
16274 break;
16275
16276 case NE_EXPR:
16277 case UNORDERED_EXPR:
16278 case UNLT_EXPR:
16279 case UNLE_EXPR:
16280 case UNGT_EXPR:
16281 case UNGE_EXPR:
16282 case UNEQ_EXPR:
16283 result = 1;
16284 break;
16285
16286 case LT_EXPR:
16287 case LE_EXPR:
16288 case GT_EXPR:
16289 case GE_EXPR:
16290 case LTGT_EXPR:
16291 if (flag_trapping_math)
16292 return NULL_TREE;
16293 result = 0;
16294 break;
16295
16296 default:
16297 gcc_unreachable ();
16298 }
16299
16300 return constant_boolean_node (result, type);
16301 }
16302
16303 return constant_boolean_node (real_compare (code, c0, c1), type);
16304 }
16305
16306 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16307 {
16308 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16309 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16310 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16311 }
16312
16313 /* Handle equality/inequality of complex constants. */
16314 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16315 {
16316 tree rcond = fold_relational_const (code, type,
16317 TREE_REALPART (op0),
16318 TREE_REALPART (op1));
16319 tree icond = fold_relational_const (code, type,
16320 TREE_IMAGPART (op0),
16321 TREE_IMAGPART (op1));
16322 if (code == EQ_EXPR)
16323 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16324 else if (code == NE_EXPR)
16325 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16326 else
16327 return NULL_TREE;
16328 }
16329
16330 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16331 {
16332 unsigned count = VECTOR_CST_NELTS (op0);
16333 tree *elts = XALLOCAVEC (tree, count);
16334 gcc_assert (VECTOR_CST_NELTS (op1) == count
16335 && TYPE_VECTOR_SUBPARTS (type) == count);
16336
16337 for (unsigned i = 0; i < count; i++)
16338 {
16339 tree elem_type = TREE_TYPE (type);
16340 tree elem0 = VECTOR_CST_ELT (op0, i);
16341 tree elem1 = VECTOR_CST_ELT (op1, i);
16342
16343 tree tem = fold_relational_const (code, elem_type,
16344 elem0, elem1);
16345
16346 if (tem == NULL_TREE)
16347 return NULL_TREE;
16348
16349 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16350 }
16351
16352 return build_vector (type, elts);
16353 }
16354
16355 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16356
16357 To compute GT, swap the arguments and do LT.
16358 To compute GE, do LT and invert the result.
16359 To compute LE, swap the arguments, do LT and invert the result.
16360 To compute NE, do EQ and invert the result.
16361
16362 Therefore, the code below must handle only EQ and LT. */
16363
16364 if (code == LE_EXPR || code == GT_EXPR)
16365 {
16366 tree tem = op0;
16367 op0 = op1;
16368 op1 = tem;
16369 code = swap_tree_comparison (code);
16370 }
16371
16372 /* Note that it is safe to invert for real values here because we
16373      have already handled the one case where it matters.  */
16374
16375 invert = 0;
16376 if (code == NE_EXPR || code == GE_EXPR)
16377 {
16378 invert = 1;
16379 code = invert_tree_comparison (code, false);
16380 }
16381
16382 /* Compute a result for LT or EQ if args permit;
16383      otherwise return NULL_TREE.  */
16384 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16385 {
16386 if (code == EQ_EXPR)
16387 result = tree_int_cst_equal (op0, op1);
16388 else
16389 result = tree_int_cst_lt (op0, op1);
16390 }
16391 else
16392 return NULL_TREE;
16393
16394 if (invert)
16395 result ^= 1;
16396 return constant_boolean_node (result, type);
16397 }
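
/* As an illustration of the VECTOR_CST case above: folding
   {1, 5} < {2, 3} with LT_EXPR compares each lane separately and
   encodes the boolean results as all-ones or zero, yielding the mask
   vector {-1, 0}.  */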
16398
16399 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16400 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16401 itself. */
16402
16403 tree
16404 fold_build_cleanup_point_expr (tree type, tree expr)
16405 {
16406 /* If the expression does not have side effects then we don't have to wrap
16407 it with a cleanup point expression. */
16408 if (!TREE_SIDE_EFFECTS (expr))
16409 return expr;
16410
16411   /* If the expression is a return, check whether the expression inside the
16412      return, or the right-hand side of the modify expression inside the
16413      return, has no side effects.  If either has none, we don't need to
16414      wrap the expression in a cleanup point expression.  Note we don't check
16415      the left-hand side of the modify because it should always be a return decl.  */
16416 if (TREE_CODE (expr) == RETURN_EXPR)
16417 {
16418 tree op = TREE_OPERAND (expr, 0);
16419 if (!op || !TREE_SIDE_EFFECTS (op))
16420 return expr;
16421 op = TREE_OPERAND (op, 1);
16422 if (!TREE_SIDE_EFFECTS (op))
16423 return expr;
16424 }
16425
16426 return build1 (CLEANUP_POINT_EXPR, type, expr);
16427 }
16428
16429 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16430 of an indirection through OP0, or NULL_TREE if no simplification is
16431 possible. */
16432
16433 tree
16434 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16435 {
16436 tree sub = op0;
16437 tree subtype;
16438
16439 STRIP_NOPS (sub);
16440 subtype = TREE_TYPE (sub);
16441 if (!POINTER_TYPE_P (subtype))
16442 return NULL_TREE;
16443
16444 if (TREE_CODE (sub) == ADDR_EXPR)
16445 {
16446 tree op = TREE_OPERAND (sub, 0);
16447 tree optype = TREE_TYPE (op);
16448 /* *&CONST_DECL -> to the value of the const decl. */
16449 if (TREE_CODE (op) == CONST_DECL)
16450 return DECL_INITIAL (op);
16451 /* *&p => p; make sure to handle *&"str"[cst] here. */
16452 if (type == optype)
16453 {
16454 tree fop = fold_read_from_constant_string (op);
16455 if (fop)
16456 return fop;
16457 else
16458 return op;
16459 }
16460 /* *(foo *)&fooarray => fooarray[0] */
16461 else if (TREE_CODE (optype) == ARRAY_TYPE
16462 && type == TREE_TYPE (optype)
16463 && (!in_gimple_form
16464 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16465 {
16466 tree type_domain = TYPE_DOMAIN (optype);
16467 tree min_val = size_zero_node;
16468 if (type_domain && TYPE_MIN_VALUE (type_domain))
16469 min_val = TYPE_MIN_VALUE (type_domain);
16470 if (in_gimple_form
16471 && TREE_CODE (min_val) != INTEGER_CST)
16472 return NULL_TREE;
16473 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16474 NULL_TREE, NULL_TREE);
16475 }
16476 /* *(foo *)&complexfoo => __real__ complexfoo */
16477 else if (TREE_CODE (optype) == COMPLEX_TYPE
16478 && type == TREE_TYPE (optype))
16479 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16480 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16481 else if (TREE_CODE (optype) == VECTOR_TYPE
16482 && type == TREE_TYPE (optype))
16483 {
16484 tree part_width = TYPE_SIZE (type);
16485 tree index = bitsize_int (0);
16486 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16487 }
16488 }
16489
16490 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16491 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16492 {
16493 tree op00 = TREE_OPERAND (sub, 0);
16494 tree op01 = TREE_OPERAND (sub, 1);
16495
16496 STRIP_NOPS (op00);
16497 if (TREE_CODE (op00) == ADDR_EXPR)
16498 {
16499 tree op00type;
16500 op00 = TREE_OPERAND (op00, 0);
16501 op00type = TREE_TYPE (op00);
16502
16503 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16504 if (TREE_CODE (op00type) == VECTOR_TYPE
16505 && type == TREE_TYPE (op00type))
16506 {
16507 HOST_WIDE_INT offset = tree_to_shwi (op01);
16508 tree part_width = TYPE_SIZE (type);
16509 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
16510 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16511 tree index = bitsize_int (indexi);
16512
16513 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16514 return fold_build3_loc (loc,
16515 BIT_FIELD_REF, type, op00,
16516 part_width, index);
16517
16518 }
16519 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16520 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16521 && type == TREE_TYPE (op00type))
16522 {
16523 tree size = TYPE_SIZE_UNIT (type);
16524 if (tree_int_cst_equal (size, op01))
16525 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16526 }
16527 /* ((foo *)&fooarray)[1] => fooarray[1] */
16528 else if (TREE_CODE (op00type) == ARRAY_TYPE
16529 && type == TREE_TYPE (op00type))
16530 {
16531 tree type_domain = TYPE_DOMAIN (op00type);
16532 tree min_val = size_zero_node;
16533 if (type_domain && TYPE_MIN_VALUE (type_domain))
16534 min_val = TYPE_MIN_VALUE (type_domain);
16535 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16536 TYPE_SIZE_UNIT (type));
16537 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16538 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16539 NULL_TREE, NULL_TREE);
16540 }
16541 }
16542 }
16543
16544 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16545 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16546 && type == TREE_TYPE (TREE_TYPE (subtype))
16547 && (!in_gimple_form
16548 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16549 {
16550 tree type_domain;
16551 tree min_val = size_zero_node;
16552 sub = build_fold_indirect_ref_loc (loc, sub);
16553 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16554 if (type_domain && TYPE_MIN_VALUE (type_domain))
16555 min_val = TYPE_MIN_VALUE (type_domain);
16556 if (in_gimple_form
16557 && TREE_CODE (min_val) != INTEGER_CST)
16558 return NULL_TREE;
16559 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16560 NULL_TREE);
16561 }
16562
16563 return NULL_TREE;
16564 }
16565
16566 /* Builds an expression for an indirection through T, simplifying some
16567 cases. */
16568
16569 tree
16570 build_fold_indirect_ref_loc (location_t loc, tree t)
16571 {
16572 tree type = TREE_TYPE (TREE_TYPE (t));
16573 tree sub = fold_indirect_ref_1 (loc, type, t);
16574
16575 if (sub)
16576 return sub;
16577
16578 return build1_loc (loc, INDIRECT_REF, type, t);
16579 }
16580
16581 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16582
16583 tree
16584 fold_indirect_ref_loc (location_t loc, tree t)
16585 {
16586 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16587
16588 if (sub)
16589 return sub;
16590 else
16591 return t;
16592 }
16593
16594 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16595 whose result is ignored. The type of the returned tree need not be
16596 the same as the original expression. */
16597
16598 tree
16599 fold_ignored_result (tree t)
16600 {
16601 if (!TREE_SIDE_EFFECTS (t))
16602 return integer_zero_node;
16603
16604 for (;;)
16605 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16606 {
16607 case tcc_unary:
16608 t = TREE_OPERAND (t, 0);
16609 break;
16610
16611 case tcc_binary:
16612 case tcc_comparison:
16613 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16614 t = TREE_OPERAND (t, 0);
16615 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16616 t = TREE_OPERAND (t, 1);
16617 else
16618 return t;
16619 break;
16620
16621 case tcc_expression:
16622 switch (TREE_CODE (t))
16623 {
16624 case COMPOUND_EXPR:
16625 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16626 return t;
16627 t = TREE_OPERAND (t, 0);
16628 break;
16629
16630 case COND_EXPR:
16631 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16632 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16633 return t;
16634 t = TREE_OPERAND (t, 0);
16635 break;
16636
16637 default:
16638 return t;
16639 }
16640 break;
16641
16642 default:
16643 return t;
16644 }
16645 }
16646
16647 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16648
16649 tree
16650 round_up_loc (location_t loc, tree value, int divisor)
16651 {
16652 tree div = NULL_TREE;
16653
16654 gcc_assert (divisor > 0);
16655 if (divisor == 1)
16656 return value;
16657
16658   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16659      have to do anything.  Only do this when we are not given a const,
16660      because in that case this check is more expensive than simply
16661      performing the rounding.  */
16662 if (TREE_CODE (value) != INTEGER_CST)
16663 {
16664 div = build_int_cst (TREE_TYPE (value), divisor);
16665
16666 if (multiple_of_p (TREE_TYPE (value), value, div))
16667 return value;
16668 }
16669
16670 /* If divisor is a power of two, simplify this to bit manipulation. */
16671 if (divisor == (divisor & -divisor))
16672 {
16673 if (TREE_CODE (value) == INTEGER_CST)
16674 {
16675 wide_int val = value;
16676 bool overflow_p;
16677
16678 if ((val & (divisor - 1)) == 0)
16679 return value;
16680
16681 overflow_p = TREE_OVERFLOW (value);
16682 val &= ~(divisor - 1);
16683 val += divisor;
16684 if (val == 0)
16685 overflow_p = true;
16686
16687 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16688 }
16689 else
16690 {
16691 tree t;
16692
16693 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16694 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16695 t = build_int_cst (TREE_TYPE (value), -divisor);
16696 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16697 }
16698 }
16699 else
16700 {
16701 if (!div)
16702 div = build_int_cst (TREE_TYPE (value), divisor);
16703 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16704 value = size_binop_loc (loc, MULT_EXPR, value, div);
16705 }
16706
16707 return value;
16708 }
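
/* A quick check of the power-of-two path above with VALUE = 13 and
   DIVISOR = 8: (13 + 7) & -8 is 20 & ~7, which is 16, the next
   multiple of 8.  An already-aligned value such as 16 is unchanged:
   (16 + 7) & -8 is again 16.  */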
16709
16710 /* Likewise, but round down. */
16711
16712 tree
16713 round_down_loc (location_t loc, tree value, int divisor)
16714 {
16715 tree div = NULL_TREE;
16716
16717 gcc_assert (divisor > 0);
16718 if (divisor == 1)
16719 return value;
16720
16721   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16722      have to do anything.  Only do this when we are not given a const,
16723      because in that case this check is more expensive than simply
16724      performing the rounding.  */
16725 if (TREE_CODE (value) != INTEGER_CST)
16726 {
16727 div = build_int_cst (TREE_TYPE (value), divisor);
16728
16729 if (multiple_of_p (TREE_TYPE (value), value, div))
16730 return value;
16731 }
16732
16733 /* If divisor is a power of two, simplify this to bit manipulation. */
16734 if (divisor == (divisor & -divisor))
16735 {
16736 tree t;
16737
16738 t = build_int_cst (TREE_TYPE (value), -divisor);
16739 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16740 }
16741 else
16742 {
16743 if (!div)
16744 div = build_int_cst (TREE_TYPE (value), divisor);
16745 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16746 value = size_binop_loc (loc, MULT_EXPR, value, div);
16747 }
16748
16749 return value;
16750 }
16751
16752 /* Returns the pointer to the base of the object addressed by EXP and
16753 extracts the information about the offset of the access, storing it
16754 to PBITPOS and POFFSET. */
16755
16756 static tree
16757 split_address_to_core_and_offset (tree exp,
16758 HOST_WIDE_INT *pbitpos, tree *poffset)
16759 {
16760 tree core;
16761 enum machine_mode mode;
16762 int unsignedp, volatilep;
16763 HOST_WIDE_INT bitsize;
16764 location_t loc = EXPR_LOCATION (exp);
16765
16766 if (TREE_CODE (exp) == ADDR_EXPR)
16767 {
16768 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16769 poffset, &mode, &unsignedp, &volatilep,
16770 false);
16771 core = build_fold_addr_expr_loc (loc, core);
16772 }
16773 else
16774 {
16775 core = exp;
16776 *pbitpos = 0;
16777 *poffset = NULL_TREE;
16778 }
16779
16780 return core;
16781 }
16782
16783 /* Returns true if addresses of E1 and E2 differ by a constant, false
16784 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16785
16786 bool
16787 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16788 {
16789 tree core1, core2;
16790 HOST_WIDE_INT bitpos1, bitpos2;
16791 tree toffset1, toffset2, tdiff, type;
16792
16793 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16794 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16795
16796 if (bitpos1 % BITS_PER_UNIT != 0
16797 || bitpos2 % BITS_PER_UNIT != 0
16798 || !operand_equal_p (core1, core2, 0))
16799 return false;
16800
16801 if (toffset1 && toffset2)
16802 {
16803 type = TREE_TYPE (toffset1);
16804 if (type != TREE_TYPE (toffset2))
16805 toffset2 = fold_convert (type, toffset2);
16806
16807 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16808 if (!cst_and_fits_in_hwi (tdiff))
16809 return false;
16810
16811 *diff = int_cst_value (tdiff);
16812 }
16813 else if (toffset1 || toffset2)
16814 {
16815 /* If only one of the offsets is non-constant, the difference cannot
16816 be a constant. */
16817 return false;
16818 }
16819 else
16820 *diff = 0;
16821
16822 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16823 return true;
16824 }
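
/* For example, with E1 = &a[3] and E2 = &a[1] for an array A of
   4-byte ints, both addresses share the core &a and the offsets are
   the constant bit positions 96 and 32, so the function stores
   (96 - 32) / BITS_PER_UNIT = 8 in *DIFF and returns true.  */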
16825
16826 /* Simplify the floating point expression EXP when the sign of the
16827 result is not significant. Return NULL_TREE if no simplification
16828 is possible. */
16829
16830 tree
16831 fold_strip_sign_ops (tree exp)
16832 {
16833 tree arg0, arg1;
16834 location_t loc = EXPR_LOCATION (exp);
16835
16836 switch (TREE_CODE (exp))
16837 {
16838 case ABS_EXPR:
16839 case NEGATE_EXPR:
16840 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16841 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16842
16843 case MULT_EXPR:
16844 case RDIV_EXPR:
16845 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16846 return NULL_TREE;
16847 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16848 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16849 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16850 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16851 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16852 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16853 break;
16854
16855 case COMPOUND_EXPR:
16856 arg0 = TREE_OPERAND (exp, 0);
16857 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16858 if (arg1)
16859 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16860 break;
16861
16862 case COND_EXPR:
16863 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16864 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16865 if (arg0 || arg1)
16866 return fold_build3_loc (loc,
16867 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16868 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16869 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16870 break;
16871
16872 case CALL_EXPR:
16873 {
16874 const enum built_in_function fcode = builtin_mathfn_code (exp);
16875 switch (fcode)
16876 {
16877 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16878 /* Strip copysign function call, return the 1st argument. */
16879 arg0 = CALL_EXPR_ARG (exp, 0);
16880 arg1 = CALL_EXPR_ARG (exp, 1);
16881 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16882
16883 default:
16884 /* Strip sign ops from the argument of "odd" math functions. */
16885 if (negate_mathfn_p (fcode))
16886 {
16887 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16888 if (arg0)
16889 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16890 }
16891 break;
16892 }
16893 }
16894 break;
16895
16896 default:
16897 break;
16898 }
16899 return NULL_TREE;
16900 }
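
/* Two sketches of the stripping above: NEGATE_EXPR <x> * ABS_EXPR <y>
   becomes x * y when sign-dependent rounding is not honored, and a
   copysign (x, y) call becomes just X, with omit_one_operand_loc
   preserving any side effects of Y.  */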